googlecloud 0.0.2 → 0.0.4
- data.tar.gz.sig +0 -0
- data/CHANGELOG +4 -0
- data/LICENSE +674 -0
- data/Manifest +111 -0
- data/README.md +4 -3
- data/bin/gcutil +53 -0
- data/googlecloud.gemspec +4 -3
- data/packages/gcutil-1.7.1/CHANGELOG +197 -0
- data/packages/gcutil-1.7.1/LICENSE +202 -0
- data/packages/gcutil-1.7.1/VERSION +1 -0
- data/packages/gcutil-1.7.1/gcutil +53 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/LICENSE +23 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/__init__.py +1 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/discovery.py +743 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/errors.py +123 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/ext/__init__.py +0 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/http.py +1443 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/mimeparse.py +172 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/model.py +385 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/schema.py +303 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/__init__.py +1 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/anyjson.py +32 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/appengine.py +528 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/client.py +1139 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/clientsecrets.py +105 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/crypt.py +244 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/django_orm.py +124 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/file.py +107 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/locked_file.py +343 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/multistore_file.py +379 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/oauth2client/tools.py +174 -0
- data/packages/gcutil-1.7.1/lib/google_api_python_client/uritemplate/__init__.py +147 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/LICENSE +202 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/__init__.py +3 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/__init__.py +3 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/app.py +356 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/appcommands.py +783 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/basetest.py +1260 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/datelib.py +421 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/debug.py +60 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/file_util.py +181 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/resources.py +67 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/run_script_module.py +217 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/setup_command.py +159 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/shellutil.py +49 -0
- data/packages/gcutil-1.7.1/lib/google_apputils/google/apputils/stopwatch.py +204 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/__init__.py +0 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/auth_helper.py +140 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/auth_helper_test.py +149 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/auto_auth.py +130 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/auto_auth_test.py +75 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/basic_cmds.py +128 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/basic_cmds_test.py +111 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/command_base.py +1808 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/command_base_test.py +1651 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/compute/v1beta13.json +2851 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/compute/v1beta14.json +3361 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/disk_cmds.py +342 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/disk_cmds_test.py +474 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/firewall_cmds.py +344 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/firewall_cmds_test.py +231 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/flags_cache.py +274 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/gcutil +89 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/gcutil_logging.py +69 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/image_cmds.py +262 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/image_cmds_test.py +172 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/instance_cmds.py +1506 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/instance_cmds_test.py +1904 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/kernel_cmds.py +91 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/kernel_cmds_test.py +56 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/machine_type_cmds.py +106 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/machine_type_cmds_test.py +59 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/metadata.py +96 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/metadata_lib.py +357 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/metadata_test.py +84 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/mock_api.py +420 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/mock_metadata.py +58 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/move_cmds.py +824 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/move_cmds_test.py +307 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/network_cmds.py +178 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/network_cmds_test.py +133 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/operation_cmds.py +181 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/operation_cmds_test.py +196 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/path_initializer.py +38 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/project_cmds.py +173 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/project_cmds_test.py +111 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/scopes.py +61 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/scopes_test.py +50 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/snapshot_cmds.py +276 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/snapshot_cmds_test.py +260 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/ssh_keys.py +266 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/ssh_keys_test.py +128 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/table_formatter.py +563 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/thread_pool.py +188 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/thread_pool_test.py +88 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/utils.py +208 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/utils_test.py +193 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/version.py +17 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/version_checker.py +246 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/version_checker_test.py +271 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/zone_cmds.py +151 -0
- data/packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/zone_cmds_test.py +60 -0
- data/packages/gcutil-1.7.1/lib/httplib2/LICENSE +21 -0
- data/packages/gcutil-1.7.1/lib/httplib2/httplib2/__init__.py +1630 -0
- data/packages/gcutil-1.7.1/lib/httplib2/httplib2/cacerts.txt +714 -0
- data/packages/gcutil-1.7.1/lib/httplib2/httplib2/iri2uri.py +110 -0
- data/packages/gcutil-1.7.1/lib/httplib2/httplib2/socks.py +438 -0
- data/packages/gcutil-1.7.1/lib/iso8601/LICENSE +20 -0
- data/packages/gcutil-1.7.1/lib/iso8601/iso8601/__init__.py +1 -0
- data/packages/gcutil-1.7.1/lib/iso8601/iso8601/iso8601.py +102 -0
- data/packages/gcutil-1.7.1/lib/iso8601/iso8601/test_iso8601.py +111 -0
- data/packages/gcutil-1.7.1/lib/python_gflags/AUTHORS +2 -0
- data/packages/gcutil-1.7.1/lib/python_gflags/LICENSE +28 -0
- data/packages/gcutil-1.7.1/lib/python_gflags/gflags.py +2862 -0
- data/packages/gcutil-1.7.1/lib/python_gflags/gflags2man.py +544 -0
- data/packages/gcutil-1.7.1/lib/python_gflags/gflags_validators.py +187 -0
- metadata +118 -5
- metadata.gz.sig +0 -0
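
The two hunks below are the newly bundled httplib2 files (`lib/httplib2/LICENSE`, +21 lines, and `lib/httplib2/httplib2/__init__.py`, +1630 lines, per the manifest above). For orientation, here is a minimal usage sketch of the httplib2 API this package vendors. It is illustrative only, not part of the gem diff; the cache directory name and URL are placeholder values.

    # Minimal httplib2 usage sketch (Python 2, matching the vendored source below).
    import httplib2

    # Passing a string as `cache` creates a FileCache in that directory
    # (see Http.__init__ and FileCache in the source below).
    h = httplib2.Http(".cache", timeout=10)
    response, content = h.request("http://example.org/", "GET")
    print response.status  # e.g. 200; repeated requests may be served from cache via ETags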
data/packages/gcutil-1.7.1/lib/httplib2/LICENSE
@@ -0,0 +1,21 @@
+Copyright (c) 2006 by Joe Gregorio
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software,
+and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
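
Before the main source hunk, one more illustrative sketch, not part of the diff: how the vendored httplib2 derives proxy settings from the environment (see ProxyInfo.from_environment and ProxyInfo.from_url in the source below). The proxy host, port, and credentials are made-up example values.

    import os
    import httplib2

    # from_environment('http') reads http_proxy/HTTP_PROXY and parses it via from_url.
    os.environ['http_proxy'] = 'http://user:secret@proxy.example.com:3128'
    pi = httplib2.ProxyInfo.from_environment('http')
    # pi.proxy_host == 'proxy.example.com', pi.proxy_port == 3128,
    # pi.proxy_user == 'user', pi.proxy_pass == 'secret'
    h = httplib2.Http(proxy_info=pi)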
@@ -0,0 +1,1630 @@
|
|
1
|
+
from __future__ import generators
|
2
|
+
"""
|
3
|
+
httplib2
|
4
|
+
|
5
|
+
A caching http interface that supports ETags and gzip
|
6
|
+
to conserve bandwidth.
|
7
|
+
|
8
|
+
Requires Python 2.3 or later
|
9
|
+
|
10
|
+
Changelog:
|
11
|
+
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
|
12
|
+
|
13
|
+
"""
|
14
|
+
|
15
|
+
__author__ = "Joe Gregorio (joe@bitworking.org)"
|
16
|
+
__copyright__ = "Copyright 2006, Joe Gregorio"
|
17
|
+
__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
|
18
|
+
"James Antill",
|
19
|
+
"Xavier Verges Farrero",
|
20
|
+
"Jonathan Feinberg",
|
21
|
+
"Blair Zajac",
|
22
|
+
"Sam Ruby",
|
23
|
+
"Louis Nyffenegger"]
|
24
|
+
__license__ = "MIT"
|
25
|
+
__version__ = "0.7.4"
|
26
|
+
|
27
|
+
import re
|
28
|
+
import sys
|
29
|
+
import email
|
30
|
+
import email.Utils
|
31
|
+
import email.Message
|
32
|
+
import email.FeedParser
|
33
|
+
import StringIO
|
34
|
+
import gzip
|
35
|
+
import zlib
|
36
|
+
import httplib
|
37
|
+
import urlparse
|
38
|
+
import urllib
|
39
|
+
import base64
|
40
|
+
import os
|
41
|
+
import copy
|
42
|
+
import calendar
|
43
|
+
import time
|
44
|
+
import random
|
45
|
+
import errno
|
46
|
+
try:
|
47
|
+
from hashlib import sha1 as _sha, md5 as _md5
|
48
|
+
except ImportError:
|
49
|
+
# prior to Python 2.5, these were separate modules
|
50
|
+
import sha
|
51
|
+
import md5
|
52
|
+
_sha = sha.new
|
53
|
+
_md5 = md5.new
|
54
|
+
import hmac
|
55
|
+
from gettext import gettext as _
|
56
|
+
import socket
|
57
|
+
|
58
|
+
try:
|
59
|
+
from httplib2 import socks
|
60
|
+
except ImportError:
|
61
|
+
try:
|
62
|
+
import socks
|
63
|
+
except ImportError:
|
64
|
+
socks = None
|
65
|
+
|
66
|
+
# Build the appropriate socket wrapper for ssl
|
67
|
+
try:
|
68
|
+
import ssl # python 2.6
|
69
|
+
ssl_SSLError = ssl.SSLError
|
70
|
+
def _ssl_wrap_socket(sock, key_file, cert_file,
|
71
|
+
disable_validation, ca_certs):
|
72
|
+
if disable_validation:
|
73
|
+
cert_reqs = ssl.CERT_NONE
|
74
|
+
else:
|
75
|
+
cert_reqs = ssl.CERT_REQUIRED
|
76
|
+
# We should be specifying SSL version 3 or TLS v1, but the ssl module
|
77
|
+
# doesn't expose the necessary knobs. So we need to go with the default
|
78
|
+
# of SSLv23.
|
79
|
+
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
|
80
|
+
cert_reqs=cert_reqs, ca_certs=ca_certs)
|
81
|
+
except (AttributeError, ImportError):
|
82
|
+
ssl_SSLError = None
|
83
|
+
def _ssl_wrap_socket(sock, key_file, cert_file,
|
84
|
+
disable_validation, ca_certs):
|
85
|
+
if not disable_validation:
|
86
|
+
raise CertificateValidationUnsupported(
|
87
|
+
"SSL certificate validation is not supported without "
|
88
|
+
"the ssl module installed. To avoid this error, install "
|
89
|
+
"the ssl module, or explicity disable validation.")
|
90
|
+
ssl_sock = socket.ssl(sock, key_file, cert_file)
|
91
|
+
return httplib.FakeSocket(sock, ssl_sock)
|
92
|
+
|
93
|
+
|
94
|
+
if sys.version_info >= (2,3):
|
95
|
+
from iri2uri import iri2uri
|
96
|
+
else:
|
97
|
+
def iri2uri(uri):
|
98
|
+
return uri
|
99
|
+
|
100
|
+
def has_timeout(timeout): # python 2.6
|
101
|
+
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
|
102
|
+
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
|
103
|
+
return (timeout is not None)
|
104
|
+
|
105
|
+
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
|
106
|
+
'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
|
107
|
+
'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
|
108
|
+
'debuglevel', 'ProxiesUnavailableError']
|
109
|
+
|
110
|
+
|
111
|
+
# The httplib debug level, set to a non-zero value to get debug output
|
112
|
+
debuglevel = 0
|
113
|
+
|
114
|
+
|
115
|
+
# Python 2.3 support
|
116
|
+
if sys.version_info < (2,4):
|
117
|
+
def sorted(seq):
|
118
|
+
seq.sort()
|
119
|
+
return seq
|
120
|
+
|
121
|
+
# Python 2.3 support
|
122
|
+
def HTTPResponse__getheaders(self):
|
123
|
+
"""Return list of (header, value) tuples."""
|
124
|
+
if self.msg is None:
|
125
|
+
raise httplib.ResponseNotReady()
|
126
|
+
return self.msg.items()
|
127
|
+
|
128
|
+
if not hasattr(httplib.HTTPResponse, 'getheaders'):
|
129
|
+
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
|
130
|
+
|
131
|
+
# All exceptions raised here derive from HttpLib2Error
|
132
|
+
class HttpLib2Error(Exception): pass
|
133
|
+
|
134
|
+
# Some exceptions can be caught and optionally
|
135
|
+
# be turned back into responses.
|
136
|
+
class HttpLib2ErrorWithResponse(HttpLib2Error):
|
137
|
+
def __init__(self, desc, response, content):
|
138
|
+
self.response = response
|
139
|
+
self.content = content
|
140
|
+
HttpLib2Error.__init__(self, desc)
|
141
|
+
|
142
|
+
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
|
143
|
+
class RedirectLimit(HttpLib2ErrorWithResponse): pass
|
144
|
+
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
|
145
|
+
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
|
146
|
+
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
|
147
|
+
|
148
|
+
class MalformedHeader(HttpLib2Error): pass
|
149
|
+
class RelativeURIError(HttpLib2Error): pass
|
150
|
+
class ServerNotFoundError(HttpLib2Error): pass
|
151
|
+
class ProxiesUnavailableError(HttpLib2Error): pass
|
152
|
+
class CertificateValidationUnsupported(HttpLib2Error): pass
|
153
|
+
class SSLHandshakeError(HttpLib2Error): pass
|
154
|
+
class NotSupportedOnThisPlatform(HttpLib2Error): pass
|
155
|
+
class CertificateHostnameMismatch(SSLHandshakeError):
|
156
|
+
def __init__(self, desc, host, cert):
|
157
|
+
HttpLib2Error.__init__(self, desc)
|
158
|
+
self.host = host
|
159
|
+
self.cert = cert
|
160
|
+
|
161
|
+
# Open Items:
|
162
|
+
# -----------
|
163
|
+
# Proxy support
|
164
|
+
|
165
|
+
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
|
166
|
+
|
167
|
+
# Pluggable cache storage (supports storing the cache in
|
168
|
+
# flat files by default. We need a plug-in architecture
|
169
|
+
# that can support Berkeley DB and Squid)
|
170
|
+
|
171
|
+
# == Known Issues ==
|
172
|
+
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
|
173
|
+
# Does not handle Cache-Control: max-stale
|
174
|
+
# Does not use Age: headers when calculating cache freshness.
|
175
|
+
|
176
|
+
|
177
|
+
# The number of redirections to follow before giving up.
|
178
|
+
# Note that only GET redirects are automatically followed.
|
179
|
+
# Will also honor 301 requests by saving that info and never
|
180
|
+
# requesting that URI again.
|
181
|
+
DEFAULT_MAX_REDIRECTS = 5
|
182
|
+
|
183
|
+
# Default CA certificates file bundled with httplib2.
|
184
|
+
CA_CERTS = os.path.join(
|
185
|
+
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
|
186
|
+
|
187
|
+
# Which headers are hop-by-hop headers by default
|
188
|
+
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
|
189
|
+
|
190
|
+
def _get_end2end_headers(response):
|
191
|
+
hopbyhop = list(HOP_BY_HOP)
|
192
|
+
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
|
193
|
+
return [header for header in response.keys() if header not in hopbyhop]
|
194
|
+
|
195
|
+
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
|
196
|
+
|
197
|
+
def parse_uri(uri):
|
198
|
+
"""Parses a URI using the regex given in Appendix B of RFC 3986.
|
199
|
+
|
200
|
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
201
|
+
"""
|
202
|
+
groups = URI.match(uri).groups()
|
203
|
+
return (groups[1], groups[3], groups[4], groups[6], groups[8])
|
204
|
+
|
205
|
+
def urlnorm(uri):
|
206
|
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
207
|
+
if not scheme or not authority:
|
208
|
+
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
|
209
|
+
authority = authority.lower()
|
210
|
+
scheme = scheme.lower()
|
211
|
+
if not path:
|
212
|
+
path = "/"
|
213
|
+
# Could do syntax based normalization of the URI before
|
214
|
+
# computing the digest. See Section 6.2.2 of Std 66.
|
215
|
+
request_uri = query and "?".join([path, query]) or path
|
216
|
+
scheme = scheme.lower()
|
217
|
+
defrag_uri = scheme + "://" + authority + request_uri
|
218
|
+
return scheme, authority, request_uri, defrag_uri
|
219
|
+
|
220
|
+
|
221
|
+
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
|
222
|
+
re_url_scheme = re.compile(r'^\w+://')
|
223
|
+
re_slash = re.compile(r'[?/:|]+')
|
224
|
+
|
225
|
+
def safename(filename):
|
226
|
+
"""Return a filename suitable for the cache.
|
227
|
+
|
228
|
+
Strips dangerous and common characters to create a filename we
|
229
|
+
can use to store the cache in.
|
230
|
+
"""
|
231
|
+
|
232
|
+
try:
|
233
|
+
if re_url_scheme.match(filename):
|
234
|
+
if isinstance(filename,str):
|
235
|
+
filename = filename.decode('utf-8')
|
236
|
+
filename = filename.encode('idna')
|
237
|
+
else:
|
238
|
+
filename = filename.encode('idna')
|
239
|
+
except UnicodeError:
|
240
|
+
pass
|
241
|
+
if isinstance(filename,unicode):
|
242
|
+
filename=filename.encode('utf-8')
|
243
|
+
filemd5 = _md5(filename).hexdigest()
|
244
|
+
filename = re_url_scheme.sub("", filename)
|
245
|
+
filename = re_slash.sub(",", filename)
|
246
|
+
|
247
|
+
# limit length of filename
|
248
|
+
if len(filename)>200:
|
249
|
+
filename=filename[:200]
|
250
|
+
return ",".join((filename, filemd5))
|
251
|
+
|
252
|
+
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
|
253
|
+
def _normalize_headers(headers):
|
254
|
+
return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])
|
255
|
+
|
256
|
+
def _parse_cache_control(headers):
|
257
|
+
retval = {}
|
258
|
+
if headers.has_key('cache-control'):
|
259
|
+
parts = headers['cache-control'].split(',')
|
260
|
+
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
|
261
|
+
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
|
262
|
+
retval = dict(parts_with_args + parts_wo_args)
|
263
|
+
return retval
|
264
|
+
|
265
|
+
# Whether to use a strict mode to parse WWW-Authenticate headers
|
266
|
+
# Might lead to bad results in case of ill-formed header value,
|
267
|
+
# so disabled by default, falling back to relaxed parsing.
|
268
|
+
# Set to true to turn on, usefull for testing servers.
|
269
|
+
USE_WWW_AUTH_STRICT_PARSING = 0
|
270
|
+
|
271
|
+
# In regex below:
|
272
|
+
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
|
273
|
+
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
|
274
|
+
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
|
275
|
+
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
|
276
|
+
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
|
277
|
+
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
|
278
|
+
UNQUOTE_PAIRS = re.compile(r'\\(.)')
|
279
|
+
def _parse_www_authenticate(headers, headername='www-authenticate'):
|
280
|
+
"""Returns a dictionary of dictionaries, one dict
|
281
|
+
per auth_scheme."""
|
282
|
+
retval = {}
|
283
|
+
if headers.has_key(headername):
|
284
|
+
try:
|
285
|
+
authenticate = headers[headername].strip()
|
286
|
+
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
|
287
|
+
while authenticate:
|
288
|
+
# Break off the scheme at the beginning of the line
|
289
|
+
if headername == 'authentication-info':
|
290
|
+
(auth_scheme, the_rest) = ('digest', authenticate)
|
291
|
+
else:
|
292
|
+
(auth_scheme, the_rest) = authenticate.split(" ", 1)
|
293
|
+
# Now loop over all the key value pairs that come after the scheme,
|
294
|
+
# being careful not to roll into the next scheme
|
295
|
+
match = www_auth.search(the_rest)
|
296
|
+
auth_params = {}
|
297
|
+
while match:
|
298
|
+
if match and len(match.groups()) == 3:
|
299
|
+
(key, value, the_rest) = match.groups()
|
300
|
+
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
|
301
|
+
match = www_auth.search(the_rest)
|
302
|
+
retval[auth_scheme.lower()] = auth_params
|
303
|
+
authenticate = the_rest.strip()
|
304
|
+
except ValueError:
|
305
|
+
raise MalformedHeader("WWW-Authenticate")
|
306
|
+
return retval
|
307
|
+
|
308
|
+
|
309
|
+
def _entry_disposition(response_headers, request_headers):
|
310
|
+
"""Determine freshness from the Date, Expires and Cache-Control headers.
|
311
|
+
|
312
|
+
We don't handle the following:
|
313
|
+
|
314
|
+
1. Cache-Control: max-stale
|
315
|
+
2. Age: headers are not used in the calculations.
|
316
|
+
|
317
|
+
Not that this algorithm is simpler than you might think
|
318
|
+
because we are operating as a private (non-shared) cache.
|
319
|
+
This lets us ignore 's-maxage'. We can also ignore
|
320
|
+
'proxy-invalidate' since we aren't a proxy.
|
321
|
+
We will never return a stale document as
|
322
|
+
fresh as a design decision, and thus the non-implementation
|
323
|
+
of 'max-stale'. This also lets us safely ignore 'must-revalidate'
|
324
|
+
since we operate as if every server has sent 'must-revalidate'.
|
325
|
+
Since we are private we get to ignore both 'public' and
|
326
|
+
'private' parameters. We also ignore 'no-transform' since
|
327
|
+
we don't do any transformations.
|
328
|
+
The 'no-store' parameter is handled at a higher level.
|
329
|
+
So the only Cache-Control parameters we look at are:
|
330
|
+
|
331
|
+
no-cache
|
332
|
+
only-if-cached
|
333
|
+
max-age
|
334
|
+
min-fresh
|
335
|
+
"""
|
336
|
+
|
337
|
+
retval = "STALE"
|
338
|
+
cc = _parse_cache_control(request_headers)
|
339
|
+
cc_response = _parse_cache_control(response_headers)
|
340
|
+
|
341
|
+
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
|
342
|
+
retval = "TRANSPARENT"
|
343
|
+
if 'cache-control' not in request_headers:
|
344
|
+
request_headers['cache-control'] = 'no-cache'
|
345
|
+
elif cc.has_key('no-cache'):
|
346
|
+
retval = "TRANSPARENT"
|
347
|
+
elif cc_response.has_key('no-cache'):
|
348
|
+
retval = "STALE"
|
349
|
+
elif cc.has_key('only-if-cached'):
|
350
|
+
retval = "FRESH"
|
351
|
+
elif response_headers.has_key('date'):
|
352
|
+
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
|
353
|
+
now = time.time()
|
354
|
+
current_age = max(0, now - date)
|
355
|
+
if cc_response.has_key('max-age'):
|
356
|
+
try:
|
357
|
+
freshness_lifetime = int(cc_response['max-age'])
|
358
|
+
except ValueError:
|
359
|
+
freshness_lifetime = 0
|
360
|
+
elif response_headers.has_key('expires'):
|
361
|
+
expires = email.Utils.parsedate_tz(response_headers['expires'])
|
362
|
+
if None == expires:
|
363
|
+
freshness_lifetime = 0
|
364
|
+
else:
|
365
|
+
freshness_lifetime = max(0, calendar.timegm(expires) - date)
|
366
|
+
else:
|
367
|
+
freshness_lifetime = 0
|
368
|
+
if cc.has_key('max-age'):
|
369
|
+
try:
|
370
|
+
freshness_lifetime = int(cc['max-age'])
|
371
|
+
except ValueError:
|
372
|
+
freshness_lifetime = 0
|
373
|
+
if cc.has_key('min-fresh'):
|
374
|
+
try:
|
375
|
+
min_fresh = int(cc['min-fresh'])
|
376
|
+
except ValueError:
|
377
|
+
min_fresh = 0
|
378
|
+
current_age += min_fresh
|
379
|
+
if freshness_lifetime > current_age:
|
380
|
+
retval = "FRESH"
|
381
|
+
return retval
|
382
|
+
|
383
|
+
def _decompressContent(response, new_content):
|
384
|
+
content = new_content
|
385
|
+
try:
|
386
|
+
encoding = response.get('content-encoding', None)
|
387
|
+
if encoding in ['gzip', 'deflate']:
|
388
|
+
if encoding == 'gzip':
|
389
|
+
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
|
390
|
+
if encoding == 'deflate':
|
391
|
+
content = zlib.decompress(content)
|
392
|
+
response['content-length'] = str(len(content))
|
393
|
+
# Record the historical presence of the encoding in a way the won't interfere.
|
394
|
+
response['-content-encoding'] = response['content-encoding']
|
395
|
+
del response['content-encoding']
|
396
|
+
except IOError:
|
397
|
+
content = ""
|
398
|
+
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
|
399
|
+
return content
|
400
|
+
|
401
|
+
def _updateCache(request_headers, response_headers, content, cache, cachekey):
|
402
|
+
if cachekey:
|
403
|
+
cc = _parse_cache_control(request_headers)
|
404
|
+
cc_response = _parse_cache_control(response_headers)
|
405
|
+
if cc.has_key('no-store') or cc_response.has_key('no-store'):
|
406
|
+
cache.delete(cachekey)
|
407
|
+
else:
|
408
|
+
info = email.Message.Message()
|
409
|
+
for key, value in response_headers.iteritems():
|
410
|
+
if key not in ['status','content-encoding','transfer-encoding']:
|
411
|
+
info[key] = value
|
412
|
+
|
413
|
+
# Add annotations to the cache to indicate what headers
|
414
|
+
# are variant for this request.
|
415
|
+
vary = response_headers.get('vary', None)
|
416
|
+
if vary:
|
417
|
+
vary_headers = vary.lower().replace(' ', '').split(',')
|
418
|
+
for header in vary_headers:
|
419
|
+
key = '-varied-%s' % header
|
420
|
+
try:
|
421
|
+
info[key] = request_headers[header]
|
422
|
+
except KeyError:
|
423
|
+
pass
|
424
|
+
|
425
|
+
status = response_headers.status
|
426
|
+
if status == 304:
|
427
|
+
status = 200
|
428
|
+
|
429
|
+
status_header = 'status: %d\r\n' % status
|
430
|
+
|
431
|
+
header_str = info.as_string()
|
432
|
+
|
433
|
+
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
|
434
|
+
text = "".join([status_header, header_str, content])
|
435
|
+
|
436
|
+
cache.set(cachekey, text)
|
437
|
+
|
438
|
+
def _cnonce():
|
439
|
+
dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
|
440
|
+
return dig[:16]
|
441
|
+
|
442
|
+
def _wsse_username_token(cnonce, iso_now, password):
|
443
|
+
return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
|
444
|
+
|
445
|
+
|
446
|
+
# For credentials we need two things, first
|
447
|
+
# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
|
448
|
+
# Then we also need a list of URIs that have already demanded authentication
|
449
|
+
# That list is tricky since sub-URIs can take the same auth, or the
|
450
|
+
# auth scheme may change as you descend the tree.
|
451
|
+
# So we also need each Auth instance to be able to tell us
|
452
|
+
# how close to the 'top' it is.
|
453
|
+
|
454
|
+
class Authentication(object):
|
455
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
456
|
+
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
|
457
|
+
self.path = path
|
458
|
+
self.host = host
|
459
|
+
self.credentials = credentials
|
460
|
+
self.http = http
|
461
|
+
|
462
|
+
def depth(self, request_uri):
|
463
|
+
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
|
464
|
+
return request_uri[len(self.path):].count("/")
|
465
|
+
|
466
|
+
def inscope(self, host, request_uri):
|
467
|
+
# XXX Should we normalize the request_uri?
|
468
|
+
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
|
469
|
+
return (host == self.host) and path.startswith(self.path)
|
470
|
+
|
471
|
+
def request(self, method, request_uri, headers, content):
|
472
|
+
"""Modify the request headers to add the appropriate
|
473
|
+
Authorization header. Over-rise this in sub-classes."""
|
474
|
+
pass
|
475
|
+
|
476
|
+
def response(self, response, content):
|
477
|
+
"""Gives us a chance to update with new nonces
|
478
|
+
or such returned from the last authorized response.
|
479
|
+
Over-rise this in sub-classes if necessary.
|
480
|
+
|
481
|
+
Return TRUE is the request is to be retried, for
|
482
|
+
example Digest may return stale=true.
|
483
|
+
"""
|
484
|
+
return False
|
485
|
+
|
486
|
+
|
487
|
+
|
488
|
+
class BasicAuthentication(Authentication):
|
489
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
490
|
+
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
|
491
|
+
|
492
|
+
def request(self, method, request_uri, headers, content):
|
493
|
+
"""Modify the request headers to add the appropriate
|
494
|
+
Authorization header."""
|
495
|
+
headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
|
496
|
+
|
497
|
+
|
498
|
+
class DigestAuthentication(Authentication):
|
499
|
+
"""Only do qop='auth' and MD5, since that
|
500
|
+
is all Apache currently implements"""
|
501
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
502
|
+
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
|
503
|
+
challenge = _parse_www_authenticate(response, 'www-authenticate')
|
504
|
+
self.challenge = challenge['digest']
|
505
|
+
qop = self.challenge.get('qop', 'auth')
|
506
|
+
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
|
507
|
+
if self.challenge['qop'] is None:
|
508
|
+
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
|
509
|
+
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
|
510
|
+
if self.challenge['algorithm'] != 'MD5':
|
511
|
+
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
|
512
|
+
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
|
513
|
+
self.challenge['nc'] = 1
|
514
|
+
|
515
|
+
def request(self, method, request_uri, headers, content, cnonce = None):
|
516
|
+
"""Modify the request headers"""
|
517
|
+
H = lambda x: _md5(x).hexdigest()
|
518
|
+
KD = lambda s, d: H("%s:%s" % (s, d))
|
519
|
+
A2 = "".join([method, ":", request_uri])
|
520
|
+
self.challenge['cnonce'] = cnonce or _cnonce()
|
521
|
+
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
|
522
|
+
'%08x' % self.challenge['nc'],
|
523
|
+
self.challenge['cnonce'],
|
524
|
+
self.challenge['qop'], H(A2)
|
525
|
+
))
|
526
|
+
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
|
527
|
+
self.credentials[0],
|
528
|
+
self.challenge['realm'],
|
529
|
+
self.challenge['nonce'],
|
530
|
+
request_uri,
|
531
|
+
self.challenge['algorithm'],
|
532
|
+
request_digest,
|
533
|
+
self.challenge['qop'],
|
534
|
+
self.challenge['nc'],
|
535
|
+
self.challenge['cnonce'],
|
536
|
+
)
|
537
|
+
if self.challenge.get('opaque'):
|
538
|
+
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
|
539
|
+
self.challenge['nc'] += 1
|
540
|
+
|
541
|
+
def response(self, response, content):
|
542
|
+
if not response.has_key('authentication-info'):
|
543
|
+
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
|
544
|
+
if 'true' == challenge.get('stale'):
|
545
|
+
self.challenge['nonce'] = challenge['nonce']
|
546
|
+
self.challenge['nc'] = 1
|
547
|
+
return True
|
548
|
+
else:
|
549
|
+
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
|
550
|
+
|
551
|
+
if updated_challenge.has_key('nextnonce'):
|
552
|
+
self.challenge['nonce'] = updated_challenge['nextnonce']
|
553
|
+
self.challenge['nc'] = 1
|
554
|
+
return False
|
555
|
+
|
556
|
+
|
557
|
+
class HmacDigestAuthentication(Authentication):
|
558
|
+
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
|
559
|
+
__author__ = "Thomas Broyer (t.broyer@ltgt.net)"
|
560
|
+
|
561
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
562
|
+
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
|
563
|
+
challenge = _parse_www_authenticate(response, 'www-authenticate')
|
564
|
+
self.challenge = challenge['hmacdigest']
|
565
|
+
# TODO: self.challenge['domain']
|
566
|
+
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
|
567
|
+
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
|
568
|
+
self.challenge['reason'] = 'unauthorized'
|
569
|
+
self.challenge['salt'] = self.challenge.get('salt', '')
|
570
|
+
if not self.challenge.get('snonce'):
|
571
|
+
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
|
572
|
+
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
|
573
|
+
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
|
574
|
+
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
|
575
|
+
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
|
576
|
+
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
|
577
|
+
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
|
578
|
+
if self.challenge['algorithm'] == 'HMAC-MD5':
|
579
|
+
self.hashmod = _md5
|
580
|
+
else:
|
581
|
+
self.hashmod = _sha
|
582
|
+
if self.challenge['pw-algorithm'] == 'MD5':
|
583
|
+
self.pwhashmod = _md5
|
584
|
+
else:
|
585
|
+
self.pwhashmod = _sha
|
586
|
+
self.key = "".join([self.credentials[0], ":",
|
587
|
+
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
|
588
|
+
":", self.challenge['realm']
|
589
|
+
])
|
590
|
+
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
|
591
|
+
|
592
|
+
def request(self, method, request_uri, headers, content):
|
593
|
+
"""Modify the request headers"""
|
594
|
+
keys = _get_end2end_headers(headers)
|
595
|
+
keylist = "".join(["%s " % k for k in keys])
|
596
|
+
headers_val = "".join([headers[k] for k in keys])
|
597
|
+
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
|
598
|
+
cnonce = _cnonce()
|
599
|
+
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
|
600
|
+
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
|
601
|
+
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
|
602
|
+
self.credentials[0],
|
603
|
+
self.challenge['realm'],
|
604
|
+
self.challenge['snonce'],
|
605
|
+
cnonce,
|
606
|
+
request_uri,
|
607
|
+
created,
|
608
|
+
request_digest,
|
609
|
+
keylist,
|
610
|
+
)
|
611
|
+
|
612
|
+
def response(self, response, content):
|
613
|
+
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
|
614
|
+
if challenge.get('reason') in ['integrity', 'stale']:
|
615
|
+
return True
|
616
|
+
return False
|
617
|
+
|
618
|
+
|
619
|
+
class WsseAuthentication(Authentication):
|
620
|
+
"""This is thinly tested and should not be relied upon.
|
621
|
+
At this time there isn't any third party server to test against.
|
622
|
+
Blogger and TypePad implemented this algorithm at one point
|
623
|
+
but Blogger has since switched to Basic over HTTPS and
|
624
|
+
TypePad has implemented it wrong, by never issuing a 401
|
625
|
+
challenge but instead requiring your client to telepathically know that
|
626
|
+
their endpoint is expecting WSSE profile="UsernameToken"."""
|
627
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
628
|
+
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
|
629
|
+
|
630
|
+
def request(self, method, request_uri, headers, content):
|
631
|
+
"""Modify the request headers to add the appropriate
|
632
|
+
Authorization header."""
|
633
|
+
headers['authorization'] = 'WSSE profile="UsernameToken"'
|
634
|
+
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
|
635
|
+
cnonce = _cnonce()
|
636
|
+
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
|
637
|
+
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
|
638
|
+
self.credentials[0],
|
639
|
+
password_digest,
|
640
|
+
cnonce,
|
641
|
+
iso_now)
|
642
|
+
|
643
|
+
class GoogleLoginAuthentication(Authentication):
|
644
|
+
def __init__(self, credentials, host, request_uri, headers, response, content, http):
|
645
|
+
from urllib import urlencode
|
646
|
+
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
|
647
|
+
challenge = _parse_www_authenticate(response, 'www-authenticate')
|
648
|
+
service = challenge['googlelogin'].get('service', 'xapi')
|
649
|
+
# Bloggger actually returns the service in the challenge
|
650
|
+
# For the rest we guess based on the URI
|
651
|
+
if service == 'xapi' and request_uri.find("calendar") > 0:
|
652
|
+
service = "cl"
|
653
|
+
# No point in guessing Base or Spreadsheet
|
654
|
+
#elif request_uri.find("spreadsheets") > 0:
|
655
|
+
# service = "wise"
|
656
|
+
|
657
|
+
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
|
658
|
+
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
|
659
|
+
lines = content.split('\n')
|
660
|
+
d = dict([tuple(line.split("=", 1)) for line in lines if line])
|
661
|
+
if resp.status == 403:
|
662
|
+
self.Auth = ""
|
663
|
+
else:
|
664
|
+
self.Auth = d['Auth']
|
665
|
+
|
666
|
+
def request(self, method, request_uri, headers, content):
|
667
|
+
"""Modify the request headers to add the appropriate
|
668
|
+
Authorization header."""
|
669
|
+
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
|
670
|
+
|
671
|
+
|
672
|
+
AUTH_SCHEME_CLASSES = {
|
673
|
+
"basic": BasicAuthentication,
|
674
|
+
"wsse": WsseAuthentication,
|
675
|
+
"digest": DigestAuthentication,
|
676
|
+
"hmacdigest": HmacDigestAuthentication,
|
677
|
+
"googlelogin": GoogleLoginAuthentication
|
678
|
+
}
|
679
|
+
|
680
|
+
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
|
681
|
+
|
682
|
+
class FileCache(object):
|
683
|
+
"""Uses a local directory as a store for cached files.
|
684
|
+
Not really safe to use if multiple threads or processes are going to
|
685
|
+
be running on the same cache.
|
686
|
+
"""
|
687
|
+
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
|
688
|
+
self.cache = cache
|
689
|
+
self.safe = safe
|
690
|
+
if not os.path.exists(cache):
|
691
|
+
os.makedirs(self.cache)
|
692
|
+
|
693
|
+
def get(self, key):
|
694
|
+
retval = None
|
695
|
+
cacheFullPath = os.path.join(self.cache, self.safe(key))
|
696
|
+
try:
|
697
|
+
f = file(cacheFullPath, "rb")
|
698
|
+
retval = f.read()
|
699
|
+
f.close()
|
700
|
+
except IOError:
|
701
|
+
pass
|
702
|
+
return retval
|
703
|
+
|
704
|
+
def set(self, key, value):
|
705
|
+
cacheFullPath = os.path.join(self.cache, self.safe(key))
|
706
|
+
f = file(cacheFullPath, "wb")
|
707
|
+
f.write(value)
|
708
|
+
f.close()
|
709
|
+
|
710
|
+
def delete(self, key):
|
711
|
+
cacheFullPath = os.path.join(self.cache, self.safe(key))
|
712
|
+
if os.path.exists(cacheFullPath):
|
713
|
+
os.remove(cacheFullPath)
|
714
|
+
|
715
|
+
class Credentials(object):
|
716
|
+
def __init__(self):
|
717
|
+
self.credentials = []
|
718
|
+
|
719
|
+
def add(self, name, password, domain=""):
|
720
|
+
self.credentials.append((domain.lower(), name, password))
|
721
|
+
|
722
|
+
def clear(self):
|
723
|
+
self.credentials = []
|
724
|
+
|
725
|
+
def iter(self, domain):
|
726
|
+
for (cdomain, name, password) in self.credentials:
|
727
|
+
if cdomain == "" or domain == cdomain:
|
728
|
+
yield (name, password)
|
729
|
+
|
730
|
+
class KeyCerts(Credentials):
|
731
|
+
"""Identical to Credentials except that
|
732
|
+
name/password are mapped to key/cert."""
|
733
|
+
pass
|
734
|
+
|
735
|
+
class AllHosts(object):
|
736
|
+
pass
|
737
|
+
|
738
|
+
class ProxyInfo(object):
|
739
|
+
"""Collect information required to use a proxy."""
|
740
|
+
bypass_hosts = ()
|
741
|
+
|
742
|
+
def __init__(self, proxy_type, proxy_host, proxy_port,
|
743
|
+
proxy_rdns=None, proxy_user=None, proxy_pass=None):
|
744
|
+
"""The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
|
745
|
+
constants. For example:
|
746
|
+
|
747
|
+
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
|
748
|
+
proxy_host='localhost', proxy_port=8000)
|
749
|
+
"""
|
750
|
+
self.proxy_type = proxy_type
|
751
|
+
self.proxy_host = proxy_host
|
752
|
+
self.proxy_port = proxy_port
|
753
|
+
self.proxy_rdns = proxy_rdns
|
754
|
+
self.proxy_user = proxy_user
|
755
|
+
self.proxy_pass = proxy_pass
|
756
|
+
|
757
|
+
def astuple(self):
|
758
|
+
return (self.proxy_type, self.proxy_host, self.proxy_port,
|
759
|
+
self.proxy_rdns, self.proxy_user, self.proxy_pass)
|
760
|
+
|
761
|
+
def isgood(self):
|
762
|
+
return (self.proxy_host != None) and (self.proxy_port != None)
|
763
|
+
|
764
|
+
@classmethod
|
765
|
+
def from_environment(cls, method='http'):
|
766
|
+
"""
|
767
|
+
Read proxy info from the environment variables.
|
768
|
+
"""
|
769
|
+
if method not in ['http', 'https']:
|
770
|
+
return
|
771
|
+
|
772
|
+
env_var = method + '_proxy'
|
773
|
+
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
|
774
|
+
if not url:
|
775
|
+
return
|
776
|
+
pi = cls.from_url(url, method)
|
777
|
+
|
778
|
+
no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
|
779
|
+
bypass_hosts = []
|
780
|
+
if no_proxy:
|
781
|
+
bypass_hosts = no_proxy.split(',')
|
782
|
+
# special case, no_proxy=* means all hosts bypassed
|
783
|
+
if no_proxy == '*':
|
784
|
+
bypass_hosts = AllHosts
|
785
|
+
|
786
|
+
pi.bypass_hosts = bypass_hosts
|
787
|
+
return pi
|
788
|
+
|
789
|
+
@classmethod
|
790
|
+
def from_url(cls, url, method='http'):
|
791
|
+
"""
|
792
|
+
Construct a ProxyInfo from a URL (such as http_proxy env var)
|
793
|
+
"""
|
794
|
+
url = urlparse.urlparse(url)
|
795
|
+
username = None
|
796
|
+
password = None
|
797
|
+
port = None
|
798
|
+
if '@' in url[1]:
|
799
|
+
ident, host_port = url[1].split('@', 1)
|
800
|
+
if ':' in ident:
|
801
|
+
username, password = ident.split(':', 1)
|
802
|
+
else:
|
803
|
+
password = ident
|
804
|
+
else:
|
805
|
+
host_port = url[1]
|
806
|
+
if ':' in host_port:
|
807
|
+
host, port = host_port.split(':', 1)
|
808
|
+
else:
|
809
|
+
host = host_port
|
810
|
+
|
811
|
+
if port:
|
812
|
+
port = int(port)
|
813
|
+
else:
|
814
|
+
port = dict(https=443, http=80)[method]
|
815
|
+
|
816
|
+
proxy_type = 3 # socks.PROXY_TYPE_HTTP
|
817
|
+
return cls(
|
818
|
+
proxy_type = proxy_type,
|
819
|
+
proxy_host = host,
|
820
|
+
proxy_port = port,
|
821
|
+
proxy_user = username or None,
|
822
|
+
proxy_pass = password or None,
|
823
|
+
)
|
824
|
+
|
825
|
+
def applies_to(self, hostname):
|
826
|
+
return not self.bypass_host(hostname)
|
827
|
+
|
828
|
+
def bypass_host(self, hostname):
|
829
|
+
"""Has this host been excluded from the proxy config"""
|
830
|
+
if self.bypass_hosts is AllHosts:
|
831
|
+
return True
|
832
|
+
|
833
|
+
bypass = False
|
834
|
+
for domain in self.bypass_hosts:
|
835
|
+
if hostname.endswith(domain):
|
836
|
+
bypass = True
|
837
|
+
|
838
|
+
return bypass
|
839
|
+
|
840
|
+
|
841
|
+
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
|
842
|
+
"""
|
843
|
+
HTTPConnection subclass that supports timeouts
|
844
|
+
|
845
|
+
All timeouts are in seconds. If None is passed for timeout then
|
846
|
+
Python's default timeout for sockets will be used. See for example
|
847
|
+
the docs of socket.setdefaulttimeout():
|
848
|
+
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
|
849
|
+
"""
|
850
|
+
|
851
|
+
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
|
852
|
+
httplib.HTTPConnection.__init__(self, host, port, strict)
|
853
|
+
self.timeout = timeout
|
854
|
+
self.proxy_info = proxy_info
|
855
|
+
|
856
|
+
def connect(self):
|
857
|
+
"""Connect to the host and port specified in __init__."""
|
858
|
+
# Mostly verbatim from httplib.py.
|
859
|
+
if self.proxy_info and socks is None:
|
860
|
+
raise ProxiesUnavailableError(
|
861
|
+
'Proxy support missing but proxy use was requested!')
|
862
|
+
msg = "getaddrinfo returns an empty list"
|
863
|
+
for res in socket.getaddrinfo(self.host, self.port, 0,
|
864
|
+
socket.SOCK_STREAM):
|
865
|
+
af, socktype, proto, canonname, sa = res
|
866
|
+
try:
|
867
|
+
if self.proxy_info and self.proxy_info.isgood():
|
868
|
+
self.sock = socks.socksocket(af, socktype, proto)
|
869
|
+
self.sock.setproxy(*self.proxy_info.astuple())
|
870
|
+
else:
|
871
|
+
self.sock = socket.socket(af, socktype, proto)
|
872
|
+
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
873
|
+
# Different from httplib: support timeouts.
|
874
|
+
if has_timeout(self.timeout):
|
875
|
+
self.sock.settimeout(self.timeout)
|
876
|
+
# End of difference from httplib.
|
877
|
+
if self.debuglevel > 0:
|
878
|
+
print "connect: (%s, %s)" % (self.host, self.port)
|
879
|
+
|
880
|
+
self.sock.connect((self.host, self.port) + sa[2:])
|
881
|
+
except socket.error, msg:
|
882
|
+
if self.debuglevel > 0:
|
883
|
+
print 'connect fail:', (self.host, self.port)
|
884
|
+
if self.sock:
|
885
|
+
self.sock.close()
|
886
|
+
self.sock = None
|
887
|
+
continue
|
888
|
+
break
|
889
|
+
if not self.sock:
|
890
|
+
raise socket.error, msg
|
891
|
+
|
892
|
+
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
|
893
|
+
"""
|
894
|
+
This class allows communication via SSL.
|
895
|
+
|
896
|
+
All timeouts are in seconds. If None is passed for timeout then
|
897
|
+
Python's default timeout for sockets will be used. See for example
|
898
|
+
the docs of socket.setdefaulttimeout():
|
899
|
+
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
|
900
|
+
"""
|
901
|
+
def __init__(self, host, port=None, key_file=None, cert_file=None,
|
902
|
+
strict=None, timeout=None, proxy_info=None,
|
903
|
+
ca_certs=None, disable_ssl_certificate_validation=False):
|
904
|
+
httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
|
905
|
+
cert_file=cert_file, strict=strict)
|
906
|
+
self.timeout = timeout
|
907
|
+
self.proxy_info = proxy_info
|
908
|
+
if ca_certs is None:
|
909
|
+
ca_certs = CA_CERTS
|
910
|
+
self.ca_certs = ca_certs
|
911
|
+
self.disable_ssl_certificate_validation = \
|
912
|
+
disable_ssl_certificate_validation
|
913
|
+
|
914
|
+
# The following two methods were adapted from https_wrapper.py, released
|
915
|
+
# with the Google Appengine SDK at
|
916
|
+
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
|
917
|
+
# under the following license:
|
918
|
+
#
|
919
|
+
# Copyright 2007 Google Inc.
|
920
|
+
#
|
921
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
922
|
+
# you may not use this file except in compliance with the License.
|
923
|
+
# You may obtain a copy of the License at
|
924
|
+
#
|
925
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
926
|
+
#
|
927
|
+
# Unless required by applicable law or agreed to in writing, software
|
928
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
929
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
930
|
+
# See the License for the specific language governing permissions and
|
931
|
+
# limitations under the License.
|
932
|
+
#
|
933
|
+
|
934
|
+
def _GetValidHostsForCert(self, cert):
|
935
|
+
"""Returns a list of valid host globs for an SSL certificate.
|
936
|
+
|
937
|
+
Args:
|
938
|
+
cert: A dictionary representing an SSL certificate.
|
939
|
+
Returns:
|
940
|
+
list: A list of valid host globs.
|
941
|
+
"""
|
942
|
+
if 'subjectAltName' in cert:
|
943
|
+
return [x[1] for x in cert['subjectAltName']
|
944
|
+
if x[0].lower() == 'dns']
|
945
|
+
else:
|
946
|
+
return [x[0][1] for x in cert['subject']
|
947
|
+
if x[0][0].lower() == 'commonname']
|
948
|
+
|
949
|
+
def _ValidateCertificateHostname(self, cert, hostname):
|
950
|
+
"""Validates that a given hostname is valid for an SSL certificate.
|
951
|
+
|
952
|
+
Args:
|
953
|
+
cert: A dictionary representing an SSL certificate.
|
954
|
+
hostname: The hostname to test.
|
955
|
+
Returns:
|
956
|
+
bool: Whether or not the hostname is valid for this certificate.
|
957
|
+
"""
|
958
|
+
hosts = self._GetValidHostsForCert(cert)
|
959
|
+
for host in hosts:
|
960
|
+
host_re = host.replace('.', '\.').replace('*', '[^.]*')
|
961
|
+
if re.search('^%s$' % (host_re,), hostname, re.I):
|
962
|
+
return True
|
963
|
+
return False
|
964
|
+
|
965
|
+
def connect(self):
|
966
|
+
"Connect to a host on a given (SSL) port."
|
967
|
+
|
968
|
+
msg = "getaddrinfo returns an empty list"
|
969
|
+
for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
|
970
|
+
self.host, self.port, 0, socket.SOCK_STREAM):
|
971
|
+
try:
|
972
|
+
if self.proxy_info and self.proxy_info.isgood():
|
973
|
+
sock = socks.socksocket(family, socktype, proto)
|
974
|
+
sock.setproxy(*self.proxy_info.astuple())
|
975
|
+
else:
|
976
|
+
sock = socket.socket(family, socktype, proto)
|
977
|
+
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
978
|
+
|
979
|
+
if has_timeout(self.timeout):
|
980
|
+
sock.settimeout(self.timeout)
|
981
|
+
sock.connect((self.host, self.port))
|
982
|
+
self.sock =_ssl_wrap_socket(
|
983
|
+
sock, self.key_file, self.cert_file,
|
984
|
+
self.disable_ssl_certificate_validation, self.ca_certs)
|
985
|
+
if self.debuglevel > 0:
|
986
|
+
print "connect: (%s, %s)" % (self.host, self.port)
|
987
|
+
if not self.disable_ssl_certificate_validation:
|
988
|
+
cert = self.sock.getpeercert()
|
989
|
+
hostname = self.host.split(':', 0)[0]
|
990
|
+
if not self._ValidateCertificateHostname(cert, hostname):
|
991
|
+
raise CertificateHostnameMismatch(
|
992
|
+
'Server presented certificate that does not match '
|
993
|
+
'host %s: %s' % (hostname, cert), hostname, cert)
|
994
|
+
except ssl_SSLError, e:
|
995
|
+
if sock:
|
996
|
+
sock.close()
|
997
|
+
if self.sock:
|
998
|
+
self.sock.close()
|
999
|
+
self.sock = None
|
1000
|
+
# Unfortunately the ssl module doesn't seem to provide any way
|
1001
|
+
# to get at more detailed error information, in particular
|
1002
|
+
# whether the error is due to certificate validation or
|
1003
|
+
# something else (such as SSL protocol mismatch).
|
1004
|
+
if e.errno == ssl.SSL_ERROR_SSL:
|
1005
|
+
raise SSLHandshakeError(e)
|
1006
|
+
else:
|
1007
|
+
raise
|
1008
|
+
except (socket.timeout, socket.gaierror):
|
1009
|
+
raise
|
1010
|
+
except socket.error, msg:
|
1011
|
+
if self.debuglevel > 0:
|
1012
|
+
print 'connect fail:', (self.host, self.port)
|
1013
|
+
if self.sock:
|
1014
|
+
self.sock.close()
|
1015
|
+
self.sock = None
|
1016
|
+
continue
|
1017
|
+
break
|
1018
|
+
if not self.sock:
|
1019
|
+
raise socket.error, msg
|
1020
|
+
|
1021
|
+
SCHEME_TO_CONNECTION = {
|
1022
|
+
'http': HTTPConnectionWithTimeout,
|
1023
|
+
'https': HTTPSConnectionWithTimeout
|
1024
|
+
}
|
1025
|
+
+
+# Use a different connection object for Google App Engine
+try:
+    from google.appengine.api import apiproxy_stub_map
+    if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
+        raise ImportError  # Bail out; we're not actually running on App Engine.
+    from google.appengine.api.urlfetch import fetch
+    from google.appengine.api.urlfetch import InvalidURLError
+    from google.appengine.api.urlfetch import DownloadError
+    from google.appengine.api.urlfetch import ResponseTooLargeError
+    from google.appengine.api.urlfetch import SSLCertificateError
+
+
+    class ResponseDict(dict):
+        """Is a dictionary that also has a read() method, so
+        that it can pass itself off as an httplib.HTTPResponse()."""
+        def read(self):
+            pass
+
+
+    class AppEngineHttpConnection(object):
+        """Emulates an httplib.HTTPConnection object, but actually uses the Google
+        App Engine urlfetch library. This allows the timeout to be properly used on
+        Google App Engine, and avoids using httplib, which on Google App Engine is
+        just another wrapper around urlfetch.
+        """
+        def __init__(self, host, port=None, key_file=None, cert_file=None,
+                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
+                     disable_certificate_validation=False):
+            self.host = host
+            self.port = port
+            self.timeout = timeout
+            if key_file or cert_file or proxy_info or ca_certs:
+                raise NotSupportedOnThisPlatform()
+            self.response = None
+            self.scheme = 'http'
+            self.validate_certificate = not disable_certificate_validation
+            self.sock = True
+
+        def request(self, method, url, body, headers):
+            # Calculate the absolute URI, which fetch requires
+            netloc = self.host
+            if self.port:
+                netloc = '%s:%s' % (self.host, self.port)
+            absolute_uri = '%s://%s%s' % (self.scheme, netloc, url)
+            try:
+                response = fetch(absolute_uri, payload=body, method=method,
+                                 headers=headers, allow_truncated=False,
+                                 follow_redirects=False, deadline=self.timeout,
+                                 validate_certificate=self.validate_certificate)
+                self.response = ResponseDict(response.headers)
+                self.response['status'] = str(response.status_code)
+                self.response.status = response.status_code
+                setattr(self.response, 'read', lambda: response.content)
+
+            # Make sure the exceptions raised match the exceptions expected.
+            except InvalidURLError:
+                raise socket.gaierror('')
+            except (DownloadError, ResponseTooLargeError, SSLCertificateError):
+                raise httplib.HTTPException()
+
+        def getresponse(self):
+            if self.response:
+                return self.response
+            else:
+                raise httplib.HTTPException()
+
+        def set_debuglevel(self, level):
+            pass
+
+        def connect(self):
+            pass
+
+        def close(self):
+            pass
+
+
+    class AppEngineHttpsConnection(AppEngineHttpConnection):
+        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
+        def __init__(self, host, port=None, key_file=None, cert_file=None,
+                     strict=None, timeout=None, proxy_info=None):
+            AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file,
+                                             strict, timeout, proxy_info)
+            self.scheme = 'https'
+
+    # Update the connection classes to use the Google App Engine specific ones.
+    SCHEME_TO_CONNECTION = {
+        'http': AppEngineHttpConnection,
+        'https': AppEngineHttpsConnection
+    }
+
+except ImportError:
+    pass
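The try/except ImportError above is a runtime platform probe: the google.appengine package only imports inside the App Engine sandbox, and the extra GetStub('urlfetch') check guards against the SDK being importable off-platform. A minimal sketch of the same guarded-import pattern (the ON_APP_ENGINE flag is illustrative, not part of the diff):

    try:
        from google.appengine.api import apiproxy_stub_map
        # Only trust the import if the urlfetch service is actually wired up.
        ON_APP_ENGINE = apiproxy_stub_map.apiproxy.GetStub('urlfetch') is not None
    except ImportError:
        ON_APP_ENGINE = False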
+
+
+class Http(object):
+    """An HTTP client that handles:
+    - all methods
+    - caching
+    - ETags
+    - compression
+    - HTTPS
+    - Basic
+    - Digest
+    - WSSE
+
+    and more.
+    """
+    def __init__(self, cache=None, timeout=None,
+                 proxy_info=ProxyInfo.from_environment,
+                 ca_certs=None, disable_ssl_certificate_validation=False):
+        """
+        If 'cache' is a string then it is used as a directory name for
+        a disk cache. Otherwise it must be an object that supports the
+        same interface as FileCache.
+
+        All timeouts are in seconds. If None is passed for timeout
+        then Python's default timeout for sockets will be used. See
+        for example the docs of socket.setdefaulttimeout():
+        http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+
+        `proxy_info` may be:
+          - a callable that takes the http scheme ('http' or 'https') and
+            returns a ProxyInfo instance per request. By default, uses
+            ProxyInfo.from_environment.
+          - a ProxyInfo instance (static proxy config).
+          - None (proxy disabled).
+
+        ca_certs is the path of a file containing root CA certificates for SSL
+        server certificate validation. By default, a CA cert file bundled with
+        httplib2 is used.
+
+        If disable_ssl_certificate_validation is true, SSL cert validation will
+        not be performed.
+        """
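Since proxy_info may be a callable keyed on scheme, per-scheme proxy selection is straightforward. A sketch, assuming this module's ProxyInfo(proxy_type, proxy_host, proxy_port) constructor and the bundled socks module:

    def per_scheme_proxy(scheme):
        if scheme == 'http':
            return ProxyInfo(socks.PROXY_TYPE_HTTP, 'proxy.example.com', 3128)
        return None  # talk to https hosts directly

    h = Http(proxy_info=per_scheme_proxy)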
+        self.proxy_info = proxy_info
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = \
+            disable_ssl_certificate_validation
+
+        # Map domain name to an httplib connection
+        self.connections = {}
+        # The location of the cache, for now a directory
+        # where cached responses are held.
+        if cache and isinstance(cache, basestring):
+            self.cache = FileCache(cache)
+        else:
+            self.cache = cache
+
+        # Name/password
+        self.credentials = Credentials()
+
+        # Key/cert
+        self.certificates = KeyCerts()
+
+        # authorization objects
+        self.authorizations = []
+
+        # If set to False then no redirects are followed, even safe ones.
+        self.follow_redirects = True
+
+        # Which HTTP methods do we apply optimistic concurrency to, i.e.
+        # which methods get an "if-match:" etag header added to them.
+        self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+
+        # If 'follow_redirects' is True, and this is set to True then
+        # all redirects are followed, including unsafe ones.
+        self.follow_all_redirects = False
+
+        self.ignore_etag = False
+
+        self.force_exception_to_status_code = False
+
+        self.timeout = timeout
+
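Taken together, the constructor supports the typical entry points documented above: pass a directory name to get a disk-backed FileCache, or nothing for an uncached client. For example:

    h = Http('.cache', timeout=30)   # disk-cached client with a 30s socket timeout
    h2 = Http(disable_ssl_certificate_validation=True)  # opt out of cert checks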
+    def _auth_from_challenge(self, host, request_uri, headers, response, content):
+        """A generator that creates Authorization objects
+        that can be applied to requests.
+        """
+        challenges = _parse_www_authenticate(response, 'www-authenticate')
+        for cred in self.credentials.iter(host):
+            for scheme in AUTH_SCHEME_ORDER:
+                if challenges.has_key(scheme):
+                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
+
+    def add_credentials(self, name, password, domain=""):
+        """Add a name and password that will be used
+        any time a request requires authentication."""
+        self.credentials.add(name, password, domain)
+
+    def add_certificate(self, key, cert, domain):
+        """Add a key and cert that will be used
+        any time a request requires authentication."""
+        self.certificates.add(key, cert, domain)
+
+    def clear_credentials(self):
+        """Remove all the names and passwords
+        that are used for authentication"""
+        self.credentials.clear()
+        self.authorizations = []
+
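Usage of the credential and certificate stores is symmetrical; both are consulted per-domain when a request needs authentication:

    h = Http()
    h.add_credentials('joe', 's3cret', 'www.example.com')          # Basic/Digest/WSSE
    h.add_certificate('client.key', 'client.crt', 'www.example.com')  # SSL client cert
    h.clear_credentials()  # drop the stored names/passwords (not the certs)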
+    def _conn_request(self, conn, request_uri, method, body, headers):
+        for i in range(2):
+            try:
+                if conn.sock is None:
+                    conn.connect()
+                conn.request(method, request_uri, body, headers)
+            except socket.timeout:
+                raise
+            except socket.gaierror:
+                conn.close()
+                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
+            except ssl_SSLError:
+                conn.close()
+                raise
+            except socket.error, e:
+                err = 0
+                if hasattr(e, 'args'):
+                    err = getattr(e, 'args')[0]
+                else:
+                    err = e.errno
+                if err == errno.ECONNREFUSED:  # Connection refused
+                    raise
+            except httplib.HTTPException:
+                # Just because the server closed the connection doesn't apparently mean
+                # that the server didn't send a response.
+                if conn.sock is None:
+                    if i == 0:
+                        conn.close()
+                        conn.connect()
+                        continue
+                    else:
+                        conn.close()
+                        raise
+                if i == 0:
+                    conn.close()
+                    conn.connect()
+                    continue
+            try:
+                response = conn.getresponse()
+            except (socket.error, httplib.HTTPException):
+                if i == 0:
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    raise
+            else:
+                content = ""
+                if method == "HEAD":
+                    response.close()
+                else:
+                    content = response.read()
+                response = Response(response)
+                if method != "HEAD":
+                    content = _decompressContent(response, content)
+                break
+        return (response, content)
+
+
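_conn_request makes at most two attempts: if a kept-alive socket has gone stale (the server closed it between requests), the first attempt fails, the connection is rebuilt, and the request is retried exactly once. The same idiom in isolation (a sketch with assumed conn/method/request_uri variables):

    for attempt in range(2):
        try:
            conn.request(method, request_uri, body, headers)
            response = conn.getresponse()
        except (socket.error, httplib.HTTPException):
            if attempt == 0:
                conn.close()
                conn.connect()  # rebuild the dropped keep-alive socket
                continue
            raise
        break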
+    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
+        """Do the actual request using the connection object
+        and also follow one level of redirects if necessary"""
+
+        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+        auth = auths and sorted(auths)[0][1] or None
+        if auth:
+            auth.request(method, request_uri, headers, body)
+
+        (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+
+        if auth:
+            if auth.response(response, body):
+                auth.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+                response._stale_digest = 1
+
+        if response.status == 401:
+            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+                authorization.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+                if response.status != 401:
+                    self.authorizations.append(authorization)
+                    authorization.response(response, body)
+                    break
+
+        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
+            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+                # Pick out the location header and basically start from the beginning
+                # remembering first to strip the ETag header and decrement our 'depth'
+                if redirections:
+                    if not response.has_key('location') and response.status != 300:
+                        raise RedirectMissingLocation(_("Redirected but the response is missing a Location: header."), response, content)
+                    # Fix up relative redirects (which violate an RFC 2616 MUST)
+                    if response.has_key('location'):
+                        location = response['location']
+                        (scheme, authority, path, query, fragment) = parse_uri(location)
+                        if authority == None:
+                            response['location'] = urlparse.urljoin(absolute_uri, location)
+                    if response.status == 301 and method in ["GET", "HEAD"]:
+                        response['-x-permanent-redirect-url'] = response['location']
+                        if not response.has_key('content-location'):
+                            response['content-location'] = absolute_uri
+                        _updateCache(headers, response, content, self.cache, cachekey)
+                    if headers.has_key('if-none-match'):
+                        del headers['if-none-match']
+                    if headers.has_key('if-modified-since'):
+                        del headers['if-modified-since']
+                    if response.has_key('location'):
+                        location = response['location']
+                        old_response = copy.deepcopy(response)
+                        if not old_response.has_key('content-location'):
+                            old_response['content-location'] = absolute_uri
+                        redirect_method = method
+                        if response.status in [302, 303]:
+                            redirect_method = "GET"
+                            body = None
+                        (response, content) = self.request(location, redirect_method, body=body, headers=headers, redirections=redirections - 1)
+                        response.previous = old_response
+                else:
+                    raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
+        elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+            # Don't cache 206's since we aren't going to handle byte range requests
+            if not response.has_key('content-location'):
+                response['content-location'] = absolute_uri
+            _updateCache(headers, response, content, self.cache, cachekey)
+
+        return (response, content)
+
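Redirect behaviour is driven by the two flags set in __init__: follow_redirects covers the safe cases (GET/HEAD, plus any 303), and follow_all_redirects additionally re-issues unsafe methods. The previous chain records each hop:

    h = Http()
    h.follow_all_redirects = True  # also follow redirects for POST/PUT etc.
    response, content = h.request('http://www.example.com/old', 'GET')
    if response.previous:
        print response.previous.status  # e.g. 301 for the redirecting hop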
+    def _normalize_headers(self, headers):
+        return _normalize_headers(headers)
+
+    # Need to catch and rebrand some exceptions
+    # Then need to optionally turn all exceptions into status codes
+    # including all socket.* and httplib.* exceptions.
+
+
+    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
+        """Performs a single HTTP request.
+        The 'uri' is the URI of the HTTP resource and can begin
+        with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
+
+        The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
+        There is no restriction on the methods allowed.
+
+        The 'body' is the entity body to be sent with the request. It is a string
+        object.
+
+        Any extra headers that are to be sent with the request should be provided in the
+        'headers' dictionary.
+
+        The maximum number of redirects to follow before raising an
+        exception is 'redirections'. The default is 5.
+
+        The return value is a tuple of (response, content), the first
+        being an instance of the 'Response' class, the second being
+        a string that contains the response entity body.
+        """
+        try:
+            if headers is None:
+                headers = {}
+            else:
+                headers = self._normalize_headers(headers)
+
+            if not headers.has_key('user-agent'):
+                headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
+
+            uri = iri2uri(uri)
+
+            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
+            domain_port = authority.split(":")[0:2]
+            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
+                scheme = 'https'
+                authority = domain_port[0]
+
+            proxy_info = self._get_proxy_info(scheme, authority)
+
+            conn_key = scheme + ":" + authority
+            if conn_key in self.connections:
+                conn = self.connections[conn_key]
+            else:
+                if not connection_type:
+                    connection_type = SCHEME_TO_CONNECTION[scheme]
+                certs = list(self.certificates.iter(authority))
+                if issubclass(connection_type, HTTPSConnectionWithTimeout):
+                    if certs:
+                        conn = self.connections[conn_key] = connection_type(
+                                authority, key_file=certs[0][0],
+                                cert_file=certs[0][1], timeout=self.timeout,
+                                proxy_info=proxy_info,
+                                ca_certs=self.ca_certs,
+                                disable_ssl_certificate_validation=
+                                    self.disable_ssl_certificate_validation)
+                    else:
+                        conn = self.connections[conn_key] = connection_type(
+                                authority, timeout=self.timeout,
+                                proxy_info=proxy_info,
+                                ca_certs=self.ca_certs,
+                                disable_ssl_certificate_validation=
+                                    self.disable_ssl_certificate_validation)
+                else:
+                    conn = self.connections[conn_key] = connection_type(
+                            authority, timeout=self.timeout,
+                            proxy_info=proxy_info)
+                conn.set_debuglevel(debuglevel)
+
+            if 'range' not in headers and 'accept-encoding' not in headers:
+                headers['accept-encoding'] = 'gzip, deflate'
+
+            info = email.Message.Message()
+            cached_value = None
+            if self.cache:
+                cachekey = defrag_uri
+                cached_value = self.cache.get(cachekey)
+                if cached_value:
+                    # info = email.message_from_string(cached_value)
+                    #
+                    # Need to replace the line above with the kludge below
+                    # to fix the non-existent bug not fixed in this
+                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
+                    try:
+                        info, content = cached_value.split('\r\n\r\n', 1)
+                        feedparser = email.FeedParser.FeedParser()
+                        feedparser.feed(info)
+                        info = feedparser.close()
+                        feedparser._parse = None
+                    except IndexError:
+                        self.cache.delete(cachekey)
+                        cachekey = None
+                        cached_value = None
+            else:
+                cachekey = None
+
+            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
+                # http://www.w3.org/1999/04/Editing/
+                headers['if-match'] = info['etag']
+
+            if method not in ["GET", "HEAD"] and self.cache and cachekey:
+                # RFC 2616 Section 13.10
+                self.cache.delete(cachekey)
+
+            # Check the vary header in the cache to see if this request
+            # matches what varies in the cache.
+            if method in ['GET', 'HEAD'] and 'vary' in info:
+                vary = info['vary']
+                vary_headers = vary.lower().replace(' ', '').split(',')
+                for header in vary_headers:
+                    key = '-varied-%s' % header
+                    value = info[key]
+                    if headers.get(header, None) != value:
+                        cached_value = None
+                        break
+
+            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
+                if info.has_key('-x-permanent-redirect-url'):
+                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
+                    if redirections <= 0:
+                        raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
+                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers=headers, redirections=redirections - 1)
+                    response.previous = Response(info)
+                    response.previous.fromcache = True
+                else:
+                    # Determine our course of action:
+                    #   Is the cached entry fresh or stale?
+                    #   Has the client requested a non-cached response?
+                    #
+                    # There seem to be three possible answers:
+                    # 1. [FRESH] Return the cache entry w/o doing a GET
+                    # 2. [STALE] Do the GET (but add in cache validators if available)
+                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
+                    entry_disposition = _entry_disposition(info, headers)
+
+                    if entry_disposition == "FRESH":
+                        if not cached_value:
+                            info['status'] = '504'
+                            content = ""
+                        response = Response(info)
+                        if cached_value:
+                            response.fromcache = True
+                        return (response, content)
+
+                    if entry_disposition == "STALE":
+                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
+                            headers['if-none-match'] = info['etag']
+                        if info.has_key('last-modified') and not 'last-modified' in headers:
+                            headers['if-modified-since'] = info['last-modified']
+                    elif entry_disposition == "TRANSPARENT":
+                        pass
+
+                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+
+                if response.status == 304 and method == "GET":
+                    # Rewrite the cache entry with the new end-to-end headers
+                    # Take all headers that are in response
+                    # and overwrite their values in info.
+                    # unless they are hop-by-hop, or are listed in the connection header.
+
+                    for key in _get_end2end_headers(response):
+                        info[key] = response[key]
+                    merged_response = Response(info)
+                    if hasattr(response, "_stale_digest"):
+                        merged_response._stale_digest = response._stale_digest
+                    _updateCache(headers, merged_response, content, self.cache, cachekey)
+                    response = merged_response
+                    response.status = 200
+                    response.fromcache = True
+
+                elif response.status == 200:
+                    content = new_content
+                else:
+                    self.cache.delete(cachekey)
+                    content = new_content
+            else:
+                cc = _parse_cache_control(headers)
+                if cc.has_key('only-if-cached'):
+                    info['status'] = '504'
+                    response = Response(info)
+                    content = ""
+                else:
+                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+        except Exception, e:
+            if self.force_exception_to_status_code:
+                if isinstance(e, HttpLib2ErrorWithResponse):
+                    response = e.response
+                    content = e.content
+                    response.status = 500
+                    response.reason = str(e)
+                elif isinstance(e, socket.timeout):
+                    content = "Request Timeout"
+                    response = Response({
+                        "content-type": "text/plain",
+                        "status": "408",
+                        "content-length": len(content)
+                    })
+                    response.reason = "Request Timeout"
+                else:
+                    content = str(e)
+                    response = Response({
+                        "content-type": "text/plain",
+                        "status": "400",
+                        "content-length": len(content)
+                    })
+                    response.reason = "Bad Request"
+            else:
+                raise
+
+        return (response, content)
+
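End to end, the caching path above gives conditional revalidation for free: a second GET of the same URI re-sends the stored ETag/Last-Modified validators, and a 304 is merged back into the cached entry and surfaced as a 200 with fromcache set:

    h = Http('.cache')
    response, content = h.request('http://www.example.com/')
    response, content = h.request('http://www.example.com/')  # revalidated
    print response.status, response.fromcache  # typically: 200 True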
+    def _get_proxy_info(self, scheme, authority):
+        """Return a ProxyInfo instance (or None) based on the scheme
+        and authority.
+        """
+        hostname, port = urllib.splitport(authority)
+        proxy_info = self.proxy_info
+        if callable(proxy_info):
+            proxy_info = proxy_info(scheme)
+
+        if (hasattr(proxy_info, 'applies_to')
+                and not proxy_info.applies_to(hostname)):
+            proxy_info = None
+        return proxy_info
+
+
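Because _get_proxy_info only duck-types an applies_to(hostname) hook, proxy bypass rules can be layered on with a subclass. A sketch; the subclass below is illustrative and not part of this diff:

    class NoInternalProxy(ProxyInfo):
        def applies_to(self, hostname):
            return not hostname.endswith('.internal')  # bypass proxy for internal hosts

    h = Http(proxy_info=NoInternalProxy(socks.PROXY_TYPE_HTTP, 'proxy.example.com', 3128))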
+class Response(dict):
+    """An object more like email.Message than httplib.HTTPResponse."""
+
+    """Is this response from our local cache"""
+    fromcache = False
+
+    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1."""
+    version = 11
+
+    "Status code returned by server."
+    status = 200
+
+    """Reason phrase returned by server."""
+    reason = "Ok"
+
+    previous = None
+
+    def __init__(self, info):
+        # info is either an email.Message or
+        # an httplib.HTTPResponse object.
+        if isinstance(info, httplib.HTTPResponse):
+            for key, value in info.getheaders():
+                self[key.lower()] = value
+            self.status = info.status
+            self['status'] = str(self.status)
+            self.reason = info.reason
+            self.version = info.version
+        elif isinstance(info, email.Message.Message):
+            for key, value in info.items():
+                self[key] = value
+            self.status = int(self['status'])
+        else:
+            for key, value in info.iteritems():
+                self[key] = value
+            self.status = int(self.get('status', self.status))
+
+    def __getattr__(self, name):
+        if name == 'dict':
+            return self
+        else:
+            raise AttributeError, name
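Response is deliberately dict-like: headers live in the dict under lowercase keys, status metadata rides along as attributes, and __getattr__ keeps the legacy .dict alias working:

    response, content = Http().request('http://www.example.com/')
    print response.status            # attribute access, e.g. 200
    print response['content-type']   # header lookup, e.g. 'text/html'
    assert response.dict is response # '.dict' is just the response itself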