s3_cmd_bin 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. data/.gitignore +17 -0
  2. data/Gemfile +3 -0
  3. data/LICENSE.txt +22 -0
  4. data/README.md +28 -0
  5. data/Rakefile +1 -0
  6. data/lib/s3_cmd_bin/version.rb +3 -0
  7. data/lib/s3_cmd_bin.rb +15 -0
  8. data/resources/ChangeLog +1462 -0
  9. data/resources/INSTALL +97 -0
  10. data/resources/LICENSE +339 -0
  11. data/resources/MANIFEST.in +2 -0
  12. data/resources/Makefile +4 -0
  13. data/resources/NEWS +234 -0
  14. data/resources/README +342 -0
  15. data/resources/S3/ACL.py +224 -0
  16. data/resources/S3/ACL.pyc +0 -0
  17. data/resources/S3/AccessLog.py +92 -0
  18. data/resources/S3/AccessLog.pyc +0 -0
  19. data/resources/S3/BidirMap.py +42 -0
  20. data/resources/S3/BidirMap.pyc +0 -0
  21. data/resources/S3/CloudFront.py +773 -0
  22. data/resources/S3/CloudFront.pyc +0 -0
  23. data/resources/S3/Config.py +294 -0
  24. data/resources/S3/Config.pyc +0 -0
  25. data/resources/S3/ConnMan.py +71 -0
  26. data/resources/S3/ConnMan.pyc +0 -0
  27. data/resources/S3/Exceptions.py +88 -0
  28. data/resources/S3/Exceptions.pyc +0 -0
  29. data/resources/S3/FileDict.py +53 -0
  30. data/resources/S3/FileDict.pyc +0 -0
  31. data/resources/S3/FileLists.py +517 -0
  32. data/resources/S3/FileLists.pyc +0 -0
  33. data/resources/S3/HashCache.py +53 -0
  34. data/resources/S3/HashCache.pyc +0 -0
  35. data/resources/S3/MultiPart.py +137 -0
  36. data/resources/S3/MultiPart.pyc +0 -0
  37. data/resources/S3/PkgInfo.py +14 -0
  38. data/resources/S3/PkgInfo.pyc +0 -0
  39. data/resources/S3/Progress.py +173 -0
  40. data/resources/S3/Progress.pyc +0 -0
  41. data/resources/S3/S3.py +979 -0
  42. data/resources/S3/S3.pyc +0 -0
  43. data/resources/S3/S3Uri.py +223 -0
  44. data/resources/S3/S3Uri.pyc +0 -0
  45. data/resources/S3/SimpleDB.py +178 -0
  46. data/resources/S3/SortedDict.py +66 -0
  47. data/resources/S3/SortedDict.pyc +0 -0
  48. data/resources/S3/Utils.py +462 -0
  49. data/resources/S3/Utils.pyc +0 -0
  50. data/resources/S3/__init__.py +0 -0
  51. data/resources/S3/__init__.pyc +0 -0
  52. data/resources/TODO +52 -0
  53. data/resources/artwork/AtomicClockRadio.ttf +0 -0
  54. data/resources/artwork/TypeRa.ttf +0 -0
  55. data/resources/artwork/site-top-full-size.xcf +0 -0
  56. data/resources/artwork/site-top-label-download.png +0 -0
  57. data/resources/artwork/site-top-label-s3cmd.png +0 -0
  58. data/resources/artwork/site-top-label-s3sync.png +0 -0
  59. data/resources/artwork/site-top-s3tools-logo.png +0 -0
  60. data/resources/artwork/site-top.jpg +0 -0
  61. data/resources/artwork/site-top.png +0 -0
  62. data/resources/artwork/site-top.xcf +0 -0
  63. data/resources/format-manpage.pl +196 -0
  64. data/resources/magic +63 -0
  65. data/resources/run-tests.py +537 -0
  66. data/resources/s3cmd +2116 -0
  67. data/resources/s3cmd.1 +435 -0
  68. data/resources/s3db +55 -0
  69. data/resources/setup.cfg +2 -0
  70. data/resources/setup.py +80 -0
  71. data/resources/testsuite.tar.gz +0 -0
  72. data/resources/upload-to-sf.sh +7 -0
  73. data/s3_cmd_bin.gemspec +23 -0
  74. metadata +152 -0
Binary file
+++ data/resources/S3/Config.py
@@ -0,0 +1,294 @@
+ ## Amazon S3 manager
+ ## Author: Michal Ludvig <michal@logix.cz>
+ ## http://www.logix.cz/michal
+ ## License: GPL Version 2
+
+ import logging
+ from logging import debug, info, warning, error
+ import re
+ import os
+ import sys
+ import Progress
+ from SortedDict import SortedDict
+ import httplib
+ import json
+
+ class Config(object):
+     _instance = None
+     _parsed_files = []
+     _doc = {}
+     access_key = ""
+     secret_key = ""
+     access_token = ""
+     host_base = "s3.amazonaws.com"
+     host_bucket = "%(bucket)s.s3.amazonaws.com"
+     simpledb_host = "sdb.amazonaws.com"
+     cloudfront_host = "cloudfront.amazonaws.com"
+     verbosity = logging.WARNING
+     progress_meter = True
+     progress_class = Progress.ProgressCR
+     send_chunk = 4096
+     recv_chunk = 4096
+     list_md5 = False
+     human_readable_sizes = False
+     extra_headers = SortedDict(ignore_case = True)
+     force = False
+     enable = None
+     get_continue = False
+     skip_existing = False
+     recursive = False
+     acl_public = None
+     acl_grants = []
+     acl_revokes = []
+     proxy_host = ""
+     proxy_port = 3128
+     encrypt = False
+     dry_run = False
+     add_encoding_exts = ""
+     preserve_attrs = True
+     preserve_attrs_list = [
+         'uname',    # Verbose owner Name (e.g. 'root')
+         'uid',      # Numeric user ID (e.g. 0)
+         'gname',    # Group name (e.g. 'users')
+         'gid',      # Numeric group ID (e.g. 100)
+         'atime',    # Last access timestamp
+         'mtime',    # Modification timestamp
+         'ctime',    # Creation timestamp
+         'mode',     # File mode (e.g. rwxr-xr-x = 755)
+         'md5',      # File MD5 (if known)
+         #'acl',     # Full ACL (not yet supported)
+     ]
+     delete_removed = False
+     delete_after = False
+     delete_after_fetch = False
+     _doc['delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
+     delay_updates = False
+     gpg_passphrase = ""
+     gpg_command = ""
+     gpg_encrypt = "%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
+     gpg_decrypt = "%(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
+     use_https = False
+     bucket_location = "US"
+     default_mime_type = "binary/octet-stream"
+     guess_mime_type = True
+     mime_type = ""
+     enable_multipart = True
+     multipart_chunk_size_mb = 15    # MB
+     # List of checks to be performed for 'sync'
+     sync_checks = ['size', 'md5']   # 'weak-timestamp'
+     # List of compiled REGEXPs
+     exclude = []
+     include = []
+     # Dict mapping compiled REGEXPs back to their textual form
+     debug_exclude = {}
+     debug_include = {}
+     encoding = "utf-8"
+     urlencoding_mode = "normal"
+     log_target_prefix = ""
+     reduced_redundancy = False
+     follow_symlinks = False
+     socket_timeout = 300
+     invalidate_on_cf = False
+     # joseprio: new flags for default index invalidation
+     invalidate_default_index_on_cf = False
+     invalidate_default_index_root_on_cf = True
+     website_index = "index.html"
+     website_error = ""
+     website_endpoint = "http://%(bucket)s.s3-website-%(location)s.amazonaws.com/"
+     additional_destinations = []
+     cache_file = ""
+     add_headers = ""
+
+     ## Creating a singleton
+     def __new__(self, configfile = None):
+         if self._instance is None:
+             self._instance = object.__new__(self)
+         return self._instance
+
+     def __init__(self, configfile = None):
+         if configfile:
+             try:
+                 self.read_config_file(configfile)
+             except IOError, e:
+                 if 'AWS_CREDENTIAL_FILE' in os.environ:
+                     self.env_config()
+             if len(self.access_key)==0:
+                 self.role_config()
+
+     def role_config(self):
+         conn = httplib.HTTPConnection(host='169.254.169.254',timeout=0.1)
+         try:
+             conn.request('GET', "/latest/meta-data/iam/security-credentials/")
+             resp = conn.getresponse()
+             files = resp.read()
+             if resp.status == 200 and len(files)>1:
+                 conn.request('GET', "/latest/meta-data/iam/security-credentials/%s"%files)
+                 resp=conn.getresponse()
+                 if resp.status == 200:
+                     creds=json.load(resp)
+                     Config().update_option('access_key', creds['AccessKeyId'].encode('ascii'))
+                     Config().update_option('secret_key', creds['SecretAccessKey'].encode('ascii'))
+                     Config().update_option('access_token', creds['Token'].encode('ascii'))
+                 else:
+                     raise IOError
+             else:
+                 raise IOError
+         except:
+             raise
+
+     def role_refresh(self):
+         try:
+             self.role_config()
+         except:
+             warning("Could not refresh role")
+
+     def env_config(self):
+         cred_content = ""
+         try:
+             cred_file = open(os.environ['AWS_CREDENTIAL_FILE'],'r')
+             cred_content = cred_file.read()
+         except IOError, e:
+             debug("Error %d accessing credentials file %s" % (e.errno,os.environ['AWS_CREDENTIAL_FILE']))
+         r_data = re.compile("^\s*(?P<orig_key>\w+)\s*=\s*(?P<value>.*)")
+         r_quotes = re.compile("^\"(.*)\"\s*$")
+         if len(cred_content)>0:
+             for line in cred_content.splitlines():
+                 is_data = r_data.match(line)
+                 is_data = r_data.match(line)
+                 if is_data:
+                     data = is_data.groupdict()
+                     if r_quotes.match(data["value"]):
+                         data["value"] = data["value"][1:-1]
+                     if data["orig_key"]=="AWSAccessKeyId":
+                         data["key"] = "access_key"
+                     elif data["orig_key"]=="AWSSecretKey":
+                         data["key"] = "secret_key"
+                     else:
+                         del data["key"]
+                     if "key" in data:
+                         Config().update_option(data["key"], data["value"])
+                         if data["key"] in ("access_key", "secret_key", "gpg_passphrase"):
+                             print_value = (data["value"][:2]+"...%d_chars..."+data["value"][-1:]) % (len(data["value"]) - 3)
+                         else:
+                             print_value = data["value"]
+                         debug("env_Config: %s->%s" % (data["key"], print_value))
+
+
+
+     def option_list(self):
+         retval = []
+         for option in dir(self):
+             ## Skip attributes that start with underscore or are not string, int or bool
+             option_type = type(getattr(Config, option))
+             if option.startswith("_") or \
+                not (option_type in (
+                     type("string"), # str
+                     type(42),       # int
+                     type(True))):   # bool
+                 continue
+             retval.append(option)
+         return retval
+
+     def read_config_file(self, configfile):
+         cp = ConfigParser(configfile)
+         for option in self.option_list():
+             self.update_option(option, cp.get(option))
+
+         if cp.get('add_headers'):
+             for option in cp.get('add_headers').split(","):
+                 (key, value) = option.split(':')
+                 self.extra_headers[key.replace('_', '-').strip()] = value.strip()
+
+         self._parsed_files.append(configfile)
+
+     def dump_config(self, stream):
+         ConfigDumper(stream).dump("default", self)
+
+     def update_option(self, option, value):
+         if value is None:
+             return
+         #### Handle environment reference
+         if str(value).startswith("$"):
+             return self.update_option(option, os.getenv(str(value)[1:]))
+         #### Special treatment of some options
+         ## verbosity must be known to "logging" module
+         if option == "verbosity":
+             try:
+                 setattr(Config, "verbosity", logging._levelNames[value])
+             except KeyError:
+                 error("Config: verbosity level '%s' is not valid" % value)
+         ## allow yes/no, true/false, on/off and 1/0 for boolean options
+         elif type(getattr(Config, option)) is type(True):   # bool
+             if str(value).lower() in ("true", "yes", "on", "1"):
+                 setattr(Config, option, True)
+             elif str(value).lower() in ("false", "no", "off", "0"):
+                 setattr(Config, option, False)
+             else:
+                 error("Config: value of option '%s' must be Yes or No, not '%s'" % (option, value))
+         elif type(getattr(Config, option)) is type(42):     # int
+             try:
+                 setattr(Config, option, int(value))
+             except ValueError, e:
+                 error("Config: value of option '%s' must be an integer, not '%s'" % (option, value))
+         else:                           # string
+             setattr(Config, option, value)
+
+ class ConfigParser(object):
+     def __init__(self, file, sections = []):
+         self.cfg = {}
+         self.parse_file(file, sections)
+
+     def parse_file(self, file, sections = []):
+         debug("ConfigParser: Reading file '%s'" % file)
+         if type(sections) != type([]):
+             sections = [sections]
+         in_our_section = True
+         f = open(file, "r")
+         r_comment = re.compile("^\s*#.*")
+         r_empty = re.compile("^\s*$")
+         r_section = re.compile("^\[([^\]]+)\]")
+         r_data = re.compile("^\s*(?P<key>\w+)\s*=\s*(?P<value>.*)")
+         r_quotes = re.compile("^\"(.*)\"\s*$")
+         for line in f:
+             if r_comment.match(line) or r_empty.match(line):
+                 continue
+             is_section = r_section.match(line)
+             if is_section:
+                 section = is_section.groups()[0]
+                 in_our_section = (section in sections) or (len(sections) == 0)
+                 continue
+             is_data = r_data.match(line)
+             if is_data and in_our_section:
+                 data = is_data.groupdict()
+                 if r_quotes.match(data["value"]):
+                     data["value"] = data["value"][1:-1]
+                 self.__setitem__(data["key"], data["value"])
+                 if data["key"] in ("access_key", "secret_key", "gpg_passphrase"):
+                     print_value = (data["value"][:2]+"...%d_chars..."+data["value"][-1:]) % (len(data["value"]) - 3)
+                 else:
+                     print_value = data["value"]
+                 debug("ConfigParser: %s->%s" % (data["key"], print_value))
+                 continue
+             warning("Ignoring invalid line in '%s': %s" % (file, line))
+
+     def __getitem__(self, name):
+         return self.cfg[name]
+
+     def __setitem__(self, name, value):
+         self.cfg[name] = value
+
+     def get(self, name, default = None):
+         if self.cfg.has_key(name):
+             return self.cfg[name]
+         return default
+
+ class ConfigDumper(object):
+     def __init__(self, stream):
+         self.stream = stream
+
+     def dump(self, section, config):
+         self.stream.write("[%s]\n" % section)
+         for option in config.option_list():
+             self.stream.write("%s = %s\n" % (option, getattr(config, option)))
+
+ # vim:et:ts=4:sts=4:ai
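
For orientation, a minimal usage sketch of the vendored Config class above (Python 2, like the vendored code; not part of the gem or of s3cmd). It assumes the vendored S3/ package directory is on sys.path and that a config file exists; `~/.s3cfg` is only s3cmd's conventional location, used here as an example:

```python
import os
from S3.Config import Config

# Parses the INI-style config; if it cannot be read, the class falls back to
# AWS_CREDENTIAL_FILE and then to EC2 instance-role credentials.
cfg = Config(os.path.expanduser("~/.s3cfg"))

cfg.update_option("use_https", "yes")       # yes/no, true/false, on/off, 1/0 coerce to bool
cfg.update_option("socket_timeout", "60")   # numeric strings coerce to int

# Options are stored as class attributes via setattr(Config, ...), and __new__
# always returns the same instance, so every Config() sees the same values.
print Config().use_https, Config().socket_timeout   # True 60
```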
Binary file
+++ data/resources/S3/ConnMan.py
@@ -0,0 +1,71 @@
+ import httplib
+ from urlparse import urlparse
+ from threading import Semaphore
+ from logging import debug, info, warning, error
+
+ from Config import Config
+ from Exceptions import ParameterError
+
+ __all__ = [ "ConnMan" ]
+
+ class http_connection(object):
+     def __init__(self, id, hostname, ssl, cfg):
+         self.hostname = hostname
+         self.ssl = ssl
+         self.id = id
+         self.counter = 0
+         if cfg.proxy_host != "":
+             self.c = httplib.HTTPConnection(cfg.proxy_host, cfg.proxy_port)
+         elif not ssl:
+             self.c = httplib.HTTPConnection(hostname)
+         else:
+             self.c = httplib.HTTPSConnection(hostname)
+
+ class ConnMan(object):
+     conn_pool_sem = Semaphore()
+     conn_pool = {}
+     conn_max_counter = 800    ## AWS closes connection after some ~90 requests
+
+     @staticmethod
+     def get(hostname, ssl = None):
+         cfg = Config()
+         if ssl == None:
+             ssl = cfg.use_https
+         conn = None
+         if cfg.proxy_host != "":
+             if ssl:
+                 raise ParameterError("use_ssl=True can't be used with proxy")
+             conn_id = "proxy://%s:%s" % (cfg.proxy_host, cfg.proxy_port)
+         else:
+             conn_id = "http%s://%s" % (ssl and "s" or "", hostname)
+         ConnMan.conn_pool_sem.acquire()
+         if not ConnMan.conn_pool.has_key(conn_id):
+             ConnMan.conn_pool[conn_id] = []
+         if len(ConnMan.conn_pool[conn_id]):
+             conn = ConnMan.conn_pool[conn_id].pop()
+             debug("ConnMan.get(): re-using connection: %s#%d" % (conn.id, conn.counter))
+         ConnMan.conn_pool_sem.release()
+         if not conn:
+             debug("ConnMan.get(): creating new connection: %s" % conn_id)
+             conn = http_connection(conn_id, hostname, ssl, cfg)
+             conn.c.connect()
+         conn.counter += 1
+         return conn
+
+     @staticmethod
+     def put(conn):
+         if conn.id.startswith("proxy://"):
+             conn.c.close()
+             debug("ConnMan.put(): closing proxy connection (keep-alive not yet supported)")
+             return
+
+         if conn.counter >= ConnMan.conn_max_counter:
+             conn.c.close()
+             debug("ConnMan.put(): closing over-used connection")
+             return
+
+         ConnMan.conn_pool_sem.acquire()
+         ConnMan.conn_pool[conn.id].append(conn)
+         ConnMan.conn_pool_sem.release()
+         debug("ConnMan.put(): connection put back to pool (%s#%d)" % (conn.id, conn.counter))
+
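
The pool above is driven through a get()/put() pair. A rough sketch of that pattern, illustrative only: the hostname is an example value and the vendored S3/ package is again assumed importable:

```python
from S3.ConnMan import ConnMan

conn = ConnMan.get("s3.amazonaws.com")   # reuses a pooled connection or opens a new one
conn.c.request("GET", "/")               # conn.c is the underlying httplib connection
                                         # (a real caller builds a signed S3 request)
response = conn.c.getresponse()
data = response.read()                   # drain the response before reuse
ConnMan.put(conn)                        # back to the pool; proxied or over-used
                                         # connections are closed instead
```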
Binary file
+++ data/resources/S3/Exceptions.py
@@ -0,0 +1,88 @@
+ ## Amazon S3 manager - Exceptions library
+ ## Author: Michal Ludvig <michal@logix.cz>
+ ## http://www.logix.cz/michal
+ ## License: GPL Version 2
+
+ from Utils import getTreeFromXml, unicodise, deunicodise
+ from logging import debug, info, warning, error
+
+ try:
+     import xml.etree.ElementTree as ET
+ except ImportError:
+     import elementtree.ElementTree as ET
+
+ class S3Exception(Exception):
+     def __init__(self, message = ""):
+         self.message = unicodise(message)
+
+     def __str__(self):
+         ## Call unicode(self) instead of self.message because
+         ## __unicode__() method could be overriden in subclasses!
+         return deunicodise(unicode(self))
+
+     def __unicode__(self):
+         return self.message
+
+     ## (Base)Exception.message has been deprecated in Python 2.6
+     def _get_message(self):
+         return self._message
+     def _set_message(self, message):
+         self._message = message
+     message = property(_get_message, _set_message)
+
+
+ class S3Error (S3Exception):
+     def __init__(self, response):
+         self.status = response["status"]
+         self.reason = response["reason"]
+         self.info = {
+             "Code" : "",
+             "Message" : "",
+             "Resource" : ""
+         }
+         debug("S3Error: %s (%s)" % (self.status, self.reason))
+         if response.has_key("headers"):
+             for header in response["headers"]:
+                 debug("HttpHeader: %s: %s" % (header, response["headers"][header]))
+         if response.has_key("data") and response["data"]:
+             tree = getTreeFromXml(response["data"])
+             error_node = tree
+             if not error_node.tag == "Error":
+                 error_node = tree.find(".//Error")
+             for child in error_node.getchildren():
+                 if child.text != "":
+                     debug("ErrorXML: " + child.tag + ": " + repr(child.text))
+                     self.info[child.tag] = child.text
+         self.code = self.info["Code"]
+         self.message = self.info["Message"]
+         self.resource = self.info["Resource"]
+
+     def __unicode__(self):
+         retval = u"%d " % (self.status)
+         retval += (u"(%s)" % (self.info.has_key("Code") and self.info["Code"] or self.reason))
+         if self.info.has_key("Message"):
+             retval += (u": %s" % self.info["Message"])
+         return retval
+
+ class CloudFrontError(S3Error):
+     pass
+
+ class S3UploadError(S3Exception):
+     pass
+
+ class S3DownloadError(S3Exception):
+     pass
+
+ class S3RequestError(S3Exception):
+     pass
+
+ class S3ResponseError(S3Exception):
+     pass
+
+ class InvalidFileError(S3Exception):
+     pass
+
+ class ParameterError(S3Exception):
+     pass
+
+ # vim:et:ts=4:sts=4:ai
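
S3Error above is constructed from the plain response dict that the vendored S3.py passes around. A small sketch, using made-up response values, of the fields it extracts:

```python
from S3.Exceptions import S3Error

fake_response = {
    "status"  : 403,
    "reason"  : "Forbidden",
    "headers" : {"x-amz-request-id": "EXAMPLE"},
    "data"    : "<Error><Code>AccessDenied</Code>"
                "<Message>Access Denied</Message>"
                "<Resource>/example-bucket</Resource></Error>",
}

try:
    raise S3Error(fake_response)
except S3Error, e:
    print unicode(e)          # 403 (AccessDenied): Access Denied
    print e.status, e.code    # 403 AccessDenied -- parsed from the <Error> XML
```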
Binary file
+++ data/resources/S3/FileDict.py
@@ -0,0 +1,53 @@
+ ## Amazon S3 manager
+ ## Author: Michal Ludvig <michal@logix.cz>
+ ## http://www.logix.cz/michal
+ ## License: GPL Version 2
+
+ from SortedDict import SortedDict
+ import Utils
+
+ class FileDict(SortedDict):
+     def __init__(self, mapping = {}, ignore_case = True, **kwargs):
+         SortedDict.__init__(self, mapping = mapping, ignore_case = ignore_case, **kwargs)
+         self.hardlinks = dict() # { dev: { inode : {'md5':, 'relative_files':}}}
+         self.by_md5 = dict() # {md5: set(relative_files)}
+
+     def record_md5(self, relative_file, md5):
+         if md5 not in self.by_md5:
+             self.by_md5[md5] = set()
+         self.by_md5[md5].add(relative_file)
+
+     def find_md5_one(self, md5):
+         try:
+             return list(self.by_md5.get(md5, set()))[0]
+         except:
+             return None
+
+     def get_md5(self, relative_file):
+         """returns md5 if it can, or raises IOError if file is unreadable"""
+         md5 = None
+         if 'md5' in self[relative_file]:
+             return self[relative_file]['md5']
+         md5 = self.get_hardlink_md5(relative_file)
+         if md5 is None:
+             md5 = Utils.hash_file_md5(self[relative_file]['full_name'])
+         self.record_md5(relative_file, md5)
+         self[relative_file]['md5'] = md5
+         return md5
+
+     def record_hardlink(self, relative_file, dev, inode, md5):
+         if dev not in self.hardlinks:
+             self.hardlinks[dev] = dict()
+         if inode not in self.hardlinks[dev]:
+             self.hardlinks[dev][inode] = dict(md5=md5, relative_files=set())
+         self.hardlinks[dev][inode]['relative_files'].add(relative_file)
+
+     def get_hardlink_md5(self, relative_file):
+         md5 = None
+         dev = self[relative_file]['dev']
+         inode = self[relative_file]['inode']
+         try:
+             md5 = self.hardlinks[dev][inode]['md5']
+         except:
+             pass
+         return md5
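
FileDict layers an MD5 cache and hardlink awareness on top of SortedDict. A brief sketch, with made-up paths, device and inode numbers (get_md5() would really hash the file on disk), of how entries shaped roughly like those built by FileLists.py use it:

```python
from S3.FileDict import FileDict

files = FileDict(ignore_case = False)
# Local-list entries carry at least 'full_name', 'dev' and 'inode' (example values).
files["photos/a.jpg"] = {"full_name": "/data/photos/a.jpg", "dev": 2049, "inode": 131073}
files["photos/b.jpg"] = {"full_name": "/data/photos/b.jpg", "dev": 2049, "inode": 131073}

md5 = files.get_md5("photos/a.jpg")                   # hashed once, then cached in the entry
files.record_hardlink("photos/a.jpg", 2049, 131073, md5)

# b.jpg shares dev/inode with a.jpg, so its MD5 comes from the hardlink cache
# without re-reading the file.
print files.get_md5("photos/b.jpg") == md5            # True
```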
Binary file