swift 2.31.1__py2.py3-none-any.whl → 2.32.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104) hide show
  1. swift/cli/info.py +9 -2
  2. swift/cli/ringbuilder.py +5 -1
  3. swift/common/container_sync_realms.py +6 -7
  4. swift/common/daemon.py +7 -3
  5. swift/common/db.py +22 -7
  6. swift/common/db_replicator.py +19 -20
  7. swift/common/direct_client.py +63 -14
  8. swift/common/internal_client.py +24 -3
  9. swift/common/manager.py +43 -44
  10. swift/common/memcached.py +168 -74
  11. swift/common/middleware/__init__.py +4 -0
  12. swift/common/middleware/account_quotas.py +98 -40
  13. swift/common/middleware/backend_ratelimit.py +6 -4
  14. swift/common/middleware/crossdomain.py +21 -8
  15. swift/common/middleware/listing_formats.py +26 -38
  16. swift/common/middleware/proxy_logging.py +12 -9
  17. swift/common/middleware/s3api/controllers/bucket.py +8 -2
  18. swift/common/middleware/s3api/s3api.py +9 -4
  19. swift/common/middleware/s3api/s3request.py +32 -24
  20. swift/common/middleware/s3api/s3response.py +10 -1
  21. swift/common/middleware/tempauth.py +9 -10
  22. swift/common/middleware/versioned_writes/__init__.py +0 -3
  23. swift/common/middleware/versioned_writes/object_versioning.py +22 -5
  24. swift/common/middleware/x_profile/html_viewer.py +1 -1
  25. swift/common/middleware/xprofile.py +5 -0
  26. swift/common/request_helpers.py +1 -2
  27. swift/common/ring/ring.py +22 -19
  28. swift/common/swob.py +2 -1
  29. swift/common/{utils.py → utils/__init__.py} +610 -1146
  30. swift/common/utils/ipaddrs.py +256 -0
  31. swift/common/utils/libc.py +345 -0
  32. swift/common/utils/timestamp.py +399 -0
  33. swift/common/wsgi.py +70 -39
  34. swift/container/backend.py +106 -38
  35. swift/container/server.py +11 -2
  36. swift/container/sharder.py +34 -15
  37. swift/locale/de/LC_MESSAGES/swift.po +1 -320
  38. swift/locale/en_GB/LC_MESSAGES/swift.po +1 -347
  39. swift/locale/es/LC_MESSAGES/swift.po +1 -279
  40. swift/locale/fr/LC_MESSAGES/swift.po +1 -209
  41. swift/locale/it/LC_MESSAGES/swift.po +1 -207
  42. swift/locale/ja/LC_MESSAGES/swift.po +2 -278
  43. swift/locale/ko_KR/LC_MESSAGES/swift.po +3 -303
  44. swift/locale/pt_BR/LC_MESSAGES/swift.po +1 -204
  45. swift/locale/ru/LC_MESSAGES/swift.po +1 -203
  46. swift/locale/tr_TR/LC_MESSAGES/swift.po +1 -192
  47. swift/locale/zh_CN/LC_MESSAGES/swift.po +1 -192
  48. swift/locale/zh_TW/LC_MESSAGES/swift.po +1 -193
  49. swift/obj/diskfile.py +19 -6
  50. swift/obj/server.py +20 -6
  51. swift/obj/ssync_receiver.py +19 -9
  52. swift/obj/ssync_sender.py +10 -10
  53. swift/proxy/controllers/account.py +7 -7
  54. swift/proxy/controllers/base.py +374 -366
  55. swift/proxy/controllers/container.py +112 -53
  56. swift/proxy/controllers/obj.py +254 -390
  57. swift/proxy/server.py +3 -8
  58. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-server +1 -1
  59. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-server +1 -1
  60. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-drive-audit +45 -14
  61. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-server +1 -1
  62. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-proxy-server +1 -1
  63. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/AUTHORS +4 -0
  64. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/METADATA +32 -35
  65. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/RECORD +103 -100
  66. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/WHEEL +1 -1
  67. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/entry_points.txt +0 -1
  68. swift-2.32.1.dist-info/pbr.json +1 -0
  69. swift-2.31.1.dist-info/pbr.json +0 -1
  70. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-audit +0 -0
  71. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-auditor +0 -0
  72. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-info +0 -0
  73. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-reaper +0 -0
  74. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-account-replicator +0 -0
  75. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-config +0 -0
  76. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-auditor +0 -0
  77. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-info +0 -0
  78. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-reconciler +0 -0
  79. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-replicator +0 -0
  80. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-sharder +0 -0
  81. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-sync +0 -0
  82. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-container-updater +0 -0
  83. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-dispersion-populate +0 -0
  84. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-dispersion-report +0 -0
  85. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-form-signature +0 -0
  86. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-get-nodes +0 -0
  87. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-init +0 -0
  88. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-auditor +0 -0
  89. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-expirer +0 -0
  90. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-info +0 -0
  91. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-reconstructor +0 -0
  92. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-relinker +0 -0
  93. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-replicator +0 -0
  94. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-object-updater +0 -0
  95. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-oldies +0 -0
  96. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-orphans +0 -0
  97. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-recon +0 -0
  98. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-recon-cron +0 -0
  99. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-reconciler-enqueue +0 -0
  100. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-ring-builder +0 -0
  101. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-ring-builder-analyzer +0 -0
  102. {swift-2.31.1.data → swift-2.32.1.data}/scripts/swift-ring-composer +0 -0
  103. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/LICENSE +0 -0
  104. {swift-2.31.1.dist-info → swift-2.32.1.dist-info}/top_level.txt +0 -0
swift/cli/info.py CHANGED
@@ -30,6 +30,7 @@ from swift.container.backend import ContainerBroker, DATADIR as CBDATADIR
30
30
  from swift.obj.diskfile import get_data_dir, read_metadata, DATADIR_BASE, \
31
31
  extract_policy
32
32
  from swift.common.storage_policy import POLICIES
33
+ from swift.common.swob import wsgi_to_str
33
34
  from swift.common.middleware.crypto.crypto_utils import load_crypto_meta
34
35
  from swift.common.utils import md5
35
36
 
@@ -297,12 +298,16 @@ def print_db_info_metadata(db_type, info, metadata, drop_prefixes=False,
297
298
  title = key.replace('_', '-').title()
298
299
  print(' %s: %s' % (title, value))
299
300
  if sys_metadata:
300
- print(' System Metadata: %s' % sys_metadata)
301
+ print(' System Metadata:')
302
+ for key, value in sys_metadata.items():
303
+ print(' %s: %s' % (key, value))
301
304
  else:
302
305
  print('No system metadata found in db file')
303
306
 
304
307
  if user_metadata:
305
- print(' User Metadata: %s' % user_metadata)
308
+ print(' User Metadata:')
309
+ for key, value in user_metadata.items():
310
+ print(' %s: %s' % (key, value))
306
311
  else:
307
312
  print('No user metadata found in db file')
308
313
 
@@ -537,6 +542,8 @@ def print_obj(datafile, check_etag=True, swift_dir='/etc/swift',
537
542
  except EOFError:
538
543
  print("Invalid metadata")
539
544
  raise InfoSystemExit()
545
+ metadata = {wsgi_to_str(k): v if k == 'name' else wsgi_to_str(v)
546
+ for k, v in metadata.items()}
540
547
 
541
548
  etag = metadata.pop('ETag', '')
542
549
  length = metadata.pop('Content-Length', '')
swift/cli/ringbuilder.py CHANGED
@@ -194,7 +194,11 @@ def check_devs(devs, input_question, opts, abort_msg):
194
194
  print('Matched more than one device:')
195
195
  for dev in devs:
196
196
  print(' %s' % format_device(dev))
197
- if not opts.yes and input(input_question) != 'y':
197
+ try:
198
+ abort = not opts.yes and input(input_question) != 'y'
199
+ except (EOFError, KeyboardInterrupt):
200
+ abort = True
201
+ if abort:
198
202
  print(abort_msg)
199
203
  exit(EXIT_ERROR)
200
204
 
@@ -22,7 +22,6 @@ import time
22
22
  import six
23
23
  from six.moves import configparser
24
24
 
25
- from swift import gettext_ as _
26
25
  from swift.common.utils import get_valid_utf8_str
27
26
 
28
27
 
@@ -58,7 +57,7 @@ class ContainerSyncRealms(object):
58
57
  log_func = self.logger.debug
59
58
  else:
60
59
  log_func = self.logger.error
61
- log_func(_('Could not load %(conf)r: %(error)s') % {
60
+ log_func('Could not load %(conf)r: %(error)s', {
62
61
  'conf': self.conf_path, 'error': err})
63
62
  else:
64
63
  if mtime != self.conf_path_mtime:
@@ -68,8 +67,8 @@ class ContainerSyncRealms(object):
68
67
  conf.read(self.conf_path)
69
68
  except configparser.ParsingError as err:
70
69
  self.logger.error(
71
- _('Could not load %(conf)r: %(error)s')
72
- % {'conf': self.conf_path, 'error': err})
70
+ 'Could not load %(conf)r: %(error)s',
71
+ {'conf': self.conf_path, 'error': err})
73
72
  else:
74
73
  try:
75
74
  self.mtime_check_interval = conf.getfloat(
@@ -82,9 +81,9 @@ class ContainerSyncRealms(object):
82
81
  now + self.mtime_check_interval
83
82
  except (configparser.ParsingError, ValueError) as err:
84
83
  self.logger.error(
85
- _('Error in %(conf)r with '
86
- 'mtime_check_interval: %(error)s')
87
- % {'conf': self.conf_path, 'error': err})
84
+ 'Error in %(conf)r with '
85
+ 'mtime_check_interval: %(error)s',
86
+ {'conf': self.conf_path, 'error': err})
88
87
  realms = {}
89
88
  for section in conf.sections():
90
89
  realm = {}
swift/common/daemon.py CHANGED
@@ -20,8 +20,8 @@ import time
20
20
  import signal
21
21
  from re import sub
22
22
 
23
+ import eventlet
23
24
  import eventlet.debug
24
- from eventlet.hubs import use_hub
25
25
 
26
26
  from swift.common import utils
27
27
 
@@ -281,7 +281,9 @@ def run_daemon(klass, conf_file, section_name='', once=False, **kwargs):
281
281
  # and results in an exit code of 1.
282
282
  sys.exit(e)
283
283
 
284
- use_hub(utils.get_hub())
284
+ # patch eventlet/logging early
285
+ utils.monkey_patch()
286
+ eventlet.hubs.use_hub(utils.get_hub())
285
287
 
286
288
  # once on command line (i.e. daemonize=false) will over-ride config
287
289
  once = once or not utils.config_true_value(conf.get('daemonize', 'true'))
@@ -315,7 +317,9 @@ def run_daemon(klass, conf_file, section_name='', once=False, **kwargs):
315
317
 
316
318
  logger.notice('Starting %s', os.getpid())
317
319
  try:
318
- DaemonStrategy(klass(conf), logger).run(once=once, **kwargs)
320
+ d = klass(conf)
321
+ DaemonStrategy(d, logger).run(once=once, **kwargs)
319
322
  except KeyboardInterrupt:
320
323
  logger.info('User quit')
321
324
  logger.notice('Exited %s', os.getpid())
325
+ return d
swift/common/db.py CHANGED
@@ -26,7 +26,6 @@ import time
26
26
  import errno
27
27
  import six
28
28
  import six.moves.cPickle as pickle
29
- from swift import gettext_ as _
30
29
  from tempfile import mkstemp
31
30
 
32
31
  from eventlet import sleep, Timeout
@@ -130,6 +129,9 @@ class DatabaseAlreadyExists(sqlite3.DatabaseError):
130
129
 
131
130
  class GreenDBConnection(sqlite3.Connection):
132
131
  """SQLite DB Connection handler that plays well with eventlet."""
132
+ # slots are needed for python 3.11.0 (there's an issue fixed in 3.11.1,
133
+ # see https://github.com/python/cpython/issues/99886)
134
+ __slots__ = ('timeout', 'db_file')
133
135
 
134
136
  def __init__(self, database, timeout=None, *args, **kwargs):
135
137
  if timeout is None:
@@ -143,6 +145,13 @@ class GreenDBConnection(sqlite3.Connection):
143
145
  cls = GreenDBCursor
144
146
  return sqlite3.Connection.cursor(self, cls)
145
147
 
148
+ def execute(self, *args, **kwargs):
149
+ # py311 stopped calling self.cursor() to get the cursor;
150
+ # see https://github.com/python/cpython/pull/31351
151
+ curs = self.cursor()
152
+ curs.execute(*args, **kwargs)
153
+ return curs
154
+
146
155
  def commit(self):
147
156
  return _db_timeout(
148
157
  self.timeout, self.db_file,
@@ -151,6 +160,9 @@ class GreenDBConnection(sqlite3.Connection):
151
160
 
152
161
  class GreenDBCursor(sqlite3.Cursor):
153
162
  """SQLite Cursor handler that plays well with eventlet."""
163
+ # slots are needed for python 3.11.0 (there's an issue fixed in 3.11.1,
164
+ # see https://github.com/python/cpython/issues/99886)
165
+ __slots__ = ('timeout', 'db_file')
154
166
 
155
167
  def __init__(self, *args, **kwargs):
156
168
  self.timeout = args[0].timeout
@@ -162,6 +174,9 @@ class GreenDBCursor(sqlite3.Cursor):
162
174
  self.timeout, self.db_file, lambda: sqlite3.Cursor.execute(
163
175
  self, *args, **kwargs))
164
176
 
177
+ # NB: executemany and executescript are *not* greened, and never have been
178
+ # (as far as I can tell)
179
+
165
180
 
166
181
  def dict_factory(crs, row):
167
182
  """
@@ -482,10 +497,10 @@ class DatabaseBroker(object):
482
497
  raise
483
498
  quar_path = "%s-%s" % (quar_path, uuid4().hex)
484
499
  renamer(self.db_dir, quar_path, fsync=False)
485
- detail = _('Quarantined %(db_dir)s to %(quar_path)s due to '
486
- '%(reason)s') % {'db_dir': self.db_dir,
487
- 'quar_path': quar_path,
488
- 'reason': reason}
500
+ detail = ('Quarantined %(db_dir)s to %(quar_path)s due to '
501
+ '%(reason)s') % {'db_dir': self.db_dir,
502
+ 'quar_path': quar_path,
503
+ 'reason': reason}
489
504
  self.logger.error(detail)
490
505
  raise sqlite3.DatabaseError(detail)
491
506
 
@@ -584,7 +599,7 @@ class DatabaseBroker(object):
584
599
  self.conn = conn
585
600
  except (Exception, Timeout):
586
601
  logging.exception(
587
- _('Broker error trying to rollback locked connection'))
602
+ 'Broker error trying to rollback locked connection')
588
603
  conn.close()
589
604
 
590
605
  def _new_db_id(self):
@@ -836,7 +851,7 @@ class DatabaseBroker(object):
836
851
  self._commit_puts_load(item_list, data)
837
852
  except Exception:
838
853
  self.logger.exception(
839
- _('Invalid pending entry %(file)s: %(entry)s'),
854
+ 'Invalid pending entry %(file)s: %(entry)s',
840
855
  {'file': self.pending_file, 'entry': entry})
841
856
  if item_list:
842
857
  self.merge_items(item_list)
@@ -23,7 +23,6 @@ import uuid
23
23
  import errno
24
24
  import re
25
25
  from contextlib import contextmanager
26
- from swift import gettext_ as _
27
26
 
28
27
  from eventlet import GreenPool, sleep, Timeout
29
28
  from eventlet.green import subprocess
@@ -176,7 +175,7 @@ class ReplConnection(BufferedHTTPConnection):
176
175
  except (Exception, Timeout):
177
176
  self.close()
178
177
  self.logger.exception(
179
- _('ERROR reading HTTP response from %s'), self.node)
178
+ 'ERROR reading HTTP response from %s', self.node)
180
179
  return None
181
180
 
182
181
 
@@ -254,15 +253,15 @@ class Replicator(Daemon):
254
253
  """Report the current stats to the logs."""
255
254
  now = time.time()
256
255
  self.logger.info(
257
- _('Attempted to replicate %(count)d dbs in %(time).5f seconds '
258
- '(%(rate).5f/s)'),
256
+ 'Attempted to replicate %(count)d dbs in %(time).5f seconds '
257
+ '(%(rate).5f/s)',
259
258
  {'count': self.stats['attempted'],
260
259
  'time': now - self.stats['start'],
261
260
  'rate': self.stats['attempted'] /
262
261
  (now - self.stats['start'] + 0.0000001)})
263
- self.logger.info(_('Removed %(remove)d dbs') % self.stats)
264
- self.logger.info(_('%(success)s successes, %(failure)s failures')
265
- % self.stats)
262
+ self.logger.info('Removed %(remove)d dbs', self.stats)
263
+ self.logger.info('%(success)s successes, %(failure)s failures',
264
+ self.stats)
266
265
  dump_recon_cache(
267
266
  {'replication_stats': self.stats,
268
267
  'replication_time': now - self.stats['start'],
@@ -308,7 +307,7 @@ class Replicator(Daemon):
308
307
  proc = subprocess.Popen(popen_args)
309
308
  proc.communicate()
310
309
  if proc.returncode != 0:
311
- self.logger.error(_('ERROR rsync failed with %(code)s: %(args)s'),
310
+ self.logger.error('ERROR rsync failed with %(code)s: %(args)s',
312
311
  {'code': proc.returncode, 'args': popen_args})
313
312
  return proc.returncode == 0
314
313
 
@@ -625,10 +624,10 @@ class Replicator(Daemon):
625
624
  'replicate out and remove.' % (object_file, name, bpart))
626
625
  except (Exception, Timeout) as e:
627
626
  if 'no such table' in str(e):
628
- self.logger.error(_('Quarantining DB %s'), object_file)
627
+ self.logger.error('Quarantining DB %s', object_file)
629
628
  quarantine_db(broker.db_file, broker.db_type)
630
629
  else:
631
- self.logger.exception(_('ERROR reading db %s'), object_file)
630
+ self.logger.exception('ERROR reading db %s', object_file)
632
631
  nodes = self.ring.get_part_nodes(int(partition))
633
632
  self._add_failure_stats([(failure_dev['replication_ip'],
634
633
  failure_dev['device'])
@@ -680,13 +679,13 @@ class Replicator(Daemon):
680
679
  repl_nodes.append(next(more_nodes))
681
680
  except StopIteration:
682
681
  self.logger.error(
683
- _('ERROR There are not enough handoff nodes to reach '
684
- 'replica count for partition %s'),
682
+ 'ERROR There are not enough handoff nodes to reach '
683
+ 'replica count for partition %s',
685
684
  partition)
686
- self.logger.error(_('ERROR Remote drive not mounted %s'), node)
685
+ self.logger.error('ERROR Remote drive not mounted %s', node)
687
686
  except (Exception, Timeout):
688
- self.logger.exception(_('ERROR syncing %(file)s with node'
689
- ' %(node)s'),
687
+ self.logger.exception('ERROR syncing %(file)s with node'
688
+ ' %(node)s',
690
689
  {'file': object_file, 'node': node})
691
690
  if not success:
692
691
  failure_devs_info.add((node['replication_ip'], node['device']))
@@ -785,7 +784,7 @@ class Replicator(Daemon):
785
784
  dirs = []
786
785
  ips = whataremyips(self.bind_ip)
787
786
  if not ips:
788
- self.logger.error(_('ERROR Failed to get my own IPs?'))
787
+ self.logger.error('ERROR Failed to get my own IPs?')
789
788
  return
790
789
 
791
790
  if self.handoffs_only or self.handoff_delete:
@@ -830,12 +829,12 @@ class Replicator(Daemon):
830
829
  self.logger.error("Can't find itself %s with port %s in ring "
831
830
  "file, not replicating",
832
831
  ", ".join(ips), self.port)
833
- self.logger.info(_('Beginning replication run'))
832
+ self.logger.info('Beginning replication run')
834
833
  for part, object_file, node_id in self.roundrobin_datadirs(dirs):
835
834
  self.cpool.spawn_n(
836
835
  self._replicate_object, part, object_file, node_id)
837
836
  self.cpool.waitall()
838
- self.logger.info(_('Replication run OVER'))
837
+ self.logger.info('Replication run OVER')
839
838
  if self.handoffs_only or self.handoff_delete:
840
839
  self.logger.warning(
841
840
  'Finished replication pass with handoffs_only and/or '
@@ -853,7 +852,7 @@ class Replicator(Daemon):
853
852
  try:
854
853
  self.run_once()
855
854
  except (Exception, Timeout):
856
- self.logger.exception(_('ERROR trying to replicate'))
855
+ self.logger.exception('ERROR trying to replicate')
857
856
  elapsed = time.time() - begin
858
857
  if elapsed < self.interval:
859
858
  sleep(self.interval - elapsed)
@@ -957,7 +956,7 @@ class ReplicatorRpc(object):
957
956
  info = self._get_synced_replication_info(broker, remote_info)
958
957
  except (Exception, Timeout) as e:
959
958
  if 'no such table' in str(e):
960
- self.logger.error(_("Quarantining DB %s"), broker)
959
+ self.logger.error("Quarantining DB %s", broker)
961
960
  quarantine_db(broker.db_file, broker.db_type)
962
961
  return HTTPNotFound()
963
962
  raise
@@ -156,29 +156,46 @@ def _get_direct_account_container(path, stype, node, part,
156
156
  marker=None, limit=None,
157
157
  prefix=None, delimiter=None,
158
158
  conn_timeout=5, response_timeout=15,
159
- end_marker=None, reverse=None, headers=None):
160
- """Base class for get direct account and container.
159
+ end_marker=None, reverse=None, headers=None,
160
+ extra_params=None):
161
+ """Base function for get direct account and container.
161
162
 
162
- Do not use directly use the get_direct_account or
163
- get_direct_container instead.
163
+ Do not use directly; use direct_get_account or
164
+ direct_get_container instead.
164
165
  """
165
166
  if headers is None:
166
167
  headers = {}
167
168
 
168
- params = ['format=json']
169
+ params = {'format': 'json'}
170
+ if extra_params:
171
+ for key, value in extra_params.items():
172
+ if value is not None:
173
+ params[key] = value
169
174
  if marker:
170
- params.append('marker=%s' % quote(marker))
175
+ if 'marker' in params:
176
+ raise TypeError('duplicate values for keyword arg: marker')
177
+ params['marker'] = quote(marker)
171
178
  if limit:
172
- params.append('limit=%d' % limit)
179
+ if 'limit' in params:
180
+ raise TypeError('duplicate values for keyword arg: limit')
181
+ params['limit'] = '%d' % limit
173
182
  if prefix:
174
- params.append('prefix=%s' % quote(prefix))
183
+ if 'prefix' in params:
184
+ raise TypeError('duplicate values for keyword arg: prefix')
185
+ params['prefix'] = quote(prefix)
175
186
  if delimiter:
176
- params.append('delimiter=%s' % quote(delimiter))
187
+ if 'delimiter' in params:
188
+ raise TypeError('duplicate values for keyword arg: delimiter')
189
+ params['delimiter'] = quote(delimiter)
177
190
  if end_marker:
178
- params.append('end_marker=%s' % quote(end_marker))
191
+ if 'end_marker' in params:
192
+ raise TypeError('duplicate values for keyword arg: end_marker')
193
+ params['end_marker'] = quote(end_marker)
179
194
  if reverse:
180
- params.append('reverse=%s' % quote(reverse))
181
- qs = '&'.join(params)
195
+ if 'reverse' in params:
196
+ raise TypeError('duplicate values for keyword arg: reverse')
197
+ params['reverse'] = quote(reverse)
198
+ qs = '&'.join('%s=%s' % (k, v) for k, v in params.items())
182
199
 
183
200
  ip, port = get_ip_port(node, headers)
184
201
  with Timeout(conn_timeout):
@@ -293,7 +310,7 @@ def direct_head_container(node, part, account, container, conn_timeout=5,
293
310
  def direct_get_container(node, part, account, container, marker=None,
294
311
  limit=None, prefix=None, delimiter=None,
295
312
  conn_timeout=5, response_timeout=15, end_marker=None,
296
- reverse=None, headers=None):
313
+ reverse=None, headers=None, extra_params=None):
297
314
  """
298
315
  Get container listings directly from the container server.
299
316
 
@@ -310,6 +327,12 @@ def direct_get_container(node, part, account, container, marker=None,
310
327
  :param end_marker: end_marker query
311
328
  :param reverse: reverse the returned listing
312
329
  :param headers: headers to be included in the request
330
+ :param extra_params: a dict of extra parameters to be included in the
331
+ request. It can be used to pass additional parameters, e.g,
332
+ {'states':'updating'} can be used with shard_range/namespace listing.
333
+ It can also be used to pass the existing keyword args, like 'marker' or
334
+ 'limit', but if the same parameter appears twice in both keyword arg
335
+ (not None) and extra_params, this function will raise TypeError.
313
336
  :returns: a tuple of (response headers, a list of objects) The response
314
337
  headers will be a HeaderKeyDict.
315
338
  """
@@ -322,7 +345,8 @@ def direct_get_container(node, part, account, container, marker=None,
322
345
  reverse=reverse,
323
346
  conn_timeout=conn_timeout,
324
347
  response_timeout=response_timeout,
325
- headers=headers)
348
+ headers=headers,
349
+ extra_params=extra_params)
326
350
 
327
351
 
328
352
  def direct_delete_container(node, part, account, container, conn_timeout=5,
@@ -378,6 +402,31 @@ def direct_put_container(node, part, account, container, conn_timeout=5,
378
402
  content_length=content_length, chunk_size=chunk_size)
379
403
 
380
404
 
405
+ def direct_post_container(node, part, account, container, conn_timeout=5,
406
+ response_timeout=15, headers=None):
407
+ """
408
+ Make a POST request to a container server.
409
+
410
+ :param node: node dictionary from the ring
411
+ :param part: partition the container is on
412
+ :param account: account name
413
+ :param container: container name
414
+ :param conn_timeout: timeout in seconds for establishing the connection
415
+ :param response_timeout: timeout in seconds for getting the response
416
+ :param headers: additional headers to include in the request
417
+ :raises ClientException: HTTP POST request failed
418
+ """
419
+ if headers is None:
420
+ headers = {}
421
+
422
+ lower_headers = set(k.lower() for k in headers)
423
+ headers_out = gen_headers(headers,
424
+ add_ts='x-timestamp' not in lower_headers)
425
+ path = _make_path(account, container)
426
+ return _make_req(node, part, 'POST', path, headers_out, 'Container',
427
+ conn_timeout, response_timeout)
428
+
429
+
381
430
  def direct_put_container_object(node, part, account, container, obj,
382
431
  conn_timeout=5, response_timeout=15,
383
432
  headers=None):
@@ -28,6 +28,7 @@ from zlib import compressobj
28
28
  from swift.common.exceptions import ClientException
29
29
  from swift.common.http import (HTTP_NOT_FOUND, HTTP_MULTIPLE_CHOICES,
30
30
  is_client_error, is_server_error)
31
+ from swift.common.middleware.gatekeeper import GatekeeperMiddleware
31
32
  from swift.common.request_helpers import USE_REPLICATION_NETWORK_HEADER
32
33
  from swift.common.swob import Request, bytes_to_wsgi
33
34
  from swift.common.utils import quote, close_if_possible, drain_and_close
@@ -144,6 +145,8 @@ class InternalClient(object):
144
145
  :param user_agent: User agent to be sent to requests to Swift.
145
146
  :param request_tries: Number of tries before InternalClient.make_request()
146
147
  gives up.
148
+ :param use_replication_network: Force the client to use the replication
149
+ network over the cluster.
147
150
  :param global_conf: a dict of options to update the loaded proxy config.
148
151
  Options in ``global_conf`` will override those in ``conf_path`` except
149
152
  where the ``conf_path`` option is preceded by ``set``.
@@ -151,12 +154,17 @@ class InternalClient(object):
151
154
  """
152
155
 
153
156
  def __init__(self, conf_path, user_agent, request_tries,
154
- allow_modify_pipeline=False, use_replication_network=False,
155
- global_conf=None, app=None):
157
+ use_replication_network=False, global_conf=None, app=None,
158
+ **kwargs):
156
159
  if request_tries < 1:
157
160
  raise ValueError('request_tries must be positive')
161
+ # Internal clients don't use the gatekeeper and the pipeline remains
162
+ # static so we never allow anything to modify the proxy pipeline.
163
+ if kwargs.get('allow_modify_pipeline'):
164
+ raise ValueError("'allow_modify_pipeline' is no longer supported")
158
165
  self.app = app or loadapp(conf_path, global_conf=global_conf,
159
- allow_modify_pipeline=allow_modify_pipeline,)
166
+ allow_modify_pipeline=False,)
167
+ self.check_gatekeeper_not_loaded(self.app)
160
168
  self.user_agent = \
161
169
  self.app._pipeline_final_app.backend_user_agent = user_agent
162
170
  self.request_tries = request_tries
@@ -167,6 +175,19 @@ class InternalClient(object):
167
175
  self.auto_create_account_prefix = \
168
176
  self.app._pipeline_final_app.auto_create_account_prefix
169
177
 
178
+ @staticmethod
179
+ def check_gatekeeper_not_loaded(app):
180
+ # the Gatekeeper middleware would prevent an InternalClient passing
181
+ # X-Backend-* headers to the proxy app, so ensure it's not present
182
+ try:
183
+ for app in app._pipeline:
184
+ if isinstance(app, GatekeeperMiddleware):
185
+ raise ValueError(
186
+ "Gatekeeper middleware is not allowed in the "
187
+ "InternalClient proxy pipeline")
188
+ except AttributeError:
189
+ pass
190
+
170
191
  def make_request(
171
192
  self, method, path, headers, acceptable_statuses, body_file=None,
172
193
  params=None):