grpc 1.16.0 → 1.17.0.pre1

Potentially problematic release: RubyGems flags this version of grpc as potentially problematic.

Files changed (173)
  1. checksums.yaml +4 -4
  2. data/Makefile +299 -133
  3. data/include/grpc/grpc.h +11 -1
  4. data/include/grpc/grpc_posix.h +0 -8
  5. data/include/grpc/impl/codegen/grpc_types.h +3 -0
  6. data/src/core/ext/filters/client_channel/client_channel.cc +336 -345
  7. data/src/core/ext/filters/client_channel/client_channel.h +6 -2
  8. data/src/core/ext/filters/client_channel/client_channel_channelz.cc +3 -1
  9. data/src/core/ext/filters/client_channel/client_channel_channelz.h +0 -7
  10. data/src/core/ext/filters/client_channel/health/health.pb.c +23 -0
  11. data/src/core/ext/filters/client_channel/health/health.pb.h +73 -0
  12. data/src/core/ext/filters/client_channel/health/health_check_client.cc +652 -0
  13. data/src/core/ext/filters/client_channel/health/health_check_client.h +173 -0
  14. data/src/core/ext/filters/client_channel/http_connect_handshaker.cc +2 -1
  15. data/src/core/ext/filters/client_channel/http_proxy.cc +1 -1
  16. data/src/core/ext/filters/client_channel/lb_policy.h +17 -14
  17. data/src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc +15 -11
  18. data/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc +21 -15
  19. data/src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc +18 -10
  20. data/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc +12 -9
  21. data/src/core/ext/filters/client_channel/lb_policy/subchannel_list.h +19 -8
  22. data/src/core/ext/filters/client_channel/lb_policy/xds/xds.cc +1832 -0
  23. data/src/core/ext/filters/client_channel/lb_policy/xds/xds.h +36 -0
  24. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h +36 -0
  25. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_channel_secure.cc +107 -0
  26. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.cc +85 -0
  27. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h +72 -0
  28. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.cc +307 -0
  29. data/src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h +89 -0
  30. data/src/core/ext/filters/client_channel/lb_policy_factory.h +1 -1
  31. data/src/core/ext/filters/client_channel/lb_policy_registry.cc +5 -0
  32. data/src/core/ext/filters/client_channel/lb_policy_registry.h +4 -0
  33. data/src/core/ext/filters/client_channel/parse_address.h +1 -1
  34. data/src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc +19 -22
  35. data/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc +41 -39
  36. data/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h +3 -2
  37. data/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc +4 -1
  38. data/src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc +15 -2
  39. data/src/core/ext/filters/client_channel/resolver/fake/fake_resolver.h +5 -1
  40. data/src/core/ext/filters/client_channel/resolver_factory.h +1 -1
  41. data/src/core/ext/filters/client_channel/resolver_result_parsing.cc +384 -0
  42. data/src/core/ext/filters/client_channel/resolver_result_parsing.h +146 -0
  43. data/src/core/ext/filters/client_channel/subchannel.cc +361 -103
  44. data/src/core/ext/filters/client_channel/subchannel.h +14 -8
  45. data/src/core/ext/filters/deadline/deadline_filter.cc +19 -23
  46. data/src/core/ext/filters/deadline/deadline_filter.h +9 -13
  47. data/src/core/ext/filters/http/client/http_client_filter.cc +29 -19
  48. data/src/core/ext/filters/http/client_authority_filter.cc +2 -3
  49. data/src/core/ext/filters/http/message_compress/message_compress_filter.cc +28 -16
  50. data/src/core/ext/filters/http/server/http_server_filter.cc +31 -20
  51. data/src/core/ext/filters/message_size/message_size_filter.cc +50 -45
  52. data/src/core/ext/transport/chttp2/client/chttp2_connector.cc +13 -6
  53. data/src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc +1 -1
  54. data/src/core/ext/transport/chttp2/server/chttp2_server.cc +58 -8
  55. data/src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc +1 -1
  56. data/src/core/ext/transport/chttp2/transport/chttp2_transport.cc +175 -173
  57. data/src/core/ext/transport/chttp2/transport/chttp2_transport.h +2 -1
  58. data/src/core/ext/transport/chttp2/transport/frame_data.cc +4 -10
  59. data/src/core/ext/transport/chttp2/transport/frame_data.h +10 -12
  60. data/src/core/ext/transport/chttp2/transport/frame_rst_stream.cc +1 -1
  61. data/src/core/ext/transport/chttp2/transport/hpack_encoder.cc +28 -25
  62. data/src/core/ext/transport/chttp2/transport/incoming_metadata.cc +0 -12
  63. data/src/core/ext/transport/chttp2/transport/incoming_metadata.h +12 -9
  64. data/src/core/ext/transport/chttp2/transport/internal.h +109 -94
  65. data/src/core/ext/transport/chttp2/transport/parsing.cc +4 -2
  66. data/src/core/ext/transport/inproc/inproc_transport.cc +280 -300
  67. data/src/core/lib/channel/channel_stack.cc +5 -4
  68. data/src/core/lib/channel/channel_stack.h +4 -4
  69. data/src/core/lib/channel/channel_stack_builder.cc +14 -2
  70. data/src/core/lib/channel/channel_stack_builder.h +8 -0
  71. data/src/core/lib/channel/channel_trace.cc +6 -2
  72. data/src/core/lib/channel/channelz.cc +137 -5
  73. data/src/core/lib/channel/channelz.h +32 -6
  74. data/src/core/lib/channel/channelz_registry.cc +134 -28
  75. data/src/core/lib/channel/channelz_registry.h +25 -3
  76. data/src/core/lib/channel/context.h +4 -4
  77. data/src/core/lib/channel/handshaker.cc +7 -6
  78. data/src/core/lib/channel/handshaker.h +7 -8
  79. data/src/core/lib/channel/handshaker_factory.cc +3 -2
  80. data/src/core/lib/channel/handshaker_factory.h +2 -0
  81. data/src/core/lib/channel/handshaker_registry.cc +6 -2
  82. data/src/core/lib/channel/handshaker_registry.h +1 -0
  83. data/src/core/lib/gpr/arena.cc +84 -37
  84. data/src/core/lib/gpr/arena.h +2 -0
  85. data/src/core/lib/gpr/mpscq.h +4 -2
  86. data/src/core/lib/gprpp/inlined_vector.h +8 -0
  87. data/src/core/lib/gprpp/ref_counted.h +105 -18
  88. data/src/core/lib/gprpp/ref_counted_ptr.h +11 -0
  89. data/src/core/lib/http/httpcli_security_connector.cc +7 -4
  90. data/src/core/lib/iomgr/call_combiner.cc +2 -0
  91. data/src/core/lib/iomgr/call_combiner.h +2 -2
  92. data/src/core/lib/iomgr/closure.h +1 -0
  93. data/src/core/lib/iomgr/error.cc +16 -31
  94. data/src/core/lib/iomgr/error.h +29 -4
  95. data/src/core/lib/iomgr/error_internal.h +0 -2
  96. data/src/core/lib/iomgr/ev_epoll1_linux.cc +7 -3
  97. data/src/core/lib/iomgr/ev_posix.cc +0 -2
  98. data/src/core/lib/iomgr/polling_entity.h +4 -4
  99. data/src/core/lib/iomgr/resource_quota.cc +64 -10
  100. data/src/core/lib/iomgr/resource_quota.h +21 -6
  101. data/src/core/lib/iomgr/socket_utils_common_posix.cc +11 -5
  102. data/src/core/lib/iomgr/tcp_client_custom.cc +14 -3
  103. data/src/core/lib/iomgr/tcp_client_posix.cc +2 -0
  104. data/src/core/lib/iomgr/tcp_posix.cc +4 -2
  105. data/src/core/lib/iomgr/timer_manager.cc +1 -1
  106. data/src/core/lib/iomgr/wakeup_fd_eventfd.cc +3 -4
  107. data/src/core/lib/security/context/security_context.cc +20 -13
  108. data/src/core/lib/security/context/security_context.h +27 -19
  109. data/src/core/lib/security/credentials/alts/alts_credentials.cc +1 -1
  110. data/src/core/lib/security/credentials/credentials.h +2 -2
  111. data/src/core/lib/security/credentials/fake/fake_credentials.cc +1 -0
  112. data/src/core/lib/security/credentials/google_default/google_default_credentials.cc +39 -54
  113. data/src/core/lib/security/credentials/google_default/google_default_credentials.h +3 -2
  114. data/src/core/lib/security/credentials/local/local_credentials.cc +1 -1
  115. data/src/core/lib/security/credentials/plugin/plugin_credentials.cc +1 -2
  116. data/src/core/lib/security/credentials/ssl/ssl_credentials.h +2 -0
  117. data/src/core/lib/security/security_connector/{alts_security_connector.cc → alts/alts_security_connector.cc} +10 -9
  118. data/src/core/lib/security/security_connector/{alts_security_connector.h → alts/alts_security_connector.h} +3 -3
  119. data/src/core/lib/security/security_connector/fake/fake_security_connector.cc +310 -0
  120. data/src/core/lib/security/security_connector/fake/fake_security_connector.h +42 -0
  121. data/src/core/lib/security/security_connector/{local_security_connector.cc → local/local_security_connector.cc} +4 -3
  122. data/src/core/lib/security/security_connector/{local_security_connector.h → local/local_security_connector.h} +3 -3
  123. data/src/core/lib/security/security_connector/security_connector.cc +4 -1039
  124. data/src/core/lib/security/security_connector/security_connector.h +6 -114
  125. data/src/core/lib/security/security_connector/ssl/ssl_security_connector.cc +474 -0
  126. data/src/core/lib/security/security_connector/ssl/ssl_security_connector.h +77 -0
  127. data/src/core/lib/security/security_connector/ssl_utils.cc +345 -0
  128. data/src/core/lib/security/security_connector/ssl_utils.h +93 -0
  129. data/src/core/lib/security/transport/client_auth_filter.cc +28 -17
  130. data/src/core/lib/security/transport/secure_endpoint.cc +51 -41
  131. data/src/core/lib/security/transport/security_handshaker.cc +6 -7
  132. data/src/core/lib/security/transport/server_auth_filter.cc +39 -31
  133. data/src/core/lib/surface/call.cc +100 -80
  134. data/src/core/lib/surface/call.h +4 -0
  135. data/src/core/lib/surface/channel.cc +27 -13
  136. data/src/core/lib/surface/channel.h +4 -3
  137. data/src/core/lib/surface/completion_queue.cc +8 -1
  138. data/src/core/lib/surface/init.cc +1 -0
  139. data/src/core/lib/surface/server.cc +111 -46
  140. data/src/core/lib/surface/server.h +16 -2
  141. data/src/core/lib/surface/version.cc +2 -2
  142. data/src/core/lib/transport/error_utils.cc +4 -2
  143. data/src/core/lib/transport/metadata.cc +3 -2
  144. data/src/core/lib/transport/metadata.h +3 -2
  145. data/src/core/lib/transport/metadata_batch.cc +1 -0
  146. data/src/core/lib/transport/metadata_batch.h +4 -2
  147. data/src/core/lib/transport/static_metadata.cc +225 -221
  148. data/src/core/lib/transport/static_metadata.h +74 -71
  149. data/src/core/lib/transport/transport.h +44 -26
  150. data/src/core/{ext/filters/client_channel → lib/uri}/uri_parser.cc +1 -1
  151. data/src/core/{ext/filters/client_channel → lib/uri}/uri_parser.h +3 -3
  152. data/src/core/plugin_registry/grpc_plugin_registry.cc +4 -4
  153. data/src/core/tsi/alts/handshaker/alts_handshaker_client.cc +356 -77
  154. data/src/core/tsi/alts/handshaker/alts_handshaker_client.h +46 -36
  155. data/src/core/tsi/alts/handshaker/alts_shared_resource.cc +83 -0
  156. data/src/core/tsi/alts/handshaker/alts_shared_resource.h +73 -0
  157. data/src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc +122 -175
  158. data/src/core/tsi/alts/handshaker/alts_tsi_handshaker.h +33 -22
  159. data/src/core/tsi/alts/handshaker/alts_tsi_handshaker_private.h +38 -10
  160. data/src/core/tsi/transport_security.cc +18 -1
  161. data/src/core/tsi/transport_security.h +2 -1
  162. data/src/ruby/ext/grpc/rb_grpc_imports.generated.c +4 -2
  163. data/src/ruby/ext/grpc/rb_grpc_imports.generated.h +6 -3
  164. data/src/ruby/lib/grpc/version.rb +1 -1
  165. data/src/ruby/spec/pb/codegen/grpc/testing/package_options.proto +28 -0
  166. data/src/ruby/spec/pb/codegen/package_option_spec.rb +2 -3
  167. metadata +58 -40
  168. data/src/core/ext/filters/client_channel/method_params.cc +0 -178
  169. data/src/core/ext/filters/client_channel/method_params.h +0 -78
  170. data/src/core/tsi/alts/handshaker/alts_tsi_event.cc +0 -75
  171. data/src/core/tsi/alts/handshaker/alts_tsi_event.h +0 -93
  172. data/src/core/tsi/alts_transport_security.cc +0 -65
  173. data/src/core/tsi/alts_transport_security.h +0 -47

data/src/core/lib/channel/channelz_registry.h
@@ -30,6 +30,10 @@
 namespace grpc_core {
 namespace channelz {
 
+namespace testing {
+class ChannelzRegistryPeer;
+}
+
 // singleton registry object to track all objects that are needed to support
 // channelz bookkeeping. All objects share globally distributed uuids.
 class ChannelzRegistry {
@@ -40,7 +44,7 @@ class ChannelzRegistry {
   // To be called in grpc_shutdown();
   static void Shutdown();
 
-  static intptr_t Register(BaseNode* node) {
+  static void Register(BaseNode* node) {
     return Default()->InternalRegister(node);
   }
   static void Unregister(intptr_t uuid) { Default()->InternalUnregister(uuid); }
@@ -58,9 +62,14 @@ class ChannelzRegistry {
     return Default()->InternalGetServers(start_server_id);
   }
 
+  // Test only helper function to dump the JSON representation to std out.
+  // This can aid in debugging channelz code.
+  static void LogAllEntities() { Default()->InternalLogAllEntities(); }
+
  private:
  GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_NEW
  GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
+  friend class testing::ChannelzRegistryPeer;
 
   ChannelzRegistry();
   ~ChannelzRegistry();
@@ -69,7 +78,7 @@ class ChannelzRegistry {
   static ChannelzRegistry* Default();
 
   // globally registers an Entry. Returns its unique uuid
-  intptr_t InternalRegister(BaseNode* node);
+  void InternalRegister(BaseNode* node);
 
   // globally unregisters the object that is associated to uuid. Also does
   // sanity check that an object doesn't try to unregister the wrong type.
@@ -82,9 +91,22 @@ class ChannelzRegistry {
   char* InternalGetTopChannels(intptr_t start_channel_id);
   char* InternalGetServers(intptr_t start_server_id);
 
-  // protects entities_ and uuid_
+  // If entities_ has over a certain threshold of empty slots, it will
+  // compact the vector and move all used slots to the front.
+  void MaybePerformCompactionLocked();
+
+  // Performs binary search on entities_ to find the index with that uuid.
+  // If direct_hit_needed, then will return -1 in case of absence.
+  // Else, will return idx of the first uuid higher than the target.
+  int FindByUuidLocked(intptr_t uuid, bool direct_hit_needed);
+
+  void InternalLogAllEntities();
+
+  // protects members
   gpr_mu mu_;
   InlinedVector<BaseNode*, 20> entities_;
+  intptr_t uuid_generator_ = 0;
+  int num_empty_slots_ = 0;
 };
 
 }  // namespace channelz
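
The new private helpers above spell out the registry's lookup strategy: uuids come from the monotonically increasing uuid_generator_, so entities_ stays sorted and FindByUuidLocked can binary-search it, while MaybePerformCompactionLocked reclaims the slots left empty by Unregister. The following is a minimal standalone sketch of that lookup contract only (an assumption-labeled illustration, not grpc's actual implementation, and it omits the empty-slot handling the real code needs):

// Illustrative only: binary search over a uuid-sorted container, matching the
// comment on FindByUuidLocked above. Returns the index of `uuid`, or, when a
// direct hit is not required, the index of the first uuid greater than the
// target; returns -1 when a direct hit is required but absent.
#include <cstdint>
#include <vector>

int FindByUuid(const std::vector<intptr_t>& uuids, intptr_t uuid,
               bool direct_hit_needed) {
  int lo = 0;
  int hi = static_cast<int>(uuids.size());  // search window is [lo, hi)
  while (lo < hi) {
    int mid = lo + (hi - lo) / 2;
    if (uuids[mid] == uuid) return mid;
    if (uuids[mid] < uuid) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  return direct_hit_needed ? -1 : lo;  // lo is the first index past the target
}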

data/src/core/lib/channel/context.h
@@ -41,9 +41,9 @@ typedef enum {
   GRPC_CONTEXT_COUNT
 } grpc_context_index;
 
-typedef struct {
-  void* value;
-  void (*destroy)(void*);
-} grpc_call_context_element;
+struct grpc_call_context_element {
+  void* value = nullptr;
+  void (*destroy)(void*) = nullptr;
+};
 
 #endif /* GRPC_CORE_LIB_CHANNEL_CONTEXT_H */

data/src/core/lib/channel/handshaker.cc
@@ -292,17 +292,18 @@ static void on_timeout(void* arg, grpc_error* error) {
   grpc_handshake_manager_unref(mgr);
 }
 
-void grpc_handshake_manager_do_handshake(
-    grpc_handshake_manager* mgr, grpc_pollset_set* interested_parties,
-    grpc_endpoint* endpoint, const grpc_channel_args* channel_args,
-    grpc_millis deadline, grpc_tcp_server_acceptor* acceptor,
-    grpc_iomgr_cb_func on_handshake_done, void* user_data) {
+void grpc_handshake_manager_do_handshake(grpc_handshake_manager* mgr,
+                                         grpc_endpoint* endpoint,
+                                         const grpc_channel_args* channel_args,
+                                         grpc_millis deadline,
+                                         grpc_tcp_server_acceptor* acceptor,
+                                         grpc_iomgr_cb_func on_handshake_done,
+                                         void* user_data) {
   gpr_mu_lock(&mgr->mu);
   GPR_ASSERT(mgr->index == 0);
   GPR_ASSERT(!mgr->shutdown);
   // Construct handshaker args. These will be passed through all
   // handshakers and eventually be freed by the on_handshake_done callback.
-  mgr->args.interested_parties = interested_parties;
   mgr->args.endpoint = endpoint;
   mgr->args.args = grpc_channel_args_copy(channel_args);
   mgr->args.user_data = user_data;

data/src/core/lib/channel/handshaker.h
@@ -56,7 +56,6 @@ typedef struct grpc_handshaker grpc_handshaker;
 /// For the on_handshake_done callback, all members are input arguments,
 /// which the callback takes ownership of.
 typedef struct {
-  grpc_pollset_set* interested_parties;
   grpc_endpoint* endpoint;
   grpc_channel_args* args;
   grpc_slice_buffer* read_buffer;
@@ -132,8 +131,6 @@ void grpc_handshake_manager_shutdown(grpc_handshake_manager* mgr,
                                      grpc_error* why);
 
 /// Invokes handshakers in the order they were added.
-/// \a interested_parties may be non-nullptr to provide a pollset_set that
-/// may be used during handshaking. Ownership is not taken.
 /// Takes ownership of \a endpoint, and then passes that ownership to
 /// the \a on_handshake_done callback.
 /// Does NOT take ownership of \a channel_args. Instead, makes a copy before
@@ -145,11 +142,13 @@ void grpc_handshake_manager_shutdown(grpc_handshake_manager* mgr,
 /// GRPC_ERROR_NONE, then handshaking failed and the handshaker has done
 /// the necessary clean-up. Otherwise, the callback takes ownership of
 /// the arguments.
-void grpc_handshake_manager_do_handshake(
-    grpc_handshake_manager* mgr, grpc_pollset_set* interested_parties,
-    grpc_endpoint* endpoint, const grpc_channel_args* channel_args,
-    grpc_millis deadline, grpc_tcp_server_acceptor* acceptor,
-    grpc_iomgr_cb_func on_handshake_done, void* user_data);
+void grpc_handshake_manager_do_handshake(grpc_handshake_manager* mgr,
+                                         grpc_endpoint* endpoint,
+                                         const grpc_channel_args* channel_args,
+                                         grpc_millis deadline,
+                                         grpc_tcp_server_acceptor* acceptor,
+                                         grpc_iomgr_cb_func on_handshake_done,
+                                         void* user_data);
 
 /// Add \a mgr to the server side list of all pending handshake managers, the
 /// list starts with \a *head.

data/src/core/lib/channel/handshaker_factory.cc
@@ -24,11 +24,12 @@
 
 void grpc_handshaker_factory_add_handshakers(
     grpc_handshaker_factory* handshaker_factory, const grpc_channel_args* args,
+    grpc_pollset_set* interested_parties,
     grpc_handshake_manager* handshake_mgr) {
   if (handshaker_factory != nullptr) {
     GPR_ASSERT(handshaker_factory->vtable != nullptr);
-    handshaker_factory->vtable->add_handshakers(handshaker_factory, args,
-                                                handshake_mgr);
+    handshaker_factory->vtable->add_handshakers(
+        handshaker_factory, args, interested_parties, handshake_mgr);
   }
 }
 

data/src/core/lib/channel/handshaker_factory.h
@@ -32,6 +32,7 @@ typedef struct grpc_handshaker_factory grpc_handshaker_factory;
 typedef struct {
   void (*add_handshakers)(grpc_handshaker_factory* handshaker_factory,
                           const grpc_channel_args* args,
+                          grpc_pollset_set* interested_parties,
                           grpc_handshake_manager* handshake_mgr);
   void (*destroy)(grpc_handshaker_factory* handshaker_factory);
 } grpc_handshaker_factory_vtable;
@@ -42,6 +43,7 @@ struct grpc_handshaker_factory {
 
 void grpc_handshaker_factory_add_handshakers(
     grpc_handshaker_factory* handshaker_factory, const grpc_channel_args* args,
+    grpc_pollset_set* interested_parties,
     grpc_handshake_manager* handshake_mgr);
 
 void grpc_handshaker_factory_destroy(

data/src/core/lib/channel/handshaker_registry.cc
@@ -51,9 +51,11 @@ static void grpc_handshaker_factory_list_register(
 
 static void grpc_handshaker_factory_list_add_handshakers(
     grpc_handshaker_factory_list* list, const grpc_channel_args* args,
+    grpc_pollset_set* interested_parties,
     grpc_handshake_manager* handshake_mgr) {
   for (size_t i = 0; i < list->num_factories; ++i) {
-    grpc_handshaker_factory_add_handshakers(list->list[i], args, handshake_mgr);
+    grpc_handshaker_factory_add_handshakers(list->list[i], args,
+                                            interested_parties, handshake_mgr);
   }
 }
 
@@ -91,7 +93,9 @@ void grpc_handshaker_factory_register(bool at_start,
 
 void grpc_handshakers_add(grpc_handshaker_type handshaker_type,
                           const grpc_channel_args* args,
+                          grpc_pollset_set* interested_parties,
                           grpc_handshake_manager* handshake_mgr) {
   grpc_handshaker_factory_list_add_handshakers(
-      &g_handshaker_factory_lists[handshaker_type], args, handshake_mgr);
+      &g_handshaker_factory_lists[handshaker_type], args, interested_parties,
+      handshake_mgr);
 }

data/src/core/lib/channel/handshaker_registry.h
@@ -43,6 +43,7 @@ void grpc_handshaker_factory_register(bool at_start,
 
 void grpc_handshakers_add(grpc_handshaker_type handshaker_type,
                           const grpc_channel_args* args,
+                          grpc_pollset_set* interested_parties,
                           grpc_handshake_manager* handshake_mgr);
 
 #endif /* GRPC_CORE_LIB_CHANNEL_HANDSHAKER_REGISTRY_H */
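
Taken together, the handshaker diffs above move the interested_parties pollset_set out of grpc_handshake_manager_do_handshake and thread it through the add_handshakers path instead. Below is a hedged caller-side sketch of the resulting flow; the signatures of grpc_handshakers_add and grpc_handshake_manager_do_handshake are the ones shown above, while grpc_handshake_manager_create() and the HANDSHAKER_CLIENT enumerator are taken from the surrounding grpc tree and are assumptions of this sketch, not part of the diff:

// Sketch only: how a client-side caller would wire things up after this change.
#include "src/core/lib/channel/handshaker.h"
#include "src/core/lib/channel/handshaker_registry.h"

static void start_handshake(grpc_endpoint* endpoint,
                            const grpc_channel_args* args,
                            grpc_pollset_set* interested_parties,
                            grpc_millis deadline,
                            grpc_iomgr_cb_func on_handshake_done,
                            void* user_data) {
  grpc_handshake_manager* mgr = grpc_handshake_manager_create();
  // The pollset_set now rides along when the handshakers are added...
  grpc_handshakers_add(HANDSHAKER_CLIENT, args, interested_parties, mgr);
  // ...and is no longer a parameter of do_handshake.
  grpc_handshake_manager_do_handshake(mgr, endpoint, args, deadline,
                                      /*acceptor=*/nullptr, on_handshake_done,
                                      user_data);
}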

data/src/core/lib/gpr/arena.cc
@@ -21,6 +21,7 @@
 #include "src/core/lib/gpr/arena.h"
 
 #include <string.h>
+#include <new>
 
 #include <grpc/support/alloc.h>
 #include <grpc/support/atm.h>
@@ -28,34 +29,79 @@
 #include <grpc/support/sync.h>
 
 #include "src/core/lib/gpr/alloc.h"
+#include "src/core/lib/gpr/env.h"
+#include "src/core/lib/gprpp/memory.h"
+
+namespace {
+enum init_strategy {
+  NO_INIT,        // Do not initialize the arena blocks.
+  ZERO_INIT,      // Initialize arena blocks with 0.
+  NON_ZERO_INIT,  // Initialize arena blocks with a non-zero value.
+};
+
+gpr_once g_init_strategy_once = GPR_ONCE_INIT;
+init_strategy g_init_strategy = NO_INIT;
+}  // namespace
+
+static void set_strategy_from_env() {
+  char* str = gpr_getenv("GRPC_ARENA_INIT_STRATEGY");
+  if (str == nullptr) {
+    g_init_strategy = NO_INIT;
+  } else if (strcmp(str, "zero_init") == 0) {
+    g_init_strategy = ZERO_INIT;
+  } else if (strcmp(str, "non_zero_init") == 0) {
+    g_init_strategy = NON_ZERO_INIT;
+  } else {
+    g_init_strategy = NO_INIT;
+  }
+  gpr_free(str);
+}
+
+static void* gpr_arena_alloc_maybe_init(size_t size) {
+  void* mem = gpr_malloc_aligned(size, GPR_MAX_ALIGNMENT);
+  gpr_once_init(&g_init_strategy_once, set_strategy_from_env);
+  if (GPR_UNLIKELY(g_init_strategy != NO_INIT)) {
+    if (g_init_strategy == ZERO_INIT) {
+      memset(mem, 0, size);
+    } else {  // NON_ZERO_INIT.
+      memset(mem, 0xFE, size);
+    }
+  }
+  return mem;
+}
+
+void gpr_arena_init() {
+  gpr_once_init(&g_init_strategy_once, set_strategy_from_env);
+}
 
 // Uncomment this to use a simple arena that simply allocates the
 // requested amount of memory for each call to gpr_arena_alloc(). This
 // effectively eliminates the efficiency gain of using an arena, but it
 // may be useful for debugging purposes.
 //#define SIMPLE_ARENA_FOR_DEBUGGING
-
 #ifdef SIMPLE_ARENA_FOR_DEBUGGING
 
 struct gpr_arena {
+  gpr_arena() { gpr_mu_init(&mu); }
+  ~gpr_arena() {
+    gpr_mu_destroy(&mu);
+    for (size_t i = 0; i < num_ptrs; ++i) {
+      gpr_free_aligned(ptrs[i]);
+    }
+    gpr_free(ptrs);
+  }
+
   gpr_mu mu;
-  void** ptrs;
-  size_t num_ptrs;
+  void** ptrs = nullptr;
+  size_t num_ptrs = 0;
 };
 
 gpr_arena* gpr_arena_create(size_t ignored_initial_size) {
-  gpr_arena* arena = (gpr_arena*)gpr_zalloc(sizeof(*arena));
-  gpr_mu_init(&arena->mu);
-  return arena;
+  return grpc_core::New<gpr_arena>();
 }
 
 size_t gpr_arena_destroy(gpr_arena* arena) {
-  gpr_mu_destroy(&arena->mu);
-  for (size_t i = 0; i < arena->num_ptrs; ++i) {
-    gpr_free(arena->ptrs[i]);
-  }
-  gpr_free(arena->ptrs);
-  gpr_free(arena);
+  grpc_core::Delete(arena);
   return 1;  // Value doesn't matter, since it won't be used.
 }
 
@@ -63,7 +109,8 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
   gpr_mu_lock(&arena->mu);
   arena->ptrs =
       (void**)gpr_realloc(arena->ptrs, sizeof(void*) * (arena->num_ptrs + 1));
-  void* retval = arena->ptrs[arena->num_ptrs++] = gpr_zalloc(size);
+  void* retval = arena->ptrs[arena->num_ptrs++] =
+      gpr_arena_alloc_maybe_init(size);
   gpr_mu_unlock(&arena->mu);
   return retval;
 }
@@ -77,45 +124,45 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
 // would allow us to use the alignment actually needed by the caller.
 
 typedef struct zone {
-  zone* next;
+  zone* next = nullptr;
 } zone;
 
 struct gpr_arena {
+  gpr_arena(size_t initial_size)
+      : initial_zone_size(initial_size), last_zone(&initial_zone) {
+    gpr_mu_init(&arena_growth_mutex);
+  }
+  ~gpr_arena() {
+    gpr_mu_destroy(&arena_growth_mutex);
+    zone* z = initial_zone.next;
+    while (z) {
+      zone* next_z = z->next;
+      z->~zone();
+      gpr_free_aligned(z);
+      z = next_z;
+    }
+  }
+
   // Keep track of the total used size. We use this in our call sizing
   // historesis.
-  gpr_atm total_used;
+  gpr_atm total_used = 0;
   size_t initial_zone_size;
   zone initial_zone;
   zone* last_zone;
   gpr_mu arena_growth_mutex;
 };
 
-static void* zalloc_aligned(size_t size) {
-  void* ptr = gpr_malloc_aligned(size, GPR_MAX_ALIGNMENT);
-  memset(ptr, 0, size);
-  return ptr;
-}
-
 gpr_arena* gpr_arena_create(size_t initial_size) {
   initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
-  gpr_arena* a = static_cast<gpr_arena*>(zalloc_aligned(
-      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size));
-  a->initial_zone_size = initial_size;
-  a->last_zone = &a->initial_zone;
-  gpr_mu_init(&a->arena_growth_mutex);
-  return a;
+  return new (gpr_arena_alloc_maybe_init(
+      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size))
+      gpr_arena(initial_size);
 }
 
 size_t gpr_arena_destroy(gpr_arena* arena) {
-  gpr_mu_destroy(&arena->arena_growth_mutex);
-  gpr_atm size = gpr_atm_no_barrier_load(&arena->total_used);
-  zone* z = arena->initial_zone.next;
+  const gpr_atm size = gpr_atm_no_barrier_load(&arena->total_used);
+  arena->~gpr_arena();
   gpr_free_aligned(arena);
-  while (z) {
-    zone* next_z = z->next;
-    gpr_free_aligned(z);
-    z = next_z;
-  }
   return static_cast<size_t>(size);
 }
 
@@ -132,8 +179,8 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
   // sizing historesis (that is, most calls should have a large enough initial
   // zone and will not need to grow the arena).
   gpr_mu_lock(&arena->arena_growth_mutex);
-  zone* z = static_cast<zone*>(
-      zalloc_aligned(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) + size));
+  zone* z = new (gpr_arena_alloc_maybe_init(
+      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) + size)) zone();
   arena->last_zone->next = z;
   arena->last_zone = z;
   gpr_mu_unlock(&arena->arena_growth_mutex);

data/src/core/lib/gpr/arena.h
@@ -37,5 +37,7 @@ gpr_arena* gpr_arena_create(size_t initial_size);
 void* gpr_arena_alloc(gpr_arena* arena, size_t size);
 // Destroy an arena, returning the total number of bytes allocated
 size_t gpr_arena_destroy(gpr_arena* arena);
+// Initializes the Arena component.
+void gpr_arena_init();
 
 #endif /* GRPC_CORE_LIB_GPR_ARENA_H */
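
A short usage sketch of the arena API as declared above. The allocation behavior comes from the arena.cc diff: blocks are no longer zero-filled by default, and the GRPC_ARENA_INIT_STRATEGY environment variable (read once, at first use) can switch to zero_init or non_zero_init for debugging. The demo function name below is illustrative only.

// Sketch only; uses just the functions declared in arena.h above.
#include "src/core/lib/gpr/arena.h"

static void arena_demo() {
  // Optionally export GRPC_ARENA_INIT_STRATEGY=zero_init before the process
  // starts to get the old zero-filled blocks back for debugging.
  gpr_arena* arena = gpr_arena_create(1024);  // initial zone of ~1 KiB
  void* small = gpr_arena_alloc(arena, 64);   // served from the initial zone
  void* big = gpr_arena_alloc(arena, 4096);   // grows the arena with a new zone
  (void)small;
  (void)big;
  size_t total = gpr_arena_destroy(arena);    // frees every zone at once
  (void)total;                                // total bytes handed out
}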

data/src/core/lib/gpr/mpscq.h
@@ -38,9 +38,11 @@ typedef struct gpr_mpscq_node {
 
 // Actual queue type
 typedef struct gpr_mpscq {
-  gpr_atm head;
   // make sure head & tail don't share a cacheline
-  char padding[GPR_CACHELINE_SIZE];
+  union {
+    char padding[GPR_CACHELINE_SIZE];
+    gpr_atm head;
+  };
   gpr_mpscq_node* tail;
   gpr_mpscq_node stub;
 } gpr_mpscq;
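
The union above is a layout trick: head is overlaid on a cacheline-sized pad, so head and tail land on different cache lines without a separate padding field after head. A self-contained sketch of the same technique follows; the struct name and the 64-byte constant (standing in for GPR_CACHELINE_SIZE) are assumptions of the sketch, not grpc's gpr_mpscq itself.

// Illustration only; mirrors the layout idea, not grpc's actual queue.
#include <cstddef>
#include <cstdint>

constexpr std::size_t kCacheLine = 64;  // stand-in for GPR_CACHELINE_SIZE

struct padded_queue {
  union {
    char padding[kCacheLine];  // reserves a full cache line...
    std::intptr_t head;        // ...with the producer-facing head on top of it
  };
  void* tail;                  // consumer-facing tail starts on the next line
};

static_assert(offsetof(padded_queue, tail) >= kCacheLine,
              "head and tail must not share a cache line");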

data/src/core/lib/gprpp/inlined_vector.h
@@ -123,6 +123,14 @@ class InlinedVector {
 
   void push_back(T&& value) { emplace_back(std::move(value)); }
 
+  void pop_back() {
+    assert(!empty());
+    size_t s = size();
+    T& value = data()[s - 1];
+    value.~T();
+    size_--;
+  }
+
   void copy_from(const InlinedVector& v) {
     // if v is allocated, copy over the buffer.
     if (v.dynamic_ != nullptr) {
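
A brief usage note for the pop_back() added above: it destroys the trailing element in place and shrinks the size by one; calling it on an empty vector trips the assert. A minimal sketch, assuming the rest of the InlinedVector API from this header (the demo function name is illustrative):

#include "src/core/lib/gprpp/inlined_vector.h"

static void pop_back_demo() {
  grpc_core::InlinedVector<int, 4> v;
  v.push_back(1);
  v.push_back(2);
  v.pop_back();  // destroys the trailing 2; v.size() is now 1
}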

data/src/core/lib/gprpp/ref_counted.h
@@ -24,6 +24,8 @@
 #include <grpc/support/log.h>
 #include <grpc/support/sync.h>
 
+#include <atomic>
+#include <cassert>
 #include <cinttypes>
 
 #include "src/core/lib/debug/trace.h"
@@ -34,14 +36,99 @@
 
 namespace grpc_core {
 
+// PolymorphicRefCount enforces polymorphic destruction of RefCounted.
+class PolymorphicRefCount {
+ public:
+  GRPC_ABSTRACT_BASE_CLASS
+
+ protected:
+  GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
+
+  virtual ~PolymorphicRefCount() = default;
+};
+
+// NonPolymorphicRefCount does not enforce polymorphic destruction of
+// RefCounted. Please refer to grpc_core::RefCounted for more details, and
+// when in doubt use PolymorphicRefCount.
+class NonPolymorphicRefCount {
+ public:
+  GRPC_ABSTRACT_BASE_CLASS
+
+ protected:
+  GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
+
+  ~NonPolymorphicRefCount() = default;
+};
+
+// RefCount is a simple atomic ref-count.
+//
+// This is a C++ implementation of gpr_refcount, with inline functions. Due to
+// inline functions, this class is significantly more efficient than
+// gpr_refcount and should be preferred over gpr_refcount whenever possible.
+//
+// TODO(soheil): Remove gpr_refcount after submitting the GRFC and the paragraph
+// above.
+class RefCount {
+ public:
+  using Value = intptr_t;
+
+  // `init` is the initial refcount stored in this object.
+  constexpr explicit RefCount(Value init = 1) : value_(init) {}
+
+  // Increases the ref-count by `n`.
+  void Ref(Value n = 1) { value_.fetch_add(n, std::memory_order_relaxed); }
+
+  // Similar to Ref() with an assert on the ref-count being non-zero.
+  void RefNonZero() {
+#ifndef NDEBUG
+    const Value prior = value_.fetch_add(1, std::memory_order_relaxed);
+    assert(prior > 0);
+#else
+    Ref();
+#endif
+  }
+
+  // Decrements the ref-count and returns true if the ref-count reaches 0.
+  bool Unref() {
+    const Value prior = value_.fetch_sub(1, std::memory_order_acq_rel);
+    GPR_DEBUG_ASSERT(prior > 0);
+    return prior == 1;
+  }
+
+  Value get() const { return value_.load(std::memory_order_relaxed); }
+
+ private:
+  std::atomic<Value> value_;
+};
+
 // A base class for reference-counted objects.
 // New objects should be created via New() and start with a refcount of 1.
 // When the refcount reaches 0, the object will be deleted via Delete().
 //
 // This will commonly be used by CRTP (curiously-recurring template pattern)
 // e.g., class MyClass : public RefCounted<MyClass>
-template <typename Child>
-class RefCounted {
+//
+// Use PolymorphicRefCount and NonPolymorphicRefCount to select between
+// different implementations of RefCounted.
+//
+// Note that NonPolymorphicRefCount does not support polymorphic destruction.
+// So, use NonPolymorphicRefCount only when both of the following conditions
+// are guaranteed to hold:
+// (a) Child is a concrete leaf class in RefCounted<Child>, and
+// (b) you are gauranteed to call Unref only on concrete leaf classes and not
+//     their parents.
+//
+// The following example is illegal, because calling Unref() will not call
+// the dtor of Child.
+//
+// class Parent : public RefCounted<Parent, NonPolymorphicRefCount> {}
+// class Child : public Parent {}
+//
+// Child* ch;
+// ch->Unref();
+//
+template <typename Child, typename Impl = PolymorphicRefCount>
+class RefCounted : public Impl {
  public:
   RefCountedPtr<Child> Ref() GRPC_MUST_USE_RESULT {
     IncrementRefCount();
@@ -53,7 +140,7 @@ class RefCounted {
   // private, since it will only be used by RefCountedPtr<>, which is a
   // friend of this class.
   void Unref() {
-    if (gpr_unref(&refs_)) {
+    if (refs_.Unref()) {
       Delete(static_cast<Child*>(this));
     }
   }
@@ -67,18 +154,19 @@ class RefCounted {
  protected:
  GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
 
-  RefCounted() { gpr_ref_init(&refs_, 1); }
+  RefCounted() = default;
 
-  virtual ~RefCounted() {}
+  // Note: Depending on the Impl used, this dtor can be implicitly virtual.
+  ~RefCounted() = default;
 
  private:
   // Allow RefCountedPtr<> to access IncrementRefCount().
   template <typename T>
   friend class RefCountedPtr;
 
-  void IncrementRefCount() { gpr_ref(&refs_); }
+  void IncrementRefCount() { refs_.Ref(); }
 
-  gpr_refcount refs_;
+  RefCount refs_;
 };
 
 // An alternative version of the RefCounted base class that
@@ -87,8 +175,8 @@ class RefCounted {
 // pointers and legacy code that is manually calling Ref() and Unref().
 // Once all of our code is converted to idiomatic C++, we may be able to
 // eliminate this class.
-template <typename Child>
-class RefCountedWithTracing {
+template <typename Child, typename Impl = PolymorphicRefCount>
+class RefCountedWithTracing : public Impl {
  public:
   RefCountedPtr<Child> Ref() GRPC_MUST_USE_RESULT {
     IncrementRefCount();
@@ -98,7 +186,7 @@ class RefCountedWithTracing {
   RefCountedPtr<Child> Ref(const DebugLocation& location,
                            const char* reason) GRPC_MUST_USE_RESULT {
     if (location.Log() && trace_flag_ != nullptr && trace_flag_->enabled()) {
-      gpr_atm old_refs = gpr_atm_no_barrier_load(&refs_.count);
+      const RefCount::Value old_refs = refs_.get();
       gpr_log(GPR_INFO, "%s:%p %s:%d ref %" PRIdPTR " -> %" PRIdPTR " %s",
               trace_flag_->name(), this, location.file(), location.line(),
               old_refs, old_refs + 1, reason);
@@ -112,14 +200,14 @@ class RefCountedWithTracing {
   // friend of this class.
 
   void Unref() {
-    if (gpr_unref(&refs_)) {
+    if (refs_.Unref()) {
       Delete(static_cast<Child*>(this));
     }
   }
 
   void Unref(const DebugLocation& location, const char* reason) {
     if (location.Log() && trace_flag_ != nullptr && trace_flag_->enabled()) {
-      gpr_atm old_refs = gpr_atm_no_barrier_load(&refs_.count);
+      const RefCount::Value old_refs = refs_.get();
       gpr_log(GPR_INFO, "%s:%p %s:%d unref %" PRIdPTR " -> %" PRIdPTR " %s",
               trace_flag_->name(), this, location.file(), location.line(),
               old_refs, old_refs - 1, reason);
@@ -140,26 +228,25 @@ class RefCountedWithTracing {
       : RefCountedWithTracing(static_cast<TraceFlag*>(nullptr)) {}
 
   explicit RefCountedWithTracing(TraceFlag* trace_flag)
-      : trace_flag_(trace_flag) {
-    gpr_ref_init(&refs_, 1);
-  }
+      : trace_flag_(trace_flag) {}
 
 #ifdef NDEBUG
   explicit RefCountedWithTracing(DebugOnlyTraceFlag* trace_flag)
       : RefCountedWithTracing() {}
 #endif
 
-  virtual ~RefCountedWithTracing() {}
+  // Note: Depending on the Impl used, this dtor can be implicitly virtual.
+  ~RefCountedWithTracing() = default;
 
  private:
   // Allow RefCountedPtr<> to access IncrementRefCount().
   template <typename T>
   friend class RefCountedPtr;
 
-  void IncrementRefCount() { gpr_ref(&refs_); }
+  void IncrementRefCount() { refs_.Ref(); }
 
   TraceFlag* trace_flag_ = nullptr;
-  gpr_refcount refs_;
+  RefCount refs_;
 };
 
 }  // namespace grpc_core
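
The comment block in the diff above shows the illegal NonPolymorphicRefCount pattern; for contrast, here is a hedged sketch of the legal one, where the non-polymorphic refcount is used only on a concrete leaf class that is always released as that exact type. The class and function names are hypothetical, and grpc_core::New and RefCountedPtr come from the neighboring gprpp headers rather than from this diff.

// Sketch only: a hypothetical leaf class using the new Impl template parameter.
#include "src/core/lib/gprpp/memory.h"
#include "src/core/lib/gprpp/ref_counted.h"
#include "src/core/lib/gprpp/ref_counted_ptr.h"

class ConnectivityTracker final
    : public grpc_core::RefCounted<ConnectivityTracker,
                                   grpc_core::NonPolymorphicRefCount> {
 public:
  // No virtual methods and no subclasses, so skipping the vtable is safe.
};

static void refcount_demo() {
  // New() hands back the object with a refcount of 1; RefCountedPtr adopts it.
  grpc_core::RefCountedPtr<ConnectivityTracker> tracker(
      grpc_core::New<ConnectivityTracker>());
  grpc_core::RefCountedPtr<ConnectivityTracker> alias = tracker->Ref();  // 2
}  // both ptrs drop their refs; at 0 the object is Delete()d non-virtually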