riak-client 1.4.5 → 2.0.0.rc1
- checksums.yaml +4 -4
- data/.gitignore +2 -1
- data/Gemfile +0 -1
- data/{LICENSE → LICENSE.md} +0 -0
- data/README.markdown +211 -66
- data/RELEASE_NOTES.md +22 -47
- data/Rakefile +45 -0
- data/lib/riak.rb +1 -1
- data/lib/riak/bucket.rb +2 -2
- data/lib/riak/client.rb +22 -195
- data/lib/riak/client/beefcake/crdt_loader.rb +127 -0
- data/lib/riak/client/beefcake/crdt_operator.rb +222 -0
- data/lib/riak/client/beefcake/footer +4 -0
- data/lib/riak/client/beefcake/header +6 -0
- data/lib/riak/client/beefcake/message_codes.rb +29 -0
- data/lib/riak/client/beefcake/message_overlay.rb +61 -0
- data/lib/riak/client/beefcake/messages.rb +733 -371
- data/lib/riak/client/beefcake/object_methods.rb +1 -1
- data/lib/riak/client/beefcake/protocol.rb +105 -0
- data/lib/riak/client/beefcake/socket.rb +243 -0
- data/lib/riak/client/beefcake_protobuffs_backend.rb +262 -122
- data/lib/riak/client/node.rb +4 -75
- data/lib/riak/client/protobuffs_backend.rb +6 -14
- data/lib/riak/client/search.rb +0 -64
- data/lib/riak/client/yokozuna.rb +52 -0
- data/lib/riak/counter.rb +1 -1
- data/lib/riak/crdt.rb +21 -0
- data/lib/riak/crdt/base.rb +97 -0
- data/lib/riak/crdt/batch_counter.rb +19 -0
- data/lib/riak/crdt/batch_map.rb +41 -0
- data/lib/riak/crdt/counter.rb +71 -0
- data/lib/riak/crdt/inner_counter.rb +74 -0
- data/lib/riak/crdt/inner_flag.rb +42 -0
- data/lib/riak/crdt/inner_map.rb +53 -0
- data/lib/riak/crdt/inner_register.rb +26 -0
- data/lib/riak/crdt/inner_set.rb +95 -0
- data/lib/riak/crdt/map.rb +88 -0
- data/lib/riak/crdt/operation.rb +19 -0
- data/lib/riak/crdt/set.rb +156 -0
- data/lib/riak/crdt/typed_collection.rb +131 -0
- data/lib/riak/errors/base.rb +9 -0
- data/lib/riak/errors/connection_error.rb +44 -0
- data/lib/riak/errors/crdt_error.rb +18 -0
- data/lib/riak/errors/failed_request.rb +56 -0
- data/lib/riak/errors/protobuffs_error.rb +11 -0
- data/lib/riak/i18n.rb +2 -0
- data/lib/riak/json.rb +1 -1
- data/lib/riak/locale/en.yml +26 -1
- data/lib/riak/locale/fr.yml +0 -1
- data/lib/riak/map_reduce.rb +1 -1
- data/lib/riak/map_reduce/results.rb +1 -1
- data/lib/riak/multiget.rb +1 -2
- data/lib/riak/rcontent.rb +8 -3
- data/lib/riak/robject.rb +2 -8
- data/lib/riak/secondary_index.rb +4 -4
- data/lib/riak/serializers.rb +1 -1
- data/lib/riak/util/escape.rb +3 -5
- data/lib/riak/version.rb +1 -1
- data/lib/riak/walk_spec.rb +7 -3
- data/riak-client.gemspec +10 -8
- data/spec/fixtures/bitcask.txt +25 -0
- data/spec/integration/riak/bucket_types_spec.rb +61 -0
- data/spec/integration/riak/counters_spec.rb +17 -32
- data/spec/integration/riak/crdt_spec.rb +181 -0
- data/spec/integration/riak/crdt_validation/map_spec.rb +63 -0
- data/spec/integration/riak/crdt_validation/set_spec.rb +122 -0
- data/spec/integration/riak/protobuffs_backends_spec.rb +9 -26
- data/spec/integration/riak/security_spec.rb +94 -0
- data/spec/integration/riak/threading_spec.rb +24 -67
- data/spec/integration/yokozuna/index_spec.rb +61 -0
- data/spec/integration/yokozuna/queries_spec.rb +116 -0
- data/spec/integration/yokozuna/schema_spec.rb +49 -0
- data/spec/riak/beefcake_protobuffs_backend/crdt_operator_spec.rb +222 -0
- data/spec/riak/beefcake_protobuffs_backend/object_methods_spec.rb +4 -4
- data/spec/riak/beefcake_protobuffs_backend/protocol_spec.rb +189 -0
- data/spec/riak/beefcake_protobuffs_backend/socket_spec.rb +151 -0
- data/spec/riak/beefcake_protobuffs_backend_spec.rb +68 -106
- data/spec/riak/bucket_spec.rb +81 -77
- data/spec/riak/client_spec.rb +43 -340
- data/spec/riak/core_ext/to_param_spec.rb +2 -2
- data/spec/riak/counter_spec.rb +20 -20
- data/spec/riak/crdt/counter_spec.rb +52 -0
- data/spec/riak/crdt/inner_counter_spec.rb +21 -0
- data/spec/riak/crdt/inner_flag_spec.rb +39 -0
- data/spec/riak/crdt/inner_map_spec.rb +47 -0
- data/spec/riak/crdt/inner_register_spec.rb +40 -0
- data/spec/riak/crdt/inner_set_spec.rb +33 -0
- data/spec/riak/crdt/map_spec.rb +77 -0
- data/spec/riak/crdt/set_spec.rb +58 -0
- data/spec/riak/crdt/shared_examples.rb +74 -0
- data/spec/riak/crdt/typed_collection_spec.rb +231 -0
- data/spec/riak/escape_spec.rb +33 -37
- data/spec/riak/feature_detection_spec.rb +45 -45
- data/spec/riak/index_collection_spec.rb +12 -12
- data/spec/riak/link_spec.rb +34 -34
- data/spec/riak/list_buckets_spec.rb +7 -7
- data/spec/riak/map_reduce/filter_builder_spec.rb +6 -6
- data/spec/riak/map_reduce/phase_spec.rb +35 -35
- data/spec/riak/map_reduce_spec.rb +89 -87
- data/spec/riak/multiget_spec.rb +20 -15
- data/spec/riak/node_spec.rb +5 -152
- data/spec/riak/robject_spec.rb +95 -108
- data/spec/riak/search_spec.rb +17 -139
- data/spec/riak/secondary_index_spec.rb +49 -49
- data/spec/riak/serializers_spec.rb +9 -9
- data/spec/riak/stamp_spec.rb +9 -9
- data/spec/riak/walk_spec_spec.rb +46 -46
- data/spec/spec_helper.rb +14 -22
- data/spec/support/certs/README.md +13 -0
- data/spec/support/certs/ca.crt +22 -0
- data/spec/support/certs/client.crt +95 -0
- data/spec/support/certs/client.key +27 -0
- data/spec/support/certs/empty_ca.crt +21 -0
- data/spec/support/certs/server.crl +13 -0
- data/spec/support/certs/server.crt +95 -0
- data/spec/support/certs/server.key +27 -0
- data/spec/support/integration_setup.rb +1 -1
- data/spec/support/search_corpus_setup.rb +29 -8
- data/spec/support/test_client.rb +46 -0
- data/spec/support/test_client.yml.example +10 -0
- data/spec/support/unified_backend_examples.rb +104 -83
- data/spec/support/version_filter.rb +2 -2
- data/spec/support/wait_until.rb +14 -0
- metadata +134 -132
- data/erl_src/riak_kv_test014_backend.beam +0 -0
- data/erl_src/riak_kv_test014_backend.erl +0 -189
- data/erl_src/riak_kv_test_backend.beam +0 -0
- data/erl_src/riak_kv_test_backend.erl +0 -731
- data/erl_src/riak_search_test_backend.beam +0 -0
- data/erl_src/riak_search_test_backend.erl +0 -175
- data/lib/riak/client/excon_backend.rb +0 -172
- data/lib/riak/client/http_backend.rb +0 -413
- data/lib/riak/client/http_backend/bucket_streamer.rb +0 -15
- data/lib/riak/client/http_backend/chunked_json_streamer.rb +0 -42
- data/lib/riak/client/http_backend/configuration.rb +0 -227
- data/lib/riak/client/http_backend/key_streamer.rb +0 -15
- data/lib/riak/client/http_backend/object_methods.rb +0 -114
- data/lib/riak/client/http_backend/request_headers.rb +0 -34
- data/lib/riak/client/http_backend/transport_methods.rb +0 -201
- data/lib/riak/client/instrumentation.rb +0 -25
- data/lib/riak/client/net_http_backend.rb +0 -82
- data/lib/riak/cluster.rb +0 -151
- data/lib/riak/failed_request.rb +0 -81
- data/lib/riak/instrumentation.rb +0 -6
- data/lib/riak/node.rb +0 -40
- data/lib/riak/node/configuration.rb +0 -304
- data/lib/riak/node/console.rb +0 -133
- data/lib/riak/node/control.rb +0 -207
- data/lib/riak/node/defaults.rb +0 -85
- data/lib/riak/node/generation.rb +0 -127
- data/lib/riak/node/log.rb +0 -34
- data/lib/riak/node/version.rb +0 -29
- data/lib/riak/search.rb +0 -3
- data/lib/riak/test_server.rb +0 -89
- data/lib/riak/util/headers.rb +0 -32
- data/lib/riak/util/multipart.rb +0 -52
- data/lib/riak/util/multipart/stream_parser.rb +0 -62
- data/spec/fixtures/munchausen.txt +0 -1033
- data/spec/integration/riak/cluster_spec.rb +0 -88
- data/spec/integration/riak/http_backends_spec.rb +0 -180
- data/spec/integration/riak/node_spec.rb +0 -170
- data/spec/integration/riak/test_server_spec.rb +0 -57
- data/spec/riak/excon_backend_spec.rb +0 -102
- data/spec/riak/headers_spec.rb +0 -21
- data/spec/riak/http_backend/configuration_spec.rb +0 -273
- data/spec/riak/http_backend/object_methods_spec.rb +0 -243
- data/spec/riak/http_backend/transport_methods_spec.rb +0 -97
- data/spec/riak/http_backend_spec.rb +0 -367
- data/spec/riak/instrumentation_spec.rb +0 -167
- data/spec/riak/multipart_spec.rb +0 -23
- data/spec/riak/net_http_backend_spec.rb +0 -15
- data/spec/riak/stream_parser_spec.rb +0 -53
- data/spec/support/drb_mock_server.rb +0 -39
- data/spec/support/http_backend_implementation_examples.rb +0 -253
- data/spec/support/mock_server.rb +0 -81
- data/spec/support/mocks.rb +0 -4
- data/spec/support/riak_test.rb +0 -77
- data/spec/support/sometimes.rb +0 -46
- data/spec/support/test_server.rb +0 -61
- data/spec/support/test_server.yml.example +0 -14
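
The file list above shows the shape of the 2.0 release: new CRDT data types (data/lib/riak/crdt/*), a protocol-buffers-only transport, security support, and Yokozuna search, with the HTTP transport and the embedded test-node tooling removed. As a rough orientation only, the sketch below shows how the new CRDT classes are typically used. It is not taken from this diff; it assumes a reachable Riak 2.0 node with counter/set/map bucket types named counters, sets, and maps created and activated (the library's defaults), and all bucket/key names are made up.

    require 'riak'

    # Defaults to a single node at 127.0.0.1, protocol buffers port 8087.
    client = Riak::Client.new

    counter = Riak::Crdt::Counter.new(client.bucket('page-hits'), 'index.html')
    counter.increment            # add 1
    counter.increment 5          # add 5
    puts counter.value           # current total

    set = Riak::Crdt::Set.new(client.bucket('follows'), 'alice')
    set.add 'bob'
    set.remove 'bob'
    puts set.members.inspect     # a Ruby Set of member strings

    map = Riak::Crdt::Map.new(client.bucket('users'), 'alice')
    map.registers['name'] = 'Alice'
    map.flags['admin'] = false
    map.counters['logins'].increment
    map.sets['languages'].add 'ruby'

Each mutation is sent to Riak as it is made unless wrapped in the batch helpers (see data/lib/riak/crdt/batch_counter.rb and batch_map.rb above).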
Binary file (content not shown)

data/erl_src/riak_search_test_backend.erl (removed)
@@ -1,175 +0,0 @@
-%% -------------------------------------------------------------------
-%%
-%% Copyright (c) 2007-2010 Basho Technologies, Inc. All Rights Reserved.
-%%
-%% -------------------------------------------------------------------
-
--module(riak_search_test_backend).
--behavior(riak_search_backend).
-
--export([
-        reset/0,
-        start/2,
-        stop/1,
-        index/2,
-        delete/2,
-        stream/6,
-        range/8,
-        info/5,
-        fold/3,
-        is_empty/1,
-        drop/1
-       ]).
--export([
-        stream_results/3
-       ]).
-
--include_lib("riak_search/include/riak_search.hrl").
--define(T(P), list_to_atom("rs" ++ integer_to_list(P))).
--record(state, {partition, table}).
-
-reset() ->
-    {ok, Ring} = riak_core_ring_manager:get_my_ring(),
-    [ catch ets:delete_all_objects(?T(P)) ||
-        P <- riak_core_ring:my_indices(Ring) ],
-    riak_search_config:clear(),
-    ok.
-
-start(Partition, _Config) ->
-    Table = ets:new(?T(Partition),
-                    [named_table, public, ordered_set]),
-    {ok, #state{partition=Partition, table=Table}}.
-
-stop(State) ->
-    maybe_delete(State).
-
-index(IFTVPKList, #state{table=Table}=State) ->
-    lists:foreach(
-      fun({I, F, T, V, P, K}) ->
-              Key = {b(I), b(F), b(T), b(V)},
-              case ets:lookup(Table, Key) of
-                  [{_, _, ExistingKeyClock}] ->
-                      if ExistingKeyClock > K ->
-                              %% stored data is newer
-                              ok;
-                         true ->
-                              %% stored data is older
-                              ets:update_element(Table, Key,
-                                                 [{2, P},{3, K}])
-                      end;
-                  [] ->
-                      ets:insert(Table, {Key, P, K})
-              end
-      end,
-      IFTVPKList),
-    {reply, {indexed, node()}, State}.
-
-delete(IFTVKList, State) ->
-    Table = State#state.table,
-    lists:foreach(fun(IFTVK) -> delete_fun(IFTVK, Table) end, IFTVKList),
-    {reply, {deleted, node()}, State}.
-
-delete_fun({I, F, T, V, K}, Table) ->
-    Key = {b(I), b(F), b(T), b(V)},
-    case ets:lookup(Table, Key) of
-        [{Key, _Props, ExistingKeyClock}] ->
-            if ExistingKeyClock > K ->
-                    %% stored data is newer
-                    ok;
-               true ->
-                    %% stored data is older
-                    ets:delete(Table, Key)
-            end;
-        [] ->
-            ok
-    end;
-delete_fun({I, F, T, V, _P, K}, Table) ->
-    %% copied idea from merge_index_backend
-    %% other operations include Props, though delete shouldn't
-    delete_fun({I, F, T, V, K}, Table).
-
-info(Index, Field, Term, Sender, State) ->
-    Count = ets:select_count(State#state.table,
-                             [{{{b(Index), b(Field), b(Term), '_'},
-                                '_', '_'},
-                               [],[true]}]),
-    riak_search_backend:info_response(Sender, [{Term, node(), Count}]),
-    noreply.
-
--define(STREAM_SIZE, 100).
-
-range(Index, Field, StartTerm, EndTerm, _Size, FilterFun, Sender, State) ->
-    ST = b(StartTerm),
-    ET = b(EndTerm),
-    spawn(riak_search_ets_backend, stream_results,
-          [Sender,
-           FilterFun,
-           ets:select(State#state.table,
-                      [{{{b(Index), b(Field), '$1', '$2'}, '$3', '_'},
-                        [{'>=', '$1', ST}, {'=<', '$1', ET}],
-                        [{{'$2', '$3'}}]}],
-                      ?STREAM_SIZE)]),
-    noreply.
-
-stream(Index, Field, Term, FilterFun, Sender, State) ->
-    spawn(riak_search_ets_backend, stream_results,
-          [Sender,
-           FilterFun,
-           ets:select(State#state.table,
-                      [{{{b(Index), b(Field), b(Term), '$1'}, '$2', '_'},
-                        [], [{{'$1', '$2'}}]}],
-                      ?STREAM_SIZE)]),
-    noreply.
-
-stream_results(Sender, FilterFun, {Results0, Continuation}) ->
-    case lists:filter(fun({V,P}) -> FilterFun(V, P) end, Results0) of
-        [] ->
-            ok;
-        Results ->
-            riak_search_backend:response_results(Sender, Results)
-    end,
-    stream_results(Sender, FilterFun, ets:select(Continuation));
-stream_results(Sender, _, '$end_of_table') ->
-    riak_search_backend:response_done(Sender).
-
-fold(FoldFun, Acc, State) ->
-    Fun = fun({{I,F,T,V},P,K}, {OuterAcc, {{I,{F,T}},InnerAcc}}) ->
-                  %% same IFT, just accumulate doc/props/clock
-                  {OuterAcc, {{I,{F,T}},[{V,P,K}|InnerAcc]}};
-             ({{I,F,T,V},P,K}, {OuterAcc, {FoldKey, VPKList}}) ->
-                  %% finished a string of IFT, send it off
-                  %% (sorted order is assumed)
-                  NewOuterAcc = FoldFun(FoldKey, VPKList, OuterAcc),
-                  {NewOuterAcc, {{I,{F,T}},[{V,P,K}]}};
-             ({{I,F,T,V},P,K}, {OuterAcc, undefined}) ->
-                  %% first round through the fold - just start building
-                  {OuterAcc, {{I,{F,T}},[{V,P,K}]}}
-          end,
-    {OuterAcc0, Final} = ets:foldl(Fun, {Acc, undefined}, State#state.table),
-    OuterAcc = case Final of
-                   {FoldKey, VPKList} ->
-                       %% one last IFT to send off
-                       FoldFun(FoldKey, VPKList, OuterAcc0);
-                   undefined ->
-                       %% this partition was empty
-                       OuterAcc0
-               end,
-    {reply, OuterAcc, State}.
-
-is_empty(State) ->
-    0 == ets:info(State#state.table, size).
-
-drop(State) ->
-    maybe_delete(State).
-
-maybe_delete(State) ->
-    case lists:member(State#state.table, ets:all()) of
-        true ->
-            ets:delete(State#state.table),
-            ok;
-        false ->
-            ok
-    end.
-
-b(Binary) when is_binary(Binary) -> Binary;
-b(List) when is_list(List) -> iolist_to_binary(List).
data/lib/riak/client/excon_backend.rb (removed)
@@ -1,172 +0,0 @@
-require 'riak/failed_request'
-require 'riak/client/http_backend'
-require 'riak/client/http_backend/request_headers'
-
-module Riak
-  class Client
-    # An HTTP backend for Riak::Client that uses Wesley Beary's Excon
-    # HTTP library. Conforms to the Riak::Client::HTTPBackend
-    # interface.
-    class ExconBackend < HTTPBackend
-      def self.configured?
-        begin
-          require 'excon'
-          minimum_version?("0.5.7") && register_exceptions && handle_deprecations && patch_sockets
-        rescue LoadError
-          false
-        end
-      end
-
-      # Adds Excon's relevant internal exceptions to the rescuable
-      # network-related errors.
-      def self.register_exceptions
-        unless Client::NETWORK_ERRORS.include?(Excon::Errors::SocketError)
-          Client::NETWORK_ERRORS << Excon::Errors::SocketError
-          Client::NETWORK_ERRORS << Excon::Errors::Timeout if defined? Excon::Errors::Timeout
-        end
-        true
-      end
-
-      # Adjusts Excon's connection collection to allow multiple
-      # connections to the same host from the same Thread. Instead we
-      # use the Riak::Client::Pool to segregate connections.
-      # @note This can be changed when Excon has a proper pool of its own.
-      def self.patch_sockets
-        unless defined? @@patched
-          ::Excon::Connection.class_eval do
-            def sockets
-              @sockets ||= {}
-            end
-          end
-        end
-        @@patched = true
-      end
-
-      # Defines instance methods that handle changes in the Excon API
-      # across different versions.
-      def self.handle_deprecations
-        # Define #make_request
-        unless method_defined?(:make_request)
-          if minimum_version?("0.10.2")
-            def make_request(params, block)
-              params[:response_block] = block if block
-              connection.request(params)
-            end
-          else
-            def make_request(params, block)
-              response = connection.request(params, &block)
-            end
-          end
-          private :make_request
-        end
-
-        # Define #configure_ssl
-        unless method_defined?(:configure_ssl)
-          if minimum_version?("0.9.6")
-            def configure_ssl
-              Excon.defaults[:ssl_verify_peer] = (@node.ssl_options[:verify_mode].to_s === "peer")
-              Excon.defaults[:ssl_ca_path] = @node.ssl_options[:ca_path] if @node.ssl_options[:ca_path]
-            end
-          else
-            def configure_ssl
-              Excon.ssl_verify_peer = (@node.ssl_options[:verify_mode].to_s === "peer")
-              Excon.ssl_ca_path = @node.ssl_options[:ca_path] if @node.ssl_options[:ca_path]
-            end
-          end
-          private :configure_ssl
-        end
-        true
-      end
-
-      # Returns true if the Excon library is at least the given
-      # version. This is used inside the backend to check how to
-      # provide certain request and configuration options.
-      def self.minimum_version?(version)
-        Gem::Version.new(Excon::VERSION) >= Gem::Version.new(version)
-      end
-
-      # Sets the connect timeout applied to the Excon connection
-      # Increase this if you have very long request times.
-      def self.connect_timeout=(timeout)
-        @connect_timeout = timeout
-      end
-
-      def self.connect_timeout
-        @connect_timeout ||= 4096
-      end
-
-      # Sets the read_timeout applied to the Excon connection
-      # Increase this if you have very long request times.
-      def self.read_timeout=(timeout)
-        @read_timeout = timeout
-      end
-
-      def self.read_timeout
-        @read_timeout ||= 4096
-      end
-
-      # Sets the write_timeout applied to the Excon connection
-      # Increase this if you have very long request times.
-      def self.write_timeout=(timeout)
-        @write_timeout = timeout
-      end
-
-      def self.write_timeout
-        @write_timeout ||= 4096
-      end
-
-      def teardown
-        connection.reset
-      end
-
-      private
-      def perform(method, uri, headers, expect, data=nil, &block)
-        configure_ssl if @node.ssl_enabled?
-
-        params = {
-          :method => method.to_s.upcase,
-          :headers => RequestHeaders.new(headers).to_hash,
-          :path => uri.path
-        }
-        params[:query] = uri.query if uri.query
-        params[:body] = data if [:put,:post].include?(method)
-        params[:idempotent] = (method != :post)
-
-        # Later versions of Excon pass multiple arguments to the block
-        block = lambda {|*args| yield args.first } if block_given?
-
-        response = make_request(params, block)
-        response_headers.initialize_http_header(response.headers)
-
-        if valid_response?(expect, response.status)
-          result = {:headers => response_headers.to_hash, :code => response.status}
-          if return_body?(method, response.status, block_given?)
-            result[:body] = response.body
-          end
-          result
-        else
-          raise HTTPFailedRequest.new(method, expect, response.status, response_headers.to_hash, response.body)
-        end
-      end
-
-      def connection
-        @connection ||= new_connection
-      end
-
-      def new_connection
-        params = { :read_timeout => self.class.read_timeout,
-                   :write_timeout => self.class.write_timeout,
-                   :connect_timeout => self.class.connect_timeout }
-        args = [ params ]
-        if self.class.minimum_version?("0.19.0")
-          params.merge!(:scheme => root_uri.scheme,
-                        :host => root_uri.host,
-                        :port => root_uri.port)
-        else
-          args.unshift root_uri.to_s
-        end
-        Excon::Connection.new(*args)
-      end
-    end
-  end
-end
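
With the Excon and Net::HTTP transports gone, riak-client 2.0 talks to Riak only over protocol buffers (the new beefcake socket, protocol, and message files added above). A minimal connection sketch follows; the hostnames and bucket/key names are placeholders, not anything from this diff.

    require 'riak'

    # Point the client at one or more protocol buffers endpoints.
    client = Riak::Client.new nodes: [
      { host: 'riak1.example.com', pb_port: 8087 },
      { host: 'riak2.example.com', pb_port: 8087 }
    ]

    client.ping                          # => true if a node answers

    bucket = client.bucket 'users'
    robject = bucket.get_or_new 'alice'
    robject.content_type = 'application/json'
    robject.data = { 'name' => 'Alice' }
    robject.store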
data/lib/riak/client/http_backend.rb (removed)
@@ -1,413 +0,0 @@
-require 'riak/util/escape'
-require 'riak/util/translation'
-require 'riak/util/multipart'
-require 'riak/util/multipart/stream_parser'
-require 'riak/json'
-require 'riak/client'
-require 'riak/bucket'
-require 'riak/robject'
-require 'riak/client/http_backend/transport_methods'
-require 'riak/client/http_backend/object_methods'
-require 'riak/client/http_backend/configuration'
-require 'riak/client/http_backend/key_streamer'
-require 'riak/client/http_backend/bucket_streamer'
-require 'riak/client/feature_detection'
-
-module Riak
-  class Client
-    # The parent class for all backends that connect to Riak via
-    # HTTP. This class implements all of the universal backend API
-    # methods on behalf of subclasses, which need only implement the
-    # {TransportMethods#perform} method for library-specific
-    # semantics.
-    class HTTPBackend
-      include Util::Escape
-      include Util::Translation
-      include FeatureDetection
-
-      include TransportMethods
-      include ObjectMethods
-      include Configuration
-
-      # The Riak::Client that uses this backend
-      attr_reader :client
-
-      # The Riak::Client::Node that uses this backend
-      attr_reader :node
-
-      # Create an HTTPBackend for the Riak::Client.
-      # @param [Client] The client
-      # @param [Node] The node we're connecting to.
-      def initialize(client, node)
-        raise ArgumentError, t("client_type", :client => client) unless Client === client
-        raise ArgumentError, t("node_type", :node => node) unless Node === node
-        @client = client
-        @node = node
-      end
-
-      # Pings the server
-      # @return [true,false] whether the server is available
-      def ping
-        get(200, ping_path)
-        true
-      rescue
-        false
-      end
-
-      # Fetches an object by bucket/key
-      # @param [Bucket, String] bucket the bucket where the object is
-      #   stored
-      # @param [String] key the key of the object
-      # @param [Hash] options request quorums
-      # @option options [Fixnum, String, Symbol] :r the read quorum for the
-      #   request - how many nodes should concur on the read
-      # @option options [Fixnum, String, Symbol] :pr the "primary"
-      #   read quorum for the request - how many primary partitions
-      #   must be available
-      # @return [RObject] the fetched object
-      def fetch_object(bucket, key, options={})
-        bucket = Bucket.new(client, bucket) if String === bucket
-        method = options.delete(:head) ? :head : :get
-        response = send(method, [200,300], object_path(bucket.name, key, options))
-        load_object(RObject.new(bucket, key), response)
-      end
-
-      # Reloads the data for a given RObject, a special case of {#fetch_object}.
-      def reload_object(robject, options={})
-        response = get([200,300,304], object_path(robject.bucket.name, robject.key, options), reload_headers(robject))
-        if response[:code].to_i == 304
-          robject
-        else
-          load_object(robject, response)
-        end
-      end
-
-      # Stores an object
-      # @param [RObject] robject the object to store
-      # @param [Hash] options quorum and storage options
-      # @option options [true,false] :returnbody (false) whether to update the object
-      #   after write with the new value
-      # @option options [Fixnum, String, Symbol] :w the write quorum
-      # @option options [Fixnum, String, Symbol] :pw the "primary"
-      #   write quorum - how many primary partitions must be available
-      # @option options [Fixnum, String, Symbol] :dw the durable write quorum
-      def store_object(robject, options={})
-        method, codes = if robject.key.present?
-                          [:put, [200,204,300]]
-                        else
-                          [:post, 201]
-                        end
-        response = send(method, codes, object_path(robject.bucket.name, robject.key, options), robject.raw_data, store_headers(robject))
-        load_object(robject, response) if options[:returnbody]
-      end
-
-      # Deletes an object
-      # @param [Bucket, String] bucket the bucket where the object
-      #   lives
-      # @param [String] key the key where the object lives
-      # @param [Hash] options quorum and delete options
-      # @options options [Fixnum, String, Symbol] :rw the read/write quorum for
-      #   the request
-      # @options options [String] :vclock the vector clock of the
-      #   object to be deleted
-      def delete_object(bucket, key, options={})
-        bucket = bucket.name if Bucket === bucket
-        vclock = options.delete(:vclock)
-        headers = vclock ? {"X-Riak-VClock" => vclock} : {}
-        delete([204, 404], object_path(bucket, key, options), headers)
-      end
-
-      # Fetches a counter
-      # @param [Bucket, String] bucket the bucket where the counter exists
-      # @param [String] key the key for the counter
-      # @param [Hash] options unused
-      def get_counter(bucket, key, options={})
-        bucket = bucket.name if bucket.is_a? Bucket
-        response = get([200, 404], counter_path(bucket, key, options))
-        case response[:code]
-        when 200
-          return response[:body].to_i
-        when 404
-          return 0
-        end
-      end
-
-      # Updates a counter
-      # @param [Bucket, String] bucket the bucket where the counter exists
-      # @param [String] key the key for the counter
-      # @param [Integer] amount how much to increment the counter
-      # @param [Hash] options unused
-      def post_counter(bucket, key, amount, options={})
-        bucket = bucket.name if bucket.is_a? Bucket
-        response = post([200, 204], counter_path(bucket, key, options), amount.to_s)
-        case response[:code]
-        when 200
-          return response[:body].to_i
-        when 204
-          return 0 if options[:return_value]
-          return nil
-        end
-      end
-
-      # Fetches bucket properties
-      # @param [Bucket, String] bucket the bucket properties to fetch
-      # @return [Hash] bucket properties
-      def get_bucket_props(bucket)
-        bucket = bucket.name if Bucket === bucket
-        response = get(200, bucket_properties_path(bucket))
-        JSON.parse(response[:body])['props']
-      end
-
-      # Sets bucket properties
-      # @param [Bucket, String] bucket the bucket to set properties on
-      # @param [Hash] properties the properties to set
-      def set_bucket_props(bucket, props)
-        bucket = bucket.name if Bucket === bucket
-        body = {'props' => props}.to_json
-        put(204, bucket_properties_path(bucket), body, {"Content-Type" => "application/json"})
-      end
-
-      # Clears bucket properties
-      # @param [Bucket, String] bucket the bucket to clear properties
-      #   on
-      # @return [true, false] whether the operation succeeded
-      # @note false will be returned if the operation is not supported
-      #   on the connected node
-      def clear_bucket_props(bucket)
-        if http_props_clearable?
-          bucket = bucket.name if Bucket === bucket
-          delete(204, bucket_properties_path(bucket))
-          true
-        else
-          false
-        end
-      end
-
-      # List keys in a bucket
-      # @param [Bucket, String] bucket the bucket to fetch the keys
-      #   for
-      # @yield [Array<String>] a list of keys from the current
-      #   streamed chunk
-      # @return [Array<String>] the list of keys, if no block was given
-      def list_keys(bucket, options={}, &block)
-        bucket = bucket.name if Bucket === bucket
-        if block_given?
-          stream_opts = options.merge keys: 'stream'
-          get(200, key_list_path(bucket, stream_opts), {}, &KeyStreamer.new(block))
-        else
-          list_opts = options.merge keys: true
-          response = get(200, key_list_path(bucket, list_opts))
-          obj = JSON.parse(response[:body])
-          obj && obj['keys'].map {|k| unescape(k) }
-        end
-      end
-
-      # Lists known buckets
-      # @return [Array<String>] the list of buckets
-      def list_buckets(options = {}, &block)
-        if block_given?
-          get(200, bucket_list_path(options.merge(stream: true)), &BucketStreamer.new(block))
-          return
-        end
-
-        response = get(200, bucket_list_path)
-        JSON.parse(response[:body])['buckets']
-      end
-
-      # Performs a MapReduce query.
-      # @param [MapReduce] mr the query to perform
-      # @yield [Fixnum, Object] the phase number and single result
-      #   from the phase
-      # @return [Array<Object>] the list of results, if no block was
-      #   given
-      def mapred(mr)
-        raise MapReduceError.new(t("empty_map_reduce_query")) if mr.query.empty? && !mapred_phaseless?
-        if block_given?
-          parser = Riak::Util::Multipart::StreamParser.new do |response|
-            result = JSON.parse(response[:body])
-            yield result['phase'], result['data']
-          end
-          post(200, mapred_path({:chunked => true}), mr.to_json, {"Content-Type" => "application/json", "Accept" => "application/json"}, &parser)
-          nil
-        else
-          results = MapReduce::Results.new(mr)
-          parser = Riak::Util::Multipart::StreamParser.new do |response|
-            result = JSON.parse(response[:body])
-            results.add result['phase'], result['data']
-          end
-          post(200, mapred_path({:chunked => true}), mr.to_json, {"Content-Type" => "application/json", "Accept" => "application/json"}, &parser)
-          results.report
-        end
-      end
-
-      # Gets health statistics
-      # @return [Hash] information about the server, including stats
-      def stats
-        response = get(200, stats_path)
-        JSON.parse(response[:body])
-      end
-
-      # Performs a link-walking query
-      # @param [RObject] robject the object to start at
-      # @param [Array<WalkSpec>] walk_specs a list of walk
-      #   specifications to process
-      # @return [Array<Array<RObject>>] a list of the matched objects,
-      #   grouped by phase
-      def link_walk(robject, walk_specs)
-        response = get(200, link_walk_path(robject.bucket.name, robject.key, walk_specs))
-        if boundary = Util::Multipart.extract_boundary(response[:headers]['content-type'].first)
-          Util::Multipart.parse(response[:body], boundary).map do |group|
-            group.map do |obj|
-              if obj[:headers] && !obj[:headers]['x-riak-deleted'] && !obj[:body].blank? && obj[:headers]['location']
-                link = Riak::Link.new(obj[:headers]['location'].first, "")
-                load_object(RObject.new(client.bucket(link.bucket), link.key), obj)
-              end
-            end.compact
-          end
-        else
-          []
-        end
-      end
-
-      # Performs a secondary-index query.
-      # @param [String, Bucket] bucket the bucket to query
-      # @param [String] index the index to query
-      # @param [String, Integer, Range] query the equality query or
-      #   range query to perform
-      # @return [Array<String>] a list of keys matching the query
-      def get_index(bucket, index, query, options={})
-        bucket = bucket.name if Bucket === bucket
-        path = case query
-               when Range
-                 raise ArgumentError, t('invalid_index_query', :value => query.inspect) unless String === query.begin || Integer === query.end
-                 index_range_path(bucket, index, query.begin, query.end, options)
-               when String, Integer
-                 index_eq_path(bucket, index, query, options)
-               else
-                 raise ArgumentError, t('invalid_index_query', :value => query.inspect)
-               end
-        if block_given?
-          parser = Riak::Util::Multipart::StreamParser.new do |response|
-            result = JSON.parse response[:body]
-
-            yield result['keys'] || result['results'] || []
-          end
-          get(200, path, &parser)
-        else
-          begin
-            response = get(200, path)
-            Riak::IndexCollection.new_from_json response[:body]
-          rescue HTTPFailedRequest => e
-            if match = e.message.match(/indexes_not_supported,(\w+)/)
-              raise HTTPFailedRequest.new :get, 200, 500, e.headers, t('index.wrong_backend', backend: match[1])
-            end
-          end
-        end
-      end
-
-      # (Riak Search) Performs a search query
-      # @param [String,nil] index the index to query, or nil for the
-      #   default
-      # @param [String] query the Lucene query to perform
-      # @param [Hash] options query options
-      # @see Client#search
-      def search(index, query, options={})
-        response = get(200, solr_select_path(index, query, options.stringify_keys))
-        if response[:headers]['content-type'].include?("application/json")
-          normalize_search_response JSON.parse(response[:body])
-        else
-          response[:body]
-        end
-      end
-
-      # (Riak Search) Updates a search index (includes deletes).
-      # @param [String, nil] index the index to update, or nil for the
-      #   default index.
-      # @param [String] updates an XML update string in Solr's required format
-      # @see Client#index
-      def update_search_index(index, updates)
-        post(200, solr_update_path(index), updates, {'Content-Type' => 'text/xml'})
-      end
-
-      # (Luwak) Fetches a file from the Luwak large-file interface.
-      # @param [String] filename the name of the file
-      # @yield [chunk] A block which will receive individual chunks of
-      #   the file as they are streamed
-      # @yieldparam [String] chunk a block of the file
-      # @return [IO, nil] the file (also having content_type and
-      #   original_filename accessors). The file will need to be
-      #   reopened to be read
-      def get_file(filename, &block)
-        if block_given?
-          get(200, luwak_path(filename), &block)
-          nil
-        else
-          tmpfile = LuwakFile.new(escape(filename))
-          begin
-            response = get(200, luwak_path(filename)) do |chunk|
-              tmpfile.write chunk
-            end
-            tmpfile.content_type = response[:headers]['content-type'].first
-            tmpfile
-          ensure
-            tmpfile.close
-          end
-        end
-      end
-
-      # (Luwak) Detects whether a file exists in the Luwak large-file
-      # interface.
-      # @param [String] filename the name of the file
-      # @return [true,false] whether the file exists
-      def file_exists?(filename)
-        result = head([200,404], luwak_path(filename))
-        result[:code] == 200
-      end
-
-      # (Luwak) Deletes a file from the Luwak large-file interface.
-      # @param [String] filename the name of the file
-      def delete_file(filename)
-        delete([204,404], luwak_path(filename))
-      end
-
-      # (Luwak) Uploads a file to the Luwak large-file interface.
-      # @overload store_file(filename, content_type, data)
-      #   Stores the file at the given key/filename
-      #   @param [String] filename the key/filename for the object
-      #   @param [String] content_type the MIME Content-Type for the data
-      #   @param [IO, String] data the contents of the file
-      # @overload store_file(content_type, data)
-      #   Stores the file with a server-determined key/filename
-      #   @param [String] content_type the MIME Content-Type for the data
-      #   @param [String, #read] data the contents of the file
-      # @return [String] the key/filename where the object was stored
-      def store_file(*args)
-        data, content_type, filename = args.reverse
-        if filename
-          put(204, luwak_path(filename), data, {"Content-Type" => content_type})
-          filename
-        else
-          response = post(201, luwak_path(nil), data, {"Content-Type" => content_type})
-          response[:headers]["location"].first.split("/").last
-        end
-      end
-
-      private
-      def normalize_search_response(json)
-        {}.tap do |result|
-          if json['response']
-            result['num_found'] = json['response']['numFound']
-            result['max_score'] = json['response']['maxScore'].to_f
-            result['docs'] = json['response']['docs'].map do |d|
-              if d['fields']
-                d['fields'].merge('id' => d['id'])
-              else
-                d
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-end
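
The Solr-over-HTTP search and Luwak helpers above disappear with this file; search in 2.0 goes through the new Yokozuna support instead (data/lib/riak/client/yokozuna.rb and the yokozuna specs listed earlier), while the Luwak and link-walking helpers are removed along with the HTTP transport. The sketch below is a rough orientation only, not taken from this diff: the index and bucket names are made up, and the exact method names and options should be checked against the updated README.

    require 'riak'

    client = Riak::Client.new

    # Create a Yokozuna index and attach it to a bucket (names are examples).
    client.create_search_index 'famous'
    client.bucket('cats').props = { 'search_index' => 'famous' }

    # Query it; results come back as a hash of Solr-style fields.
    results = client.search('famous', 'name_s:Lion*')
    results['num_found']
    results['docs'].each { |doc| puts doc['_yz_rk'] }   # keys of matching objects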