omgf 0.0.0.GIT
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.document +4 -0
- data/.gitignore +14 -0
- data/.wrongdoc.yml +6 -0
- data/COPYING +661 -0
- data/GIT-VERSION-GEN +40 -0
- data/GNUmakefile +5 -0
- data/README +69 -0
- data/examples/hyst.README +90 -0
- data/examples/hyst.bash +365 -0
- data/lib/omgf.rb +6 -0
- data/lib/omgf/hysterical_raisins.rb +336 -0
- data/lib/omgf/pool.rb +48 -0
- data/omgf.gemspec +26 -0
- data/pkg.mk +175 -0
- data/test/integration.rb +201 -0
- data/test/test_hyst.rb +78 -0
- data/test/test_hysterical_raisins.rb +238 -0
- metadata +134 -0
data/GIT-VERSION-GEN
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
#!/bin/sh
# GIT-VERSION-GEN - generate GIT-VERSION-FILE containing the current version.
# The version comes from (in order of preference): a `version' file shipped
# in release tarballs, `git describe' for development checkouts, or DEF_VER
# as a last resort.  $GVF is only rewritten when the version actually
# changes, so make(1) rules depending on it do not rebuild needlessly.

GVF=GIT-VERSION-FILE
DEF_VER=v0.0.0.GIT

# a literal newline, used to reject multi-line git-describe output
LF='
'

# First see if there is a version file (included in release tarballs),
# then try git-describe, then default.
if test -f version
then
	VN=$(cat version) || VN="$DEF_VER"
elif test -d .git -o -f .git &&
	VN=$(git describe --abbrev=4 HEAD 2>/dev/null) &&
	case "$VN" in
	*$LF*) (exit 1) ;;
	v[0-9]*)
		# mark checkouts with uncommitted changes as "-dirty"
		git update-index -q --refresh
		test -z "$(git diff-index --name-only HEAD --)" ||
		VN="$VN-dirty" ;;
	esac
then
	# RubyGems-friendly version: dashes become dots
	VN=$(echo "$VN" | sed -e 's/-/./g');
else
	VN="$DEF_VER"
fi

# strip any leading "v"; the pattern must be fully quoted, otherwise the
# shell may glob the bare `v*' against files named v... in the
# current directory and corrupt the expr(1) pattern
VN=$(expr "$VN" : 'v*\(.*\)')

# only rewrite $GVF when the recorded version differs
if test -r $GVF
then
	VC=$(sed -e 's/^GIT_VERSION = //' <$GVF)
else
	VC=unset
fi
test "$VN" = "$VC" || {
	echo >&2 "GIT_VERSION = $VN"
	echo "GIT_VERSION = $VN" >$GVF
}
|
data/GNUmakefile
ADDED
data/README
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
= OMGF - hysterical REST API for MogileFS using Rack
|
|
2
|
+
|
|
3
|
+
OMGF provides an HTTP interface for {MogileFS}[http://mogilefs.org],
|
|
4
|
+
allowing clients to connect to MogileFS without needing specialized
|
|
5
|
+
libraries to interface with MogileFS. Currently, WebDAV is supported.
|
|
6
|
+
OMGF is short for "OMGFiles", an anagram for "MogileFS".
|
|
7
|
+
|
|
8
|
+
It is built on top of the
|
|
9
|
+
{Ruby mogilefs-client}[http://bogomips.org/mogilefs-client] library
|
|
10
|
+
|
|
11
|
+
== Features
|
|
12
|
+
|
|
13
|
+
* Can be used to unify multiple, independent MogileFS installations
|
|
14
|
+
into a single HTTP endpoint.
|
|
15
|
+
|
|
16
|
+
== Install
|
|
17
|
+
|
|
18
|
+
OMGF is only compatible with Ruby 1.9.3 and later.
|
|
19
|
+
|
|
20
|
+
You may install it via RubyGems on RubyGems.org:
|
|
21
|
+
|
|
22
|
+
gem install omgf
|
|
23
|
+
|
|
24
|
+
== Usage
|
|
25
|
+
|
|
26
|
+
TODO...
|
|
27
|
+
|
|
28
|
+
== Development
|
|
29
|
+
|
|
30
|
+
You can get the latest source via git from the following locations
|
|
31
|
+
(these versions may not be stable):
|
|
32
|
+
|
|
33
|
+
git://bogomips.org/omgf.git
|
|
34
|
+
git://repo.or.cz/omgf.git (mirror)
|
|
35
|
+
|
|
36
|
+
You may browse the code from the web and download the latest snapshot
|
|
37
|
+
tarballs here:
|
|
38
|
+
|
|
39
|
+
== Disclaimer
|
|
40
|
+
|
|
41
|
+
There is NO WARRANTY whatsoever if anything goes wrong, but let us know
|
|
42
|
+
and we'll try our best to fix it.
|
|
43
|
+
|
|
44
|
+
All feedback (bug reports, user/development discussion, patches, pull
|
|
45
|
+
requests) go to the mailing list/newsgroup. See the ISSUES document for
|
|
46
|
+
information on the {omgf mailing list}[mailto:omgf@librelist.org]
|
|
47
|
+
|
|
48
|
+
You can (and probably should) restrict access to OMGF hosts/ports.
|
|
49
|
+
Anybody who can access OMGF must also be able to access the source code
|
|
50
|
+
in accordance with the AGPL. OMGF is not written/audited by a security
|
|
51
|
+
expert, so it is likely not secure and you should only give trusted
|
|
52
|
+
users access to it.
|
|
53
|
+
|
|
54
|
+
== License
|
|
55
|
+
|
|
56
|
+
OMGF is copyrighted Free Software by all contributors, see logs in
|
|
57
|
+
revision control for names and email addresses of all of them.
|
|
58
|
+
|
|
59
|
+
You can redistribute OMGF and/or modify it under the terms of the GNU
|
|
60
|
+
Affero General Public License, version 3 or later as published by the
|
|
61
|
+
Free Software Foundation.
|
|
62
|
+
|
|
63
|
+
OMGF is distributed in the hope that it will be useful, but WITHOUT
|
|
64
|
+
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
65
|
+
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
|
|
66
|
+
License for more details.
|
|
67
|
+
|
|
68
|
+
You should have received a copy of the GNU Affero General Public License
|
|
69
|
+
along with OMGF; if not, see https://www.gnu.org/licenses
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
NAME
|
|
2
|
+
hyst - command-line client for OMGF::HystericalRaisins REST API
|
|
3
|
+
SYNOPSIS
|
|
4
|
+
hyst <ls|cat|zcat|get|cp|put|tee|stat|rm|wait-repl>
|
|
5
|
+
DESCRIPTION
|
|
6
|
+
A simple command-line client for the OMGF::HystericalRaisins REST API
|
|
7
|
+
|
|
8
|
+
COMMANDS
|
|
9
|
+
|
|
10
|
+
ls [-l -h -a -A<AFTER>] [<PREFIX>]
|
|
11
|
+
- list keys, matching optional PREFIX
|
|
12
|
+
|
|
13
|
+
Options:
|
|
14
|
+
-l show long listing format: "devcount size key"
|
|
15
|
+
-h with -l, show sizes in human-readable format
|
|
16
|
+
-a list MogileFS-specific metadata keys as well
|
|
17
|
+
-A<AFTER> list keys that are sorted (ASCII-betically) after
|
|
18
|
+
the given argument
|
|
19
|
+
|
|
20
|
+
cat [-r<RANGE>] <KEY>
|
|
21
|
+
- write contents of KEY to stdout
|
|
22
|
+
|
|
23
|
+
Options:
|
|
24
|
+
-r<RANGE> show only the given byte range, see the curl(1)
|
|
25
|
+
documentation for more details
|
|
26
|
+
|
|
27
|
+
zcat <KEY>
|
|
28
|
+
- pipe contents of KEY to (g)zcat and write the result to stdout
|
|
29
|
+
|
|
30
|
+
get [-r<RANGE>] <KEY>
|
|
31
|
+
- download contents of KEY into the basename of KEY
|
|
32
|
+
|
|
33
|
+
Options:
|
|
34
|
+
-r<RANGE> show only the given byte range, see the curl(1)
|
|
35
|
+
documentation for more details
|
|
36
|
+
|
|
37
|
+
cp <FILE> <KEY>
|
|
38
|
+
- copy the contents of FILE into KEY
|
|
39
|
+
|
|
40
|
+
put [-f] <KEY>
|
|
41
|
+
- copy the contents of KEY (on the local FS) into KEY
|
|
42
|
+
|
|
43
|
+
Options:
|
|
44
|
+
-f overwrite existing keys
|
|
45
|
+
|
|
46
|
+
tee [-f] <KEY>
|
|
47
|
+
- read from stdin and write to both KEY and stdout
|
|
48
|
+
|
|
49
|
+
Options:
|
|
50
|
+
-f overwrite existing keys
|
|
51
|
+
|
|
52
|
+
stat <KEY1> [<KEY2> ...]
|
|
53
|
+
- show metadata in an RFC822-like format
|
|
54
|
+
Currently, `Key', `Size', and `URL-$nr' keys are shown
|
|
55
|
+
|
|
56
|
+
rm <KEY1> [<KEY2> ...]
|
|
57
|
+
- delete given keys
|
|
58
|
+
|
|
59
|
+
wait-repl [-t<TIMEOUT> -n<COUNT>] <KEY>
|
|
60
|
+
- poll the HystericalRaisins server until the replication count of KEY has
|
|
61
|
+
reached the given COUNT. Will exit with status code 1 on
|
|
62
|
+
failure after TIMEOUT seconds and 0 on success if the desired
|
|
63
|
+
replication count is reached.
|
|
64
|
+
|
|
65
|
+
Options:
|
|
66
|
+
-n<COUNT> Wait until there are COUNT copies of KEY.
|
|
67
|
+
Default: 2 copies
|
|
68
|
+
-t<TIMEOUT> exit with failure after TIMEOUT seconds have passed
|
|
69
|
+
and COUNT is not reached. Default is 3600 seconds.
|
|
70
|
+
|
|
71
|
+
ENVIRONMENT
|
|
72
|
+
|
|
73
|
+
MOG_DOMAIN must be set to your MogileFS namespace/domain
|
|
74
|
+
|
|
75
|
+
HYST_HOST needs to be set to point to the HOST:PORT of your
|
|
76
|
+
HystericalRaisins server.
|
|
77
|
+
|
|
78
|
+
HYST_CURL_OPTS specifies command-line options `hyst' passes to
|
|
79
|
+
curl(1). Default: --silent --show-error --fail --no-buffer
|
|
80
|
+
|
|
81
|
+
FILES
|
|
82
|
+
|
|
83
|
+
~/.hystrc - a Bourne-Again SHell shell snippet for sourcing environment
|
|
84
|
+
variables (e.g. HYST_HOST, HYST_CURL_OPTS)
|
|
85
|
+
|
|
86
|
+
SEE ALSO
|
|
87
|
+
curl(1)
|
|
88
|
+
|
|
89
|
+
AUTHOR
|
|
90
|
+
Eric Wong <normalperson@yhbt.net>
|
data/examples/hyst.bash
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
1
|
+
#!/bin/bash
# Copyright (C) 2008-2012, Eric Wong <normalperson@yhbt.net>
# License: AGPLv3 or later (https://www.gnu.org/licenses/agpl-3.0.txt)
#
# hyst - command-line client for the OMGF::HystericalRaisins REST API,
# implemented as a thin wrapper around curl(1).

# abort on the first failed command; pipefail so a failure in any
# pipeline stage (e.g. curl | gzip) is not silently swallowed
set -e
# bash v3+ is required for pipefail
set -o pipefail

# standard environment, hyst only supports printable URL-safe characters
# in the PATH anyways and payload data is binary (like any other UNIX fs).
unset CDPATH
export LC_ALL=C LANG=C
|
|
13
|
+
# die MESSAGE...: print MESSAGE to stderr and terminate with status 1.
die() {
	echo "$@" >&2
	exit 1
}
|
|
17
|
+
|
|
18
|
+
# load optional per-user configuration (may set HYST_HOST, HYST_CURL_OPTS,
# MOG_DOMAIN, ...)
if test -f ~/.hystrc
then
	. ~/.hystrc
fi

# both are required to build request URLs below
test -n "$HYST_HOST" || die "E: HYST_HOST must be set in the environment"
test -n "$MOG_DOMAIN" || die 'E: MOG_DOMAIN must be set in environment'

# base URL every key gets appended to
base=http://$HYST_HOST/$MOG_DOMAIN
# characters allowed at the first/last position of a key, and anywhere in one
valid_ends='a-zA-Z0-9_-'
valid="./$valid_ends"
self="$0"
usage="Usage: $self <ls|cat|zcat|get|put|tee|stat|rm|wait-repl>"
if test x"$1" = x--help; then die "$usage"; fi
|
|
32
|
+
|
|
33
|
+
# inline awk scripts:
# BEGIN block shared by the -h (human-readable) listing: unit suffixes
humanize_begin='BEGIN{ U[0]="K"; U[1]="M"; U[2]="G" }'
# rewrites $2 (a byte count) in place as a human-readable size string
humanize_epl='
if ($2 > 1024) {
	suff = "";
	for (i = 0; i < 3; i++) {
		$2 /= 1024.0;
		suff = U[i];
		if ($2 < 1024)
			break;
	}
	$2 = sprintf("% 10.1f%s", $2, suff);
}
'
# formats `curl -iI' header output for the stat subcommand;
# the caller presets the awk variable `key' via a BEGIN block
stat_fmt='
$1 == "Content-Length:" {
	size = $2;
	sub(/\r/, "", size);
}
$1 ~ /^X-Url-[0-9]+:$/ {
	id = $1;
	u = $2;
	sub(/X-Url-/, "", id);
	sub(/:/, "", id);
	sub(/\r/, "", u);

	url[id] = u;
	++nr_url;
}
END {
	if (NR == 0) {
		print "Cannot stat key:", key > "/dev/stderr"
	} else {
		print "Key:", key;
		print "Size:", size;
		for(i = 0; i < nr_url; ++i)
			printf "URL-%d: %s\n", i, url[i];
		exit(nr_url == 0 ? 1 : 0);
	}
}
'
# given `ls -lL /dev/null /proc/$$/fd/1' output, prints "t" when stdout
# has the same major,minor device numbers ($5,$6) as /dev/null
stdout_is_null='
/\/dev\/null$/ {
	null_Mm = $5""$6;
}
/\/fd\/1$/ {
	fd_1_Mm = $5""$6;
}
END {
	if (NR == 2 && fd_1_Mm == null_Mm)
		print "t"
}
'
|
|
86
|
+
|
|
87
|
+
# run_curl ARGS...: invoke curl(1) with user-configurable options.
# $HYST_CURL_OPTS overrides the defaults whenever it is set (even empty,
# matching `${VAR-default}' semantics); $v may carry a -v flag from
# parse_common.  $curl_opts and $v are deliberately left unquoted so
# they undergo word splitting.
run_curl () {
	o='--silent --show-error --fail --no-buffer'
	if test "${HYST_CURL_OPTS+set}" = set
	then
		curl_opts=$HYST_CURL_OPTS
	else
		curl_opts=$o
	fi
	# echo >&2 T: curl $curl_opts $v "$@"
	curl $curl_opts $v "$@"
}
|
|
93
|
+
|
|
94
|
+
# invalid_key KEY: explain to the user why KEY was rejected, then abort
# via die().  Reads the global $valid character class set at startup.
invalid_key () {
	echo "E: key '$1' is invalid" >&2
	echo "E: only the following characters are allowed: $valid" >&2
	echo "E: redundant slashes are not allowed" >&2
	die "E: (no leading or trailing slash '/' or period '.')"
}
|
|
100
|
+
|
|
101
|
+
# parse_common OPTSPEC KEYSPEC ARGS...
# Shared option/argument parser for every subcommand.
#
#   OPTSPEC - space-separated option spec, e.g. '-f -r<RANGE>'
#             (bare letters are boolean, -X<NAME> takes an argument)
#   KEYSPEC - human-readable argument spec, e.g. '<KEY1> [<KEY2> ...]'
#   ARGS    - the user's remaining command line
#
# Sets globals: one variable per option letter holding the raw flag text
# (e.g. f=-f, r=-rRANGE), $keys (validated keys), $key (first key),
# $file (when KEYSPEC starts with <FILE>), $force (curl header for -f),
# and $usage.  Dies on --help, invalid keys, or missing required keys.
parse_common() {
	opts="-v $1"; shift        # -v (curl verbose) is accepted everywhere
	key_args="$1"; shift
	usage="Usage: $self $command [$opts] $key_args"
	if test x"$1" = x--help; then die "$usage"; fi
	optstr=
	# decode KEYSPEC: is at least one key required?  exactly one?
	# does a local <FILE> argument come first?
	req_arg="$(expr x"$key_args" : x'\(<[A-Z0-9]*>\).*$' || :)"
	one_arg="$(expr x"$key_args" : x'\(<[A-Z0-9]*>\)$' || :)"
	file_arg="$(expr x"$key_args" : x'\(<FILE>\)' || :)"

	# reset all parse state (these are globals read by the caller)
	f= l= h= file= keys= ok= key= v= o_bool= o_arg=

	# build the getopt(1) optstring from OPTSPEC
	for b in $opts
	do
		o=$(expr x"$b" : 'x-\([A-Za-z]\)$' || :)
		case $o in
		'')
			# option taking an argument, e.g. -A<AFTER>
			o=$(expr x"$b" : 'x-\([A-Za-z]\)<[^>]*>$')
			o_arg="$o_arg$o"
			optstr="$optstr$o:"
			;;
		*)
			# boolean option
			o_bool="$o_bool$o"
			optstr="$optstr$o"
			;;
		esac
	done

	opts=$(getopt $optstr $*)
	set -- $opts
	while test $# -ne 0
	do
		i=$1; shift
		case $i in
		-[$o_bool])
			# e.g. f=-f, so callers can `test -n "$f"'
			_tmp=$(expr x$i : 'x-\(['$o_bool']\)')
			eval $_tmp=$i
			;;
		-[$o_arg])
			# e.g. A=-Afoo (flag text plus its argument)
			_tmp=$(expr x$i : 'x-\(['$o_arg']\)')
			eval $_tmp=$i"$1"
			shift
			;;
		--)
			# end of options: choose key-validation patterns.
			# ls takes a prefix, so first/last characters may
			# also be '/' or '.'; other commands need full keys.
			case $command in
			ls)
				ok1='\(['$valid']*\)$'
				ok2="$ok1"
				;;
			*)
				ok1='\(['$valid_ends']['$valid']*\)$'
				ok2='\(['$valid']*['$valid_ends']\)$'
				;;
			esac
			;;
		*)
			if test -n "$one_arg" && test -n "$keys"
			then
				die "$usage"
			fi

			if test -n "$file_arg" && test -z "$file"
			then
				file="$i"
			else
				case $i in
				*//*) invalid_key "$i" ;;
				esac

				# a key must match both patterns: valid
				# characters AND valid first/last character
				k1=$(expr x"$i" : x"$ok1" || :)
				k2=$(expr x"$i" : x"$ok2" || :)
				if test x"$k1" != x"$k2" || test -z "$k1"
				then
					invalid_key "$i"
				fi
				keys="$keys $k1"
				if test -z "$key"; then key="$k1"; fi
			fi
			;;
		esac
	done

	if test -n "$req_arg"
	then
		test -n "$keys" || die "$usage"
	fi

	# -f translates to the X-OMGF-Force header (overwrite existing keys)
	force=
	if test -n "$f"
	then
		force="-H X-OMGF-Force:true"
	fi
}
|
|
194
|
+
|
|
195
|
+
# keys_to_urls [PFX]: build the global $urls from the global $keys,
# placing PFX (e.g. -O for curl downloads) before each "$base/<key>".
# The result is meant to be passed to curl unquoted (word-split).
keys_to_urls() {
	pfx="$1"
	urls=
	for i in $keys
	do
		urls="$urls $pfx $base/$i"
	done
}
|
|
200
|
+
|
|
201
|
+
# run! dispatch on the subcommand name
command="$1"
test -n "$command" || die "$usage"
shift
case $command in
ls)
	# list keys, optionally long (-l), human-readable (-h), including
	# metadata keys (-a), after (-A) / before (-B) a given key
	parse_common '-l -h -a -A<AFTER> -B<BEFORE>' '[<PREFIX>]' "$@"
	limit=1000 last= prefix="$key" begin=
	epl='print $1'
	query="?limit=$limit"

	if test -n "$B"
	then
		# stop printing once keys sort at or past the -B argument
		before="$(expr x"$B" : 'x-B\(.*\)$')"
		before='$1 >= "'$before'" { exit 0 }'
	fi
	case "$B,$a" in
	,) match= ;;
	,-a) match= ;;
	*,-a) match="$before" ;;
	*,) match="$before" ;;
	esac

	if test -n "$l"
	then
		# long format: "devcount size key"
		epl='printf "% 2d % 16s %s\n", $3, $2, $1;'
		if test -n "$h"
		then
			epl="$humanize_epl$epl"
			begin="$humanize_begin"
		fi
	fi

	test -z "$prefix" || query="$query&prefix=$prefix"
	# the END block reports the last key seen and the row count on
	# stderr so the eval below can drive pagination
	a="$begin$match{$epl}"'END {print "last="$1, "nr="NR >"/dev/stderr"}'

	q="$query"
	if test -n "$A"
	then
		after="$(expr x"$A" : 'x-A *\(.*\)$')"
		q="$query&after=$after"
	fi

	# paginate: keep fetching while the server returns a full page
	nr=$limit
	while test -n "$nr" && test $nr -eq $limit
	do
		# send curl and awk stdout to our stdout
		# send curl stderr to our stderr
		# capture awk stderr for eval
		exec 3>&1
		exec 4>&2
		nr=
		eval $(run_curl "$base$q" 2>&4 | awk -F'|' "$a" 2>&1 1>&3)
		exec 2>&4
		exec 1>&3
		q="$query&after=$last"
	done
	;;
cat)
	# stream each key's contents to stdout
	parse_common '-r<RANGE>' '<KEY1> [<KEY2> ...]' "$@"
	keys_to_urls
	run_curl -L $r $urls
	;;
zcat)
	# like cat, but gunzip the payload on the way out
	parse_common '' '<KEY1> [<KEY2> ...]' "$@"
	keys_to_urls

	# favor pigz (http://zlib.net/pigz) as it's slightly faster
	case "$(which pigz 2>/dev/null || :)" in
	'') GZIP=gzip ;;
	*/pigz) GZIP=pigz ;;
	esac

	run_curl -L $urls | $GZIP -dc
	;;
get)
	# download each key into the basename of the key (curl -O)
	parse_common '-r<RANGE>' '<KEY1> [<KEY2> ...]' "$@"
	keys_to_urls -O
	run_curl -L $r $urls
	;;
put)
	# upload local files, using each file's path as the key;
	# validate all files first so we fail before any upload starts
	parse_common '-f' '<KEY1> [<KEY2> ...]' "$@"
	for file in $keys
	do
		test -f "$file" || die "E: FILE: $file is not a regular file"
	done
	for file in $keys
	do
		run_curl -H Expect: -T "$file" "$base/$file"
	done
	;;
cp)
	# upload one local FILE under an explicit KEY
	parse_common '-f' '<FILE> <KEY>' "$@"
	test -n "$file" || die "E: <FILE> argument required"
	test -n "$key" || die "E: <KEY> argument required"
	test -f "$file" || die "E: FILE: $file is not a regular file"
	run_curl -H Expect: -T "$file" "$base/$key"
	;;
tee)
	# read stdin, upload to KEY, and echo the stream to stdout
	parse_common '-f' '<KEY>' "$@"
	test -n "$key" || die "E: <KEY> argument required"

	# optimization: avoid writing to stdout if stdout is /dev/null
	# this should work on Solaris, too, tested on Linux
	is_null=$(ls -lL /dev/null /proc/$$/fd/1 2>/dev/null \
		| awk "$stdout_is_null")

	if test -n "$is_null"
	then
		run_curl -H Expect: -T- "$base/$key"
	else
		# replace the temp file with a fifo of the same name so
		# tee(1) and curl can run concurrently
		teetmp="$(mktemp -t hyst.$command.$$.XXXXXXXX)"
		rm $teetmp
		trap 'rm -f $teetmp' EXIT
		mkfifo $teetmp

		run_curl -H Expect: -T- "$base/$key" < $teetmp >/dev/null &
		# tee(1) feeds curl via fifo
		tee $teetmp
		wait
	fi

	;;
stat)
	# show Key/Size/URL-$nr metadata per key, blank-line separated
	parse_common '' '<KEY1> [<KEY2> ...]' "$@"
	for i in $keys
	do
		test x"$i" = x"$key" || echo
		run_curl -iI "$base/$i" | awk "BEGIN{key=\"$i\"}$stat_fmt"
	done
	;;
rm)
	# delete the given keys
	parse_common '' '<KEY1> [<KEY2> ...]' "$@"
	keys_to_urls
	run_curl -X DELETE $urls
	;;
wait-repl)
	# poll until KEY has at least COUNT (-n, default 2) replicas,
	# giving up after TIMEOUT (-t, default 3600) seconds
	parse_common '-t<TIMEOUT> -n<COUNT>' '<KEY>' "$@"
	test -n "$key" || die "E: <KEY> argument required"
	case $t in
	'') timeout=3600 ;;
	*)
		timeout="$(expr x"$t" : 'x-t *\([0-9]\+\)$' || :)"
		test -z "$timeout" && die "timeout=$t not an integer"
		;;
	esac
	# convert the relative timeout to an absolute deadline (epoch secs)
	timeout=$(( $(date +%s) + $timeout ))
	case $n in
	'') count=2 ;;
	*)
		count="$(expr x"$n" : 'x-n *\([0-9]\+\)$' || :)"
		test -z "$count" && die "count=$n not an integer"
		;;
	esac

	while test $(date +%s) -lt $timeout
	do
		# each X-Url-$nr header is one replica
		nr=$(run_curl -iI "$base/$key" | grep -ic '^X-Url-[0-9]\+:')
		test $nr -ge $count && exit 0
		sleep 1
	done
	exit 1
	;;
*) die "$usage" ;;
esac
|