infod 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. data/infod.rb +2 -3
  2. data/infod/Es.rb +31 -67
  3. data/infod/{W/source.rb → Es/code.rb} +6 -10
  4. data/infod/Es/css.rb +21 -0
  5. data/infod/{W → Es}/csv.rb +0 -0
  6. data/infod/Es/du.rb +16 -0
  7. data/infod/{W → Es}/feed.rb +13 -11
  8. data/infod/Es/filter.rb +75 -0
  9. data/infod/Es/find.rb +20 -0
  10. data/infod/Es/fs.rb +145 -136
  11. data/infod/Es/glob.rb +22 -0
  12. data/infod/Es/grep.rb +61 -0
  13. data/infod/Es/groonga.rb +47 -56
  14. data/infod/Es/html.rb +271 -0
  15. data/infod/Es/image.rb +114 -0
  16. data/infod/Es/in.rb +68 -0
  17. data/infod/Es/index.rb +183 -0
  18. data/infod/{W → Es}/json.rb +28 -4
  19. data/infod/Es/kv.rb +60 -0
  20. data/infod/Es/ls.rb +58 -0
  21. data/infod/Es/mail.rb +87 -0
  22. data/infod/Es/man.rb +112 -0
  23. data/infod/Es/mime.rb +59 -0
  24. data/infod/Es/out.rb +52 -0
  25. data/infod/{W/page.rb → Es/pager.rb} +7 -3
  26. data/infod/Es/pdf.rb +19 -0
  27. data/infod/Es/rdf.rb +35 -0
  28. data/infod/Es/schema.rb +99 -0
  29. data/infod/Es/search.rb +24 -0
  30. data/infod/Es/sh.rb +21 -0
  31. data/infod/{W → Es}/text.rb +26 -14
  32. data/infod/H.rb +15 -29
  33. data/infod/H/audio.rb +19 -0
  34. data/infod/H/blog.rb +15 -0
  35. data/infod/{W → H}/cal.rb +2 -31
  36. data/infod/H/edit.rb +88 -0
  37. data/infod/{W/examine/examine.rb → H/facets.rb} +17 -17
  38. data/infod/{W → H}/forum.rb +1 -0
  39. data/infod/{W/examine/sw.rb → H/hf.rb} +12 -12
  40. data/infod/H/histogram.rb +78 -0
  41. data/infod/H/mail.rb +92 -0
  42. data/infod/{W/chat.rb → H/microblog.rb} +21 -16
  43. data/infod/H/threads.rb +77 -0
  44. data/infod/H/time.rb +131 -0
  45. data/infod/H/who.rb +30 -0
  46. data/infod/{W → H}/wiki.rb +0 -0
  47. data/infod/K.rb +28 -60
  48. data/infod/N.rb +151 -74
  49. data/infod/Rb.rb +3 -3
  50. data/infod/Th.rb +27 -101
  51. data/infod/Th/404.rb +29 -36
  52. data/infod/Th/500.rb +36 -5
  53. data/infod/Th/GET.rb +48 -118
  54. data/infod/Th/POST.rb +31 -11
  55. data/infod/Th/perf.rb +37 -0
  56. data/infod/Th/util.rb +89 -0
  57. data/infod/Y.rb +24 -7
  58. data/infod/infod.rb +2 -3
  59. metadata +92 -64
  60. data/infod/Es/redis.rb +0 -3
  61. data/infod/Es/sqlite.rb +0 -3
  62. data/infod/Th/local.rb +0 -22
  63. data/infod/W.rb +0 -34
  64. data/infod/W/audio.rb +0 -56
  65. data/infod/W/blog.rb +0 -3
  66. data/infod/W/color.rb +0 -28
  67. data/infod/W/core.rb +0 -77
  68. data/infod/W/css.rb +0 -24
  69. data/infod/W/du.rb +0 -35
  70. data/infod/W/edit.rb +0 -8
  71. data/infod/W/examine/exhibit.rb +0 -34
  72. data/infod/W/examine/hist.rb +0 -55
  73. data/infod/W/examine/history.rb +0 -19
  74. data/infod/W/examine/normal.rb +0 -31
  75. data/infod/W/examine/protovis.rb +0 -30
  76. data/infod/W/examine/time/graph.rb +0 -86
  77. data/infod/W/examine/time/line.rb +0 -24
  78. data/infod/W/find.rb +0 -24
  79. data/infod/W/grep.rb +0 -27
  80. data/infod/W/html.rb +0 -143
  81. data/infod/W/image.rb +0 -61
  82. data/infod/W/kv.rb +0 -66
  83. data/infod/W/ls.rb +0 -50
  84. data/infod/W/mail.rb +0 -248
  85. data/infod/W/pdf.rb +0 -16
  86. data/infod/W/post.rb +0 -9
  87. data/infod/W/rdf.rb +0 -32
  88. data/infod/W/schema.rb +0 -172
  89. data/infod/W/search.rb +0 -33
  90. data/infod/W/shell.rb +0 -30
  91. data/infod/W/table.rb +0 -87
  92. data/infod/W/tree.rb +0 -26
  93. data/infod/W/vfs.rb +0 -175
data/infod/Es/image.rb ADDED
@@ -0,0 +1,114 @@
+ #watch __FILE__
+ class E
+
+ def triplrImage &f
+ triplrStdOut 'exiftool', EXIF, &f
+ end
+
+ # export EXIF to RDF-in-JSON (recursive)
+ def exif
+ take.map{|g|
+ if g.uri.match /(jpe?g|gif|png)$/i
+ e = g.ef
+ if !e.e || e.m < g.m
+ g.ef.w g.fromStream({},:triplrImage), true
+ puts "EXIF #{g} #{g.ef.size} bytes"
+ end
+ end}
+ end
+
+ fn 'req/scaleImage',->e,r{
+ i = [e,e.pathSegment].compact.find(&:f)
+ if i && i.size > 0
+ size = r.q['px'].to_i.min(8).max(4096)
+ stat = i.node.stat
+ id = [stat.ino,stat.mtime,size].h.dive
+ path = E['/E/image/'+id+'.png']
+ if !path.e
+ path.dirname.mk
+ if i.mimeP.match(/^video/)
+ `ffmpegthumbnailer -s #{size} -i #{i.sh} -o #{path.sh}`
+ else
+ `gm convert #{i.sh} -thumbnail "#{size}x#{size}" #{path.sh}`
+ end
+ end
+ path.e ? (path.env r).getFile : F[E404][e,r]
+ else
+ F[E404][e,r]
+ end}
+
+ fn 'view/img',->i,_{
+ [i.values.map{|i|
+ [{_: :a, href: i.url,
+ c: {_: :img,
+ style:'float:left;max-width:61.8%',
+ src: i.url}},
+ i.html]},
+ (H.css '/css/img')]}
+
+ fn 'view/th',->i,e{
+ i.map{|u,i| u.match(/(gif|jpe?g|png|tiff)$/i) &&
+ {_: :a, href: i.url+'?triplr=triplrImage&view=img',
+ c: {_: :img, src: i.url+'?y=scaleImage&px=233'}}}}
+
+ F['view/'+MIMEtype+'image/gif'] = F['view/th']
+ F['view/'+MIMEtype+'image/jpeg']= F['view/th']
+ F['view/'+MIMEtype+'image/png'] = F['view/th']
+
+ # display just the images found in content
+ fn 'view/imgs',->m,e{
+
+ # height argument
+ h = e.q['h'].do{|h|
+ h.match(/^[0-9]+$/).do{|_|'height:'+h+'px'}}
+
+ # visited images
+ seen={}
+
+ # extension-based filter
+ x=->i{i&&i.match(/(jpe?g|gif|png)$/i)&&i}
+
+ [(H.once e,:mu,H.js('/js/mu')),H.js('/js/images'),
+ m.values.map{|v|
+ # CSS selector-based search
+ [[*v[Content]].map{|c| c.class == String &&
+ (Nokogiri::HTML.parse(c).do{|c|
+ [# <img> elements
+ c.css('img').map{|i|i['src']}.compact,
+ # <a> elements with image extensions
+ c.css('a').map{|i|i['href']}.select(&x)]
+ })},
+ # check subject URI for image extension
+ x.(v.uri),
+ # check object URIs for image extension
+ (v.respond_to?(:values) &&
+ v.values.flatten.map{|v|
+ v.respond_to?(:uri) && v.uri
+ }.select(&x))
+ ].flatten.uniq.compact.map{|s|
+ # view
+ {uri: s,
+ # img and link to containing resource
+ c: ->{"<a href='#{v.uri.to_s.do{|u|u.path? ? u : u.E.url}}'><img style='float:left;#{h}' src='#{s}'></a>"}}}}.flatten.map{|i|
+ # show and mark as seen
+ !seen[i[:uri]] &&
+ (seen[i[:uri]] = true
+ i[:c].())}]}
+
+ def E.c; '#%06x' % rand(16777216) end
+ def E.cs; '#%02x%02x%02x' % F['color/hsv2rgb'][rand*6,1,1] end
+
+ fn 'color/hsv2rgb',->h,s,v{
+ i = h.floor
+ f = h - i
+ p = v * (1 - s)
+ q = v * (1 - (s * f))
+ t = v * (1 - (s * (1 - f)))
+ r,g,b=[[v,t,p],
+ [q,v,p],
+ [p,v,t],
+ [p,q,v],
+ [t,p,v],
+ [v,p,q]][i].map{|q|q*255.0}}
+
+ end
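
The 'color/hsv2rgb' lambda registered above is the standard HSV-to-RGB sector conversion (h in [0,6), s and v in [0,1]), which E.cs uses to pick a random fully saturated colour. A standalone sketch of the same arithmetic, outside the package's fn/F registry (the method name hsv2rgb here is illustrative only):

    # HSV -> RGB, h in 0...6, s and v in 0..1; mirrors the sector/fraction
    # formula of 'color/hsv2rgb' above (illustrative, not the package API)
    def hsv2rgb h, s, v
      i = h.floor % 6              # colour-wheel sector 0..5
      f = h - h.floor              # position within the sector
      p = v * (1 - s)
      q = v * (1 - s * f)
      t = v * (1 - s * (1 - f))
      [[v,t,p],[q,v,p],[p,v,t],[p,q,v],[t,p,v],[v,p,q]][i].map{|c| (c * 255).round}
    end

    # a random fully saturated colour string, as E.cs produces
    '#%02x%02x%02x' % hsv2rgb(rand * 6, 1, 1)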
data/infod/Es/in.rb ADDED
@@ -0,0 +1,68 @@
+ #watch __FILE__
+ class E
+
+ # Graph -> tripleStream -> Graph
+ def fromStream m,*i
+ send(*i) do |s,p,o|
+ m[s] = {'uri' => s} unless m[s].class == Hash
+ m[s][p] ||= []
+ m[s][p].push o unless m[s][p].member? o
+ end; m
+ end
+
+ # default proto-graph
+ # mint graph identifier
+ # any graph setup (:g variable mutation) is preserved
+ fn 'protograph/',->e,q,g{
+ set = (F['set/' + q['set']] || F['set/'])[e,q,g]
+ set.map{|u| g[u.uri] ||= u if u.class == E } if set.class == Array
+ # unique fingerprint for graph
+ [F['docsID'][g],
+ F['triplr'][e,q],
+ q.has_key?('nocache').do{|_|rand}
+ ].h}
+
+ # default graph
+ # filesystem storage
+ fn 'graph/',->e,q,m{
+ triplr = F['triplr'][e,q]
+ m.values.map{|r|
+ (r.env e.env).graphFromFile m, triplr if r.class == E }}
+
+ fn 'docsID',->g{
+ g.sort.map{|u,r|
+ [u, r.respond_to?(:m) && r.m]}.h}
+
+ fn 'triplr',->e,q{
+ t = q['triplr']
+ t && e.respond_to?(t) && t || :triplrMIME }
+
+ # document-set
+ fn 'set/',->e,q,_{
+ s = []
+ s.concat e.docs
+ e.pathSegment.do{|p| s.concat p.docs }
+ s }
+
+ def triplrMIME &b
+ mimeP.do{|mime|
+ yield uri,E::Type,(E MIMEtype+mimeP)
+ (MIMEsource[mimeP]||
+ MIMEsource[mimeP.split(/\//)[0]]).do{|s|
+ send *s,&b }}
+ end
+
+ def graphFromFile g={}, triplr=:triplrMIME
+ g.mergeGraph r(true) if ext=='e' # JSON -> graph
+ [:triplrInode, # filesystem data
+ triplr].# format-specific tripleStream emitter
+ each{|i| fromStream g,i } # tripleStream -> Graph
+ g
+ end
+
+ def graph g={}
+ docs.map{|d|d.graphFromFile g} # tripleStream -> graph
+ g
+ end
+
+ end
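
fromStream above folds a stream of (subject, predicate, object) callbacks into the nested-Hash graph shape the rest of the code passes around: {subject => {'uri' => subject, predicate => [objects...]}}. A self-contained sketch of that accumulation step (the sample URIs and predicates are made up):

    # fold (s, p, o) triples into {s => {'uri' => s, p => [o, ...]}},
    # deduplicating objects, as E#fromStream does (illustrative data)
    triples = [
      ['http://example.org/a', 'dc:title',   'Hello'],
      ['http://example.org/a', 'dc:creator', 'alice'],
      ['http://example.org/a', 'dc:title',   'Hello']]   # duplicate, dropped

    graph = {}
    triples.each{|s,p,o|
      graph[s] = {'uri' => s} unless graph[s].class == Hash
      graph[s][p] ||= []
      graph[s][p].push o unless graph[s][p].member? o }

    # graph => {'http://example.org/a' =>
    #            {'uri' => 'http://example.org/a',
    #             'dc:title' => ['Hello'], 'dc:creator' => ['alice']}}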
data/infod/Es/index.rb ADDED
@@ -0,0 +1,183 @@
+ #watch __FILE__
+ class E
+
+ # POSIX-fs based index of triples
+ #
+
+ # index a triple
+ def index p,o
+ p = p.E
+ indexEdit p, (o.class == E ? o : p.literal(o)), nil
+ end
+
+ # index a triple - no input type-normalization
+ def indexEdit p,o,a
+ return if @noIndex
+ p.pIndex.noIndex[o,self,a]
+ end
+ def noIndex
+ @noIndex = 1
+ self
+ end
+
+ # reachable graph along named predicate
+ def walk p, g={}, v={}
+ graph g # cumulative graph
+ v[uri] = true # visited mark
+
+ rel = g[uri].do{|s|s[p]} ||[]
+ rev = (p.E.po self) ||[]
+
+ rel.concat(rev).map{|r|
+ v[r.uri] || (r.E.walk p,g,v)}
+
+ g
+ end
+
+ # subtree traverse
+ fn 'set/subtree',->d,r,m{
+ c =(r['c'].do{|c|c.to_i + 1} || 3).max(100) # one extra for start of next-page
+ o = r['d'] =~ /^a/ ? :asc : :desc # direction
+ ('/'.E.take c, o, d.uri).do{|s| # take subtree
+ desc, asc = o == :desc ? # orient pagination hints
+ [s.pop, s[0]] : [s[0], s.pop]
+ m['prev'] = {'uri' => 'prev', 'url' => desc.url,'d' => 'desc'}
+ m['next'] = {'uri' => 'next', 'url' => asc.url, 'd' => 'asc'}
+ s }}
+
+ # subtree traverse index on p+o cursor
+ fn 'set/index',->d,r,m,f=:rangePO{
+ top = (f == :rangeP ? d : r['p']).expand.E
+ count = r['c'] &&
+ r['c'].to_i.max(1000)+1 || 22
+ dir = r['d'] &&
+ r['d'].match(/^(a|de)sc$/) &&
+ r['d'].to_sym ||
+ :desc
+
+ (top.send f, count, dir, r['offset'],(d if f == :rangePO)).do{|s|
+ # pagination pointers
+ a,b = s[0], s.size > 1 && s.pop
+ desc,asc = r['d'] && r['d']=='asc' && [a,b]||[b,a]
+ # insert pointers in response-graph
+ m['prev']={'uri' => 'prev','url' => d.url,'d' => 'desc','offset' => desc.uri} if desc
+ m['next']={'uri' => 'next','url' => d.url,'d' => 'asc', 'offset' => asc.uri} if asc
+ s }}
+
+ fn 'set/indexP',->d,r,m{Fn 'set/index',d,r,m,:rangeP}
+ F['set/indexPO'] = F['set/index']
+
+ # predicate index
+ def pIndex
+ prependURI '/index/'
+ end
+
+ # predicate-object index
+ def poIndex o
+ pIndex.concatURI o
+ end
+
+ # predicate-object index lookup
+ def po o
+ pIndex[o.class == E ? o : literal(o)]
+ end
+
+ # range query - predicate
+ def rangeP n=8,d=:desc,s=nil,o=nil
+ pIndex.subtree(n,d,s).map &:ro
+ end
+
+ # range query - predicate-object
+ def rangePO n=8,d=:desc,s=nil,o
+ poIndex(o).subtree(n,d,s).map &:ro
+ end
+
+ # E -> [node]
+ def subtree *a
+ u.take *a
+ end
+
+ # E -> [E]
+ def take *a
+ no.take(*a).map &:E
+ end
+
+ # random leaf
+ def randomLeaf
+ c.empty? && self || c.r.randomLeaf
+ end
+
+ fn 'set/randomLeaf',->d,e,m{[d.randomLeaf]}
+ fn 'req/randomLeaf',->e,r{[302, {Location: e.randomLeaf.uri},[]]}
+
+
+ # enumerate unique predicates in index
+ fn '/index/GET',->e,r{
+ e.pathSegment.uri.match(/^\/index$/) &&
+ (H [{_: :style, c: "a {font-size:3em;display:block}
+ a:hover {background-color:#00f}"},
+ '/index'.E.take.map{|e|e.uri[6..-1].unpath.do{|p|{_: :a, href: '/@'+URI.escape(p.uri)+'?set=indexP&view=page&v=linkPO&c=12', c: p}}}]).hR}
+
+ # p/o index-traversal pointers
+ fn 'view/linkPO',->d,e{
+ p = e['uri']
+ [(H.css '/css/index'),(H.js '/js/search'),{_: :b, c: p},
+ # front-end to search on SIOC predicate+object URIs
+ %w{sioc:has_creator sioc:addressed_to}.member?(p).do{|_|
+ {_: :form, action: '/whois',c: [{_: :input, type: :hidden, name: :p, value: p},{_: :input, name: :q}]}
+ },
+ d.map{|u,r| {c: {_: :a, href: r.url+'?set=indexPO&p=' + (URI.escape p) + '&view=page&views=timegraph,mail&v=multi&c=8', c: u}}}]}
+
+ end
+
+
+ class Pathname
+
+ # take N els from fs tree in sorted, depth-first order
+ def take count=1000, direction=:desc, offset=nil
+
+ # construct offset-path
+ offset = to_s + offset.gsub(/\/+/,'/').E.path if offset
+
+ # in-range indicator
+ ok = false
+
+ # result set
+ set=[]
+
+ # asc/desc operators
+ v,m={asc: [:id,:>=],
+ desc: [:reverse,:<=]}[direction]
+
+ # visitation function
+ visit=->nodes{
+
+ # sort nodes in asc or desc order
+ nodes.sort_by(&:to_s).send(v).each{|n|
+ ns = n.to_s
+ # have we got enough nodes?
+ return if 0 >= count
+
+ # continue if
+ (# already in-range
+ ok ||
+ # no offset specified
+ !offset ||
+ # offset satisfies in-range operator
+ (sz = [ns,offset].map(&:size).min
+ ns[0..sz-1].send(m,offset[0..sz-1]))) && (
+ if !(c = n.c).empty? # has children?
+ visit.(c) # visit children
+ else
+ count = count - 1 # decrement wanted-nodes count
+ set.push n # add node to result-set
+ ok = true # iterator is now within range
+ end )}}
+
+ visit.(c) # start
+
+ # result set
+ set
+
+ end
+ end
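
Pathname#take above is a cursor-paginated, depth-first walk: leaves are visited in lexical (asc or desc) order, and the offset string acts as a resume point via prefix comparison, so a page boundary can be handed back as the next request's offset. A simplified sketch of the same cursor logic over an in-memory sorted list (method and variable names here are illustrative, not the package's fs traversal):

    # cursor pagination over sorted keys, mirroring the prefix-compare resume
    # logic of Pathname#take (illustrative only)
    def page keys, count: 3, direction: :desc, offset: nil
      op   = direction == :asc ? :>= : :<=
      keys = keys.sort
      keys = keys.reverse if direction == :desc
      keys.select{|k|
        next true unless offset
        sz = [k.size, offset.size].min
        k[0, sz].send op, offset[0, sz]
      }.first(count)
    end

    ks = %w(2014/01 2014/02 2014/03 2014/04 2014/05)
    page ks, count: 2                       # => ["2014/05", "2014/04"]
    page ks, count: 2, offset: '2014/04'    # => ["2014/04", "2014/03"]

As in 'set/subtree' above, the cursor element itself leads the next page, which is why that view fetches one extra node.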
data/infod/{W → Es}/json.rb RENAMED
@@ -1,3 +1,27 @@
+ class Hash
+
+ def graph g
+ g.merge!({uri=>self})
+ end
+
+ def mergeGraph g
+ g.values.each{|r|
+ r.triples{|s,p,o|
+ self[s] = {'uri' => s} unless self[s].class == Hash
+ self[s][p] ||= []
+ self[s][p].push o unless self[s][p].member? o }} if g
+ self
+ end
+
+ # tripleStream emitter
+ def triples
+ s = uri
+ map{|p,o|
+ o.class == Array ? o.each{|o| yield s,p,o} : yield(s,p,o) unless p=='uri'}
+ end
+
+ end
+
  class E
 
  def triplrJSON
@@ -5,7 +29,7 @@ class E
  end
 
 
- # addJSON :: tripleStream -> JSON graph (fs)
+ # tripleStream -> fs
  def addJSON i,g,p=[]
  fromStream({},i).map{|u,r| # stream -> graph
  (E u).do{|e| # resource
@@ -14,13 +38,13 @@ class E
  (p.map{|p|r[p].do{|o|e.index p,o[0]}} # index properties
  j.w({u => r},true) # write doc
  puts "a #{e}"
- # link opaque-URI docs as siblings of base-URI
+ # link opaque-URI docs as siblings of base-URI for doc-discoverability
  e.a('.e').do{|u| (j.ln u) unless ((j.uri == u.uri) || u.e) }
  e.roonga g # index content
  )}}
  self
- rescue Exception => e
- puts :addJSON,uri,e
+ rescue Exception => e
+ puts "addJSON #{e}"
  end
 
  fn 'view/application/json',->m,e{
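
Hash#triples above is the inverse of fromStream: it flattens a {'uri' => s, p => [o, ...]} resource back into (s, p, o) tuples, skipping the reserved 'uri' key, which is what mergeGraph consumes when folding one graph into another. Roughly (standalone sketch on a plain Hash, not the package's resource objects):

    # flatten a resource Hash back into (s, p, o) tuples, as Hash#triples does
    # (sketch; assumes the subject lives under the 'uri' key)
    resource = {'uri'        => 'http://example.org/a',
                'dc:title'   => ['Hello'],
                'dc:creator' => 'alice'}     # non-Array objects yielded as-is

    out = []
    resource.each{|p,o|
      next if p == 'uri'
      s = resource['uri']
      o.class == Array ? o.each{|x| out << [s, p, x]} : out << [s, p, o] }

    # out => [['http://example.org/a', 'dc:title',   'Hello'],
    #         ['http://example.org/a', 'dc:creator', 'alice']]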
data/infod/Es/kv.rb ADDED
@@ -0,0 +1,60 @@
+ #watch __FILE__
+ class E
+
+ def []= p,o
+ self[p,o]
+ end
+
+ def [] p,o=nil, v=nil
+ if o # set
+ editFs p,o,v
+ else # get
+ (concatURI p).properties
+ end
+ end
+
+ def editFs p, o, oO=nil
+ p = p.E
+ o = p.literal o unless o.class == E
+ t = (concatURI p).concatURI o
+ if oO # updated triple
+ if t.e # old triple exists?
+ t.deleteNode # remove triple
+ indexEdit p,o,'' # unindex
+ end
+ self[p,oO] unless oO.empty? # add triple
+ else
+ unless t.e # triple exists?
+ indexEdit p,o,nil # index triple
+ if o.f # add triple
+ # link resource
+ # puts "link #{o} #{t}"
+ o.ln t
+ elsif o.e
+ # symlink resource
+ # puts "symlink #{o} #{t}"
+ o.ln_s t
+ else
+ # name resource
+ puts "origin missing #{t} , adding URI ref"
+ t.mk
+ end
+ end
+ end
+ end
+
+ def triplrFsStore
+ properties.map{|p|
+ self[p].map{|o|
+ yield uri, p.uri, o}}
+ end
+
+ def deletePredicate p
+ self[p].each{|o|self[p,o,'']}
+ end
+
+ def properties
+ subtree.map &:ro
+ end
+
+ end
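
editFs above persists each triple as a filesystem node at roughly subject/predicate/object, hard-linking or symlinking the object when it already exists on disk and otherwise creating an empty node that merely names it. A rough sketch of that layout with plain Pathname/FileUtils calls (paths and helper names are hypothetical, not the package's URI-to-path mapping):

    require 'pathname'
    require 'fileutils'

    # store a triple as <store>/<subject>/<predicate>/<object>
    # (layout sketch only; infod derives these paths from URIs)
    def put_triple store, s, p, o
      node = Pathname.new(store).join(s, p, o)
      FileUtils.mkdir_p node.dirname
      FileUtils.touch node unless node.exist?   # empty node names the object
    end

    # objects for a subject+predicate are just the directory listing
    def objects store, s, p
      dir = Pathname.new(store).join(s, p)
      dir.directory? ? dir.children.map{|c| c.basename.to_s}.sort : []
    end

    put_triple '/tmp/kv', 'alice', 'knows', 'bob'
    put_triple '/tmp/kv', 'alice', 'knows', 'carol'
    objects '/tmp/kv', 'alice', 'knows'   # => ["bob", "carol"]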