infod 0.0.3.2 → 0.0.3.3

@@ -1,5 +1,5 @@
  #watch __FILE__
- class E
+ class R
  =begin
  faceted-filter, implemented via dynamically-generated style-sheets
 
@@ -58,11 +58,11 @@ class E
 
  fn 'view/facetSelect',->m,e{
  [(H.js '/js/facets.select'),(H.js '/js/mu'),(H.css '/css/facets'),
- E.graphProperties(m).map{|e|[{c: e},' ']},
+ R.graphProperties(m).map{|e|[{c: e},' ']},
  {_: 'button', c: 'Go'}]}
 
  fn 'view/facets',->m,e{
- e.q['a'].do{|a|Fn 'facets',a,m,e} ||
- (Fn 'view/facetSelect',m,e)}
+ e.q['a'].do{|a|F['facets'][a,m,e]} ||
+ F['view/facetSelect'][m,e]}
 
  end
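
The move from `Fn 'facets',a,m,e` to `F['facets'][a,m,e]` implies view functions are now plain lambdas in a lookup table, invoked via Proc#[]. A minimal model of that registry, inferred from the call style in this hunk (the gem's real `fn` helper is defined elsewhere, so treat this as an assumption):

    # hypothetical stand-in for the gem's fn/F registry
    F = {}
    def fn name, f; F[name] = f end

    fn 'view/hello', ->m,e{ "hello #{m}" }
    F['view/hello']['world', nil]   # call the lambda: => "hello world"
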
@@ -34,20 +34,20 @@ module FeedParse
  puts "no HTTP URIs found #{u}"
  u = '/junk/'+u.gsub('/','.')
  end
- yield u, E::Type, (E::SIOCt+'BlogPost').E
- yield u, E::Type, (E::SIOC+'Post').E
+ yield u, R::Type, (R::SIOCt+'BlogPost').R
+ yield u, R::Type, (R::SIOC+'Post').R
 
  #links
  inner.scan(%r{<(link|enclosure|media)([^>]+)>}mi){|e|
  e[1].match(/(href|url|src)=['"]?([^'">\s]+)/).do{|url|
- yield(u,E::Atom+'/link/'+((r=e[1].match(/rel=['"]?([^'">\s]+)/)) ? r[1] : e[0]), url[2].E)}}
-
+ yield(u,R::Atom+'/link/'+((r=e[1].match(/rel=['"]?([^'">\s]+)/)) ? r[1] : e[0]), url[2].R)}}
+
  #elements
  inner.scan(%r{<([a-z]+:)?([a-z]+)([\s][^>]*)?>(.*?)</\1?\2>}mi){|e|
  yield u, # s
- (x[e[0]&&e[0].chop]||E::RSS)+e[1], # p
+ (x[e[0]&&e[0].chop]||R::RSS)+e[1], # p
  e[3].extend(FeedParse).guess.do{|o|# o
- o.match(/\A(\/|http)[\S]+\Z/) ? o.E : E::F['cleanHTML'][o]
+ o.match(/\A(\/|http)[\S]+\Z/) ? o.R : R::F['cleanHTML'][o]
  }}
  else
  puts "no post-identifiers found #{u}"
@@ -58,19 +58,19 @@ module FeedParse
  end
  end
 
- class E
+ class R
 
  Atom = W3+'2005/Atom'
  RSS = Purl+'rss/1.0/'
  RSSm = RSS+'modules/'
- Feed = (E RSS+'channel')
+ Feed = (R RSS+'channel')
 
- def listFeeds; (nokogiri.css 'link[rel=alternate]').map{|u|E (URI uri).merge(u.attr :href)} end
+ def listFeeds; (nokogiri.css 'link[rel=alternate]').map{|u|R (URI uri).merge(u.attr :href)} end
  alias_method :feeds, :listFeeds
 
  # add existing resources to index
  #
- # 'http:/'.E.take.select{|e|e.ext=='e'}.map{|r|E::FeedArchiver[r,r.graph,'localhost']}
+ # 'http:/'.R.take.select{|e|e.ext=='e'}.map{|r|R::FeedArchiver[r,r.graph,'localhost']}
 
  FeedArchiver = -> doc, graph, host {
  doc.roonga host
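
The `#elements` scan above pairs opening and closing tags, optionally namespaced, with backreferences `\1` and `\2`. That pattern in isolation, on an invented feed fragment:

    # pairs <tag>...</tag> and <ns:tag>...</ns:tag> via backreferences
    xml = '<item><title>hi</title><dc:date>2014-01-01</dc:date></item>'
    xml.scan(%r{<([a-z]+:)?([a-z]+)([\s][^>]*)?>(.*?)</\1?\2>}mi){|e|
      prefix, tag, _attrs, body = e
      puts "#{prefix}#{tag}: #{body}"}   # => "item: <title>hi</title>..."
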
@@ -79,7 +79,7 @@ class E
  t = t[0].gsub(/[-T]/,'/').sub /(.00.00|Z)$/, '' # trim normalized timezones and non-unique symbols
  stop = /\b(at|blog|com(ments)?|html|info|org|photo|p|post|r|status|tag|twitter|wordpress|www|1999|2005)\b/
  b = (u.sub(/http:\/\//,'.').gsub(/\W/,'..').gsub(stop,'').sub(/\d{12,}/,'')+'.').gsub /\.+/,'.'
- doc.ln E["http://#{host}/news/#{t}#{b}e"]}}
+ doc.ln R["http://#{host}/news/#{t}#{b}e"]}}
  doc}
 
  GREP_DIRS.push /^\/news\/\d{4}/
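
FeedArchiver above flattens a post's timestamp and URL into an archive path under /news/. A runnable trace of that derivation, using the hunk's own regexes on an invented host, URL, and timestamp:

    host = 'localhost'
    u = 'http://example.org/blog/2014/01/hello'
    # timestamp -> path prefix, as in the gsub/sub above
    t = '2014-01-01T12:00:00Z'.gsub(/[-T]/,'/').sub(/(.00.00|Z)$/, '')
    stop = /\b(at|blog|com(ments)?|html|info|org|photo|p|post|r|status|tag|twitter|wordpress|www|1999|2005)\b/
    # URL -> dot-separated slug; stop-words and long digit runs removed
    b = (u.sub(/http:\/\//,'.').gsub(/\W/,'..').gsub(stop,'').sub(/\d{12,}/,'')+'.').gsub(/\.+/,'.')
    puts "http://#{host}/news/#{t}#{b}e"
    # => http://localhost/news/2014/01/01/12:00:00.example.2014.01.hello.e
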
@@ -97,7 +97,7 @@ class E
  Nokogiri::HTML.parse(o).do{|o|
  o.css('.md').do{|o|yield s,p,o}
  yield s,Creator,o.css('a')[-4].child.to_s.strip
- yield s,Type,(SIOCt+'BoardPost').E
+ yield s,Type,(SIOCt+'BoardPost').R
  } : (yield s,p,o)}
  end
 
@@ -1,4 +1,4 @@
- class E
+ class R
 
  =begin
 
@@ -1,119 +1,83 @@
  #watch __FILE__
+ class R
 
- %w{date digest/sha1 fileutils json open-uri pathname}.each{|r|require(r)}
-
- class E
-
- def d
- node.to_s
- end
-
- def node
- Pathname.new FSbase + path
- end
- alias_method :no, :node
-
- def inside
- node.expand_path.to_s.index(FSbase) == 0
- end
-
- def siblings
- parent.c
- end
-
- def children
- node.c.map &:E
- end
- alias_method :c, :children
-
-
- # node exists?
- def exist?
- node.exist?
- end
- alias_method :e, :exist?
-
- # directory?
- def d?
- node.directory?
+ def [] p; predicate p end
+ def []= p,o
+ if o
+ setFs p,o
+ else
+ (predicate p).map{|o|
+ unsetFs p,o}
+ end
  end
 
- # file?
- def file?
- node.file?
+ def predicate p, short = true
+ p = predicatePath p, short
+ p.node.take.map{|n|
+ if n.file? # literal
+ o = n.R
+ case o.ext
+ when "json"
+ o.r true
+ else
+ o.r
+ end
+ else # resource
+ R[n.to_s.unpath p.d.size]
+ end}
+ end
+
+ def setFs p, o, undo = false, short = true
+ p = predicatePath p, short # s+p URI
+ t,literal = p.objectPath o # s+p+o URI
+ puts "#{undo ? :- : :+} <#{t}>"
+ if o.class == R # resource
+ if undo
+ t.delete if t.e # undo
+ else
+ unless t.e
+ if o.f # file?
+ o.ln t # link
+ else
+ t.mk # dirent
+ end
+ end
+ end
+ else # literal
+ if undo
+ t.delete if t.e # remove
+ else
+ t.w literal unless t.e # write
+ end
+ end
  end
- alias_method :f, :file?
 
- # modification time
- def mtime
- node.stat.mtime if e
- end
- alias_method :m, :mtime
+ def unsetFs p,o; setFs p,o,true end
 
  def triplrInode
  if d?
  yield uri, Posix+'dir#parent', parent
- c.map{|c| yield uri, Posix + 'dir#child', E[c.uri.gsub('?','%3F').gsub('#','23')]}
+ c.map{|c| yield uri, Posix + 'dir#child', R[c.uri.gsub('?','%3F').gsub('#','23')]}
  end
  node.stat.do{|s|[:size,:ftype,:mtime].map{|p| yield uri, Stat+p.to_s, (s.send p)}}
  end
 
  def triplrSymlink
  realpath.do{|t|
- target = t.to_s.index(FSbase)==0 ? t.E : t.to_s
+ target = t.to_s.index(FSbase)==0 ? t.R : t.to_s
  yield uri, '/linkTarget', target }
  end
-
- def realpath
- node.realpath
- rescue Errno::ENOENT
- nil
- end
-
- def mk
- e || FileUtils.mkdir_p(d)
- self
- end
 
- # create link
- def ln t
- t = t.E # cast bare URI/string to resource
+ def ln t, y=:link
+ t = t.R
+ t = t.uri[0..-2].R if t.uri[-1] == '/'
  if !t.e # destination exist?
  t.dirname.mk
- FileUtils.link node, t.node
+ FileUtils.send y, node, t.node
  end
  end
 
- # create symlink
- def ln_s t
- t = t.E # cast bare URI/string to resource
- if !t.e # destination exist?
- t.dirname.mk
- FileUtils.symlink node, t.node
- end
- end
-
- def touch
- FileUtils.touch node
- self
- end
-
- def deleteNode
- node.deleteNode if e
- self
- end
-
- def size
- node.size
- end
-
- def read
- f ? r : get
- end
-
- def get
- (open uri).read
- end
+ def ln_s t; ln t, :symlink end
 
  def r p=false
  if f
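
The refactor above swaps a grab-bag of Pathname wrappers for a Hash-like predicate interface: `[]` reads objects back via #predicate, `[]=` persists or removes them on disk via setFs/unsetFs. A hedged usage sketch (the URI and predicate name are invented; `R[...]` is the resource constructor seen throughout this diff):

    post = R['http://localhost/news/2014/01/01/example']
    post['/title'] = 'hello'   # []= with a value: setFs writes the literal
    post['/title']             # []  reads objects back via #predicate
    post['/title'] = nil       # []= with nil: unsetFs removes each object
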
@@ -131,21 +95,11 @@ class E
  self
  end
 
- def writeFile c
- File.open(d,'w'){|f|f << c}
- end
-
- def readFile
- File.open(d).read
- end
-
  end
 
  class Pathname
 
- def E
- to_s.force_encoding('UTF-8').unpathFs
- end
+ def R; to_s.force_encoding('UTF-8').unpath end
 
  def c
  return [] unless directory?
@@ -160,9 +114,3 @@ class Pathname
  end
 
  end
-
- class File::Stat
- def utime
- mtime.to_i
- end
- end
@@ -1,15 +1,15 @@
  #watch __FILE__
- class E
+ class R
  =begin
  graph construction is two-pass:
 
- the first-pass will signify if the second-pass needs to be run. an eTag is be derived from the return-value, ideal fingerprint sources include filestats, mtime checks, extremely trivial SPARQL queries, SHA160 hashes of in-RAM entities.. <http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-25#section-2.3>
+ the first-pass will signify if the second-pass needs to be run. an ETag is be derived from the return-value, ideal fingerprint sources include filestats, mtime checks, extremely trivial SPARQL queries, SHA160 hashes of in-RAM entities.. <http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-25#section-2.3>
 
- second-pass might fetch RDF from a SPARQL store. this lib was developed as an alternative to relying on (large, hard-to-implement, must be running, configured & connectable) SPARQL stores by using the filesystem as much as possible, to experiment with hybrids like SPARQLING up a set of files to be returned in standard Apache-as-static-fileserver fashion, and to webize all sorts of non-RDF like email, directories, plain-text etc
+ second-pass might fetch RDF from a SPARQL store. this lib was developed as an alternative to relying on (large, hard-to-implement, must be running, configured & connectable) SPARQL stores by using the filesystem as much as possible, to experiment with hybrids like SPARQLING up a set of files to be returned in standard Apache-as-static-fileserver fashion, and to webize non-RDF filesystem-content like email, directories, URLs in plain-text etc
 
  triple streams - a source function yields triples up to the caller as it finds them,
- a function providing just a block (consume yielded values) is a sink, both is a filter
- these can be stacked into pipelines. see the data-massaging stream-processors in feed.rb
+ a function providing a block (consumes yielded values) is a sink, both is a filter
+ these can be stacked into pipelines. see the data-massaging stream-processing in feed.rb
 
  =end
 
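
The source/sink/filter vocabulary in the =begin block above maps directly onto Ruby blocks. A minimal sketch of a two-stage pipeline under invented names (triplrSource and triplrUpcase are not from the package):

    # source: yields triples up to the caller as it finds them
    def triplrSource
      yield '/a', '/date', '2014-01-01'
      yield '/b', '/date', '2014-01-02'
    end

    # filter: consumes yielded triples, massages them, re-yields
    def triplrUpcase
      triplrSource{|s,p,o| yield s, p, o.upcase}
    end

    # sink: a bare block consuming the stream into a graph
    graph = {}
    triplrUpcase{|s,p,o| (graph[s] ||= {})[p] = o}
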
@@ -31,7 +31,7 @@ class E
  graph = fromStream({},triplr)
  docs = {}
  graph.map{|u,r|
- e = u.E # resource
+ e = u.R # resource
  doc = e.ef # doc
  doc.e || # exists - we're nondestructive here
  (docs[doc.uri] ||= {} # init doc-graph
@@ -40,7 +40,7 @@ class E
  r[p].do{|v|v.map{|o| # values exist?
  e.index p,o}}})} # index triple
  docs.map{|d,g| # resources in docs
- d = d.E; puts "+doc #{d}"
+ d = d.R; puts "<#{d.docBase}>"
  d.w g,true # write
  hook[d,g,host] if hook} # insert-hook
  graph.triples &b if b # emit triples
@@ -56,7 +56,7 @@ class E
  g.delete '#'
  else
  g['#'][RDFs+'member'] = set
- g['#'][Type] = E[HTTP+'Response']
+ g['#'][Type] = R[HTTP+'Response']
  set.map{|u| g[u.uri] = u } # thunk
  end
  F['docsID'][g,q]}
@@ -71,8 +71,8 @@ class E
  t = ::Date.parse "#{m[2]}-#{m[3]}-#{m[4]}"
  pp = m[1] + (t-1).strftime('%Y/%m/%d') + m[5]
  np = m[1] + (t+1).strftime('%Y/%m/%d') + m[5]
- u[Prev] = {'uri' => pp} if pp.E.e || E['http://' + e.env['SERVER_NAME'] + pp].e
- u[Next] = {'uri' => np} if np.E.e || E['http://' + e.env['SERVER_NAME'] + np].e }
+ u[Prev] = {'uri' => pp} if pp.R.e || R['http://' + e.env['SERVER_NAME'] + pp].e
+ u[Next] = {'uri' => np} if np.R.e || R['http://' + e.env['SERVER_NAME'] + np].e }
  s }
 
  # fs-derived ID for a resource-set
  # fs-derived ID for a resource-set
@@ -86,30 +86,44 @@ class E
86
86
  # update configuration such as q['graph'] = 'hexastore' and return false or call #response..
87
87
  fn 'graph/',->e,q,m{
88
88
  # force thunks
89
- m.values.map{|r|(r.env e.env).graphFromFile m if r.class == E }
89
+ m.values.map{|r|(r.env e.env).graphFromFile m if r.class == R }
90
90
  # cleanup unexpanded thunks
91
- m.delete_if{|u,r|r.class==E}}
91
+ m.delete_if{|u,r|r.class==R}}
92
92
 
93
93
  def graphFromFile g={}
94
94
  return unless e
95
95
  doc = self
96
96
  unless ext=='e' # already native-format
97
- triplr = @r.do{|r|r.q['triplr'].do{|t| (respond_to? t) && t }} || :triplrMIME
98
- doc = E '/E/rdf/' + [triplr,uri].h.dive
99
- unless doc.e && doc.m > m; # freshness check
97
+ doc = R '/cache/RDF/' + uri.h.dive
98
+ unless doc.e && doc.m > m # up-to-date?
100
99
  graph = {}
101
- [:triplrInode,triplr].each{|t| fromStream graph, t }
100
+ [:triplrInode,:triplrMIME].map{|t| fromStream graph, t}
102
101
  doc.w graph, true
103
102
  end
104
103
  end
105
104
  g.mergeGraph doc.r true
106
105
  end
107
106
 
107
+ def ef; @ef ||= docBase.a('.e') end
108
+
108
109
  def graph g={}
109
110
  docs.map{|d|d.graphFromFile g} # tripleStream -> graph
110
111
  g
111
112
  end
112
113
 
114
+ def docs
115
+ base = docBase
116
+ [(base if pathSegment!='/' && base.e), # doc-base
117
+ (self if base != self && e && uri[-1]!='/'), # requested path
118
+ base.glob(".{e,html,n3,nt,owl,rdf,ttl,txt}"), # docs
119
+ ((d? && uri[-1]=='/' && uri.size>1) ? c : []) # trailing slash -> child resources
120
+ ].flatten.compact
121
+ end
122
+
123
+ def triplrDoc &f; docBase.glob('#*').map{|s| s.triplrResource &f} end
124
+
125
+ def triplrResource; predicates.map{|p| self[p].map{|o| yield uri, p.uri, o}} end
126
+
113
127
  def triplrJSON
114
128
  yield uri, '/application/json', (JSON.parse read) if e
115
129
  rescue Exception => e
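
graphFromFile above caches derived RDF under /cache/RDF/ and skips re-extraction while the cached doc's mtime (`doc.m`) is newer than the source's (`m`). The same freshness check in plain form (paths and the extract step are stand-ins, not the gem's API):

    require 'fileutils'

    def extractRDF source              # stand-in for a real triplr
      File.read source
    end

    # skip re-extraction while the cache is newer than the source
    def fresh? source, cache
      File.exist?(cache) && File.mtime(cache) > File.mtime(source)
    end

    def cachedGraph source, cache
      unless fresh? source, cache
        FileUtils.mkdir_p File.dirname cache
        File.write cache, extractRDF(source)
      end
      File.read cache
    end
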
@@ -119,12 +133,18 @@ class E
  to_h.to_json *a
  end
 
- fn Render+'application/json',->d,_=nil{[d].to_json}
+ fn Render+'application/json',->d,_=nil{d.to_json}
 
  end
 
  class Hash
 
+ def except *ks
+ clone.do{|h|
+ ks.map{|k|h.delete k}
+ h}
+ end
+
  def graph g
  g.merge!({uri=>self})
  end
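
The Hash#except added here is nondestructive: it deletes the given keys from a clone and leaves the receiver untouched (Ruby later shipped an equivalent built-in in 3.0). A quick usage sketch:

    h = {a: 1, b: 2, c: 3}
    h.except :a, :c   # => {:b=>2}
    h                 # => {:a=>1, :b=>2, :c=>3}, receiver unchanged
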
@@ -1,16 +1,16 @@
  #watch __FILE__
- class E
+ class R
 
  fn 'view/'+Posix+'util#grep',-> d,e {{_: :form, c: [{_: :input, name: :q, style: 'font-size:2em'},{_: :input, type: :hidden, name: :set, value: :grep}]}}
 
  GREP_DIRS=[]
 
  fn 'set/grep',->e,q,m{
- q['q'].do{|query| m[e.uri+'#grep'] = {Type => E[Posix+'util#grep']}
+ q['q'].do{|query| m[e.uri+'#grep'] = {Type => R[Posix+'util#grep']}
  path = e.pathSegment
  GREP_DIRS.find{|p|path.uri.match p}.do{|allow|
  [e,path].compact.select(&:e).map{|e|
- `grep -irl #{query.sh} #{e.sh} | head -n 200`}.map{|r|r.lines.to_a.map{|r|r.chomp.unpathFs}}.flatten
+ `grep -irl #{query.sh} #{e.sh} | head -n 200`}.map{|r|r.lines.to_a.map{|r|r.chomp.unpath}}.flatten
  }||(puts "no grep available on #{path}")}}
 
  fn 'view/grep',->d,e{
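
`set/grep` above shells out to `grep -irl ... | head -n 200` with shell-escaped arguments; `.sh` is presumably the gem's escaping helper, so Shellwords stands in for it here. An isolated sketch of the same search under that assumption:

    require 'shellwords'

    # case-insensitive recursive filename search, capped at 200 results
    def grepFiles query, dir
      q, d = Shellwords.escape(query), Shellwords.escape(dir)
      `grep -irl #{q} #{d} | head -n 200`.lines.map(&:chomp)
    end
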
@@ -52,7 +52,7 @@ class E
  # match?
  !g.empty? &&
  [# link to resource
- r.E.do{|e|{_: :a, href: e.url, c: e}}, '<br>',
+ r.R.do{|e|{_: :a, href: e.url, c: e}}, '<br>',
  # show 3 matches per resource
  [g[-1*(g.size.max 3)..-1].map{|l|
  # exerpt