bio-publisci 0.0.7 → 0.0.8
- data/Gemfile +1 -1
- data/Rakefile +1 -1
- data/features/orm_steps.rb +4 -4
- data/features/reader.feature +3 -3
- data/features/reader_steps.rb +1 -0
- data/features/writer.feature +7 -2
- data/features/writer_steps.rb +8 -1
- data/lib/bio-publisci.rb +3 -1
- data/lib/bio-publisci/datacube_model.rb +46 -20
- data/lib/bio-publisci/dataset/ORM/data_cube_orm.rb +196 -194
- data/lib/bio-publisci/dataset/ORM/observation.rb +15 -13
- data/lib/bio-publisci/dataset/data_cube.rb +3 -3
- data/lib/bio-publisci/dataset/dataset_for.rb +25 -4
- data/lib/bio-publisci/dsl/dataset_dsl.rb +4 -2
- data/lib/bio-publisci/dsl/dsl.rb +3 -0
- data/lib/bio-publisci/metadata/generator.rb +1 -1
- data/lib/bio-publisci/metadata/metadata_model.rb +27 -0
- data/lib/bio-publisci/metadata/prov/activity.rb +1 -0
- data/lib/bio-publisci/metadata/prov/model/prov_models.rb +33 -2
- data/lib/bio-publisci/query/query_helper.rb +5 -1
- data/lib/bio-publisci/readers/arff.rb +2 -40
- data/lib/bio-publisci/readers/dataframe.rb +1 -1
- data/lib/bio-publisci/writers/arff.rb +42 -16
- data/lib/bio-publisci/writers/base.rb +77 -0
- data/lib/bio-publisci/writers/csv.rb +31 -0
- data/lib/bio-publisci/writers/dataframe.rb +2 -2
- data/resources/queries/codes.rq +10 -5
- data/resources/queries/dimensions.rq +9 -4
- data/resources/queries/measures.rq +7 -2
- data/resources/queries/observations.rq +5 -4
- data/resources/weather.numeric.arff +26 -21
- data/spec/ORM/data_cube_orm_spec.rb +23 -3
- data/spec/ORM/prov_model_spec.rb +53 -0
- data/spec/dataset_for_spec.rb +21 -0
- data/spec/dsl_spec.rb +5 -2
- data/spec/metadata/metadata_dsl_spec.rb +1 -1
- data/spec/r_builder_spec.rb +2 -2
- data/spec/turtle/bacon +1 -1
- data/spec/turtle/reference +1 -1
- data/spec/turtle/weather +275 -0
- data/spec/writer_spec.rb +61 -0
- metadata +66 -28
- checksums.yaml +0 -7
data/Gemfile
CHANGED
data/Rakefile
CHANGED
@@ -21,7 +21,7 @@ Jeweler::Tasks.new do |gem|
   gem.description = %Q{A toolkit for publishing scientific results and datasets using RDF, OWL, and related technologies }
   gem.email = "wstrinz@gmail.com"
   gem.authors = ["Will Strinz"]
-  gem.version = "0.0.7"
+  gem.version = "0.0.8"

   # dependencies defined in Gemfile
 end
data/features/orm_steps.rb
CHANGED
@@ -1,7 +1,7 @@
 require_relative '../lib/bio-publisci.rb'

 Given /^an ORM::DataCube entitled "(.*?)"$/ do |name|
-  @cube = PubliSci::ORM::DataCube.new(name: name)
+  @cube = PubliSci::DataSet::ORM::DataCube.new(name: name)
 end

 Given /^an ORM::DataCube entitled "(.*?)" with the following options:$/ do |name, opts|
@@ -18,7 +18,7 @@ Given /^an ORM::DataCube entitled "(.*?)" with the following options:$/ do |name

     options_hash[k] = v
   }
-  @cube = PubliSci::ORM::DataCube.new(options_hash)
+  @cube = PubliSci::DataSet::ORM::DataCube.new(options_hash)
 end

 Given(/^a turtle string from file (.*)$/) do |file|
@@ -30,7 +30,7 @@ Given(/^the URI string "(.*?)"$/) do |uri|
 end

 When(/^I call the ORM::DataCube class method load on it$/) do
-  @cube = PubliSci::ORM::DataCube.load(@string)
+  @cube = PubliSci::DataSet::ORM::DataCube.load(@string)
 end

 When /^I add a "(.*?)" dimension$/ do |dim|
@@ -70,5 +70,5 @@ Then /^the to_n3 method should return a string with a "(.*?)"$/ do |search|
 end

 Then(/^I should receive an ORM::DataCube object$/) do
-  @cube.is_a?(PubliSci::ORM::DataCube).should == true
+  @cube.is_a?(PubliSci::DataSet::ORM::DataCube).should == true
 end
data/features/reader.feature
CHANGED
@@ -6,20 +6,20 @@ Feature: generate RDF
   Scenario: generate turtle RDF from a Dataframe
     Given a Dataframe generator
     When I provide an R dataframe and the label "mr"
-    And generate a turtle string from it
+    And generate a turtle string from it
     Then the result should contain a "qb:dataSet"
     And the result should contain some "qb:Observation"s

   Scenario: generate turtle RDF from a CSV
     Given a CSV generator
     When I provide the reference file spec/csv/bacon.csv and the label "bacon"
-    And generate a turtle string from it
+    And generate a turtle string from it
     Then the result should contain a "qb:dataSet"
     And the result should contain some "qb:Observation"s

   Scenario: generate turtle RDF from an ARFF file
     Given a ARFF generator
     When I provide the file resources/weather.numeric.arff
-    And generate a turtle string from it
+    And generate a turtle string from it
     Then the result should contain a "qb:dataSet"
     And the result should contain some "qb:Observation"s
data/features/reader_steps.rb
CHANGED
data/features/writer.feature
CHANGED
@@ -1,9 +1,14 @@
 Feature: export to various formats using writers

   In order to use RDF encoded data in other applications
-  I want to export domain objects using an PubliSci::
+  I want to export domain objects using an PubliSci::Writers object

   Scenario: write to ARFF format
     Given a ARFF writer
     When I call its from_turtle method on the file spec/turtle/bacon
-    Then I should receive a .arff file as a string
+    Then I should receive a .arff file as a string
+
+  Scenario: write to CSV
+    Given a CSV writer
+    When I call its from_turtle method on the file spec/turtle/bacon
+    Then I should receive a .csv file as a string
data/features/writer_steps.rb
CHANGED
@@ -1,5 +1,5 @@
 Given(/^a (.*) writer$/) do |type|
-  @writer = PubliSci::
+  @writer = PubliSci::Writers.const_get(type).new
 end

 When(/^I call its from_turtle method on the file (.*)$/) do |file|
@@ -13,5 +13,12 @@ When(/^I call its from_turtle method on the turtle string$/) do
 end

 Then(/^I should receive a \.arff file as a string$/) do
+  puts @result
   @result.is_a?(String).should be true
 end
+
+Then(/^I should receive a \.csv file as a string$/) do
+  puts @result
+  @result.is_a?(String).should be true
+end
+
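Note on the step change above: PubliSci::Writers.const_get(type).new resolves the writer class from the capture group in the Gherkin step ("ARFF" or "CSV"), so a single step definition now serves both writers exercised in this release. A minimal sketch of that lookup, assuming the 0.0.8 gem is installed and using the repository's spec/turtle/bacon fixture:

    require 'bio-publisci'

    %w[ARFF CSV].each do |type|
      # same dynamic constant lookup as the step definition
      writer = PubliSci::Writers.const_get(type).new
      # from_turtle is the method exercised by writer.feature; it returns the
      # converted dataset as a String
      result = writer.from_turtle('spec/turtle/bacon')
      puts "#{type}: #{result.class}"
    end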
data/lib/bio-publisci.rb
CHANGED
@@ -34,9 +34,11 @@ load File.dirname(__FILE__) + '/bio-publisci/dataset/data_cube.rb'
 load File.dirname(__FILE__) + '/bio-publisci/dataset/dataset_for.rb'
 load File.dirname(__FILE__) + '/bio-publisci/dataset/configuration.rb'
 load File.dirname(__FILE__) + '/bio-publisci/dataset/dataset.rb'
+load File.dirname(__FILE__) + '/bio-publisci/datacube_model.rb'
 load File.dirname(__FILE__) + '/bio-publisci/output.rb'
 load File.dirname(__FILE__) + '/bio-publisci/metadata/prov/element.rb'
 load File.dirname(__FILE__) + '/bio-publisci/metadata/prov/prov.rb'
+load File.dirname(__FILE__) + '/bio-publisci/writers/base.rb'


 load_folder('bio-publisci/dsl')
@@ -47,7 +49,7 @@ load_folder('bio-publisci/readers')
 load_folder('bio-publisci/writers')
 load_folder('bio-publisci/dataset/ORM')
 # Dir.foreach(File.dirname(__FILE__) + '/generators') do |file|
-#
+#   unless file == "." or file == ".."
 #   load File.dirname(__FILE__) + '/generators/' + file
 #   end
 # end
data/lib/bio-publisci/datacube_model.rb
CHANGED
@@ -2,14 +2,6 @@ require 'rdf/4store'

 module PubliSci
   module ORM
-
-    # class Person < Spira::Base
-    #   configure :base_uri => "http://example.org/example/people"
-
-    #   property :name, :predicate => FOAF.name, :type => String
-
-    # end
-
     # class Observation < Spira::Base
     #   type RDF::URI.new('http://purl.org/linked-data/cube#Observation')
     #   property :label, predicate: RDFS.label
@@ -25,13 +17,14 @@ module PubliSci

     class DataStructureDefinition < Spira::Base
       type QB.DataStructureDefinition
+      property :label, predicate: RDFS.label
       has_many :component, predicate: QB.component
     end

     class DataSet < Spira::Base
       type QB.DataSet
       property :label, predicate: RDFS.label
-
+      property :structure, predicate: QB.structure
     end

     class Dimension < Spira::Base
@@ -51,20 +44,53 @@ module PubliSci
       Spira.add_repository :default, repo
     end

-    def observation
+    # def observation
+    #   unless PubliSci::ORM.const_defined?("Observation")
+    #     obs = Class.new(Spira::Base) do
+    #       type RDF::URI.new('http://purl.org/linked-data/cube#Observation')

+    #       property :structure, predicate: QB.dataSet

+    #       ((Dimension.each.to_a | Measure.each.to_a) || []).each{|component|
+    #         property strip_uri(component.subject.to_s), predicate: component.subject
+    #       }
+    #     end
+    #     PubliSci::ORM.const_set("Observation",obs)
+    #   end
+    #   Observation
+    # end
+
+    class Observation < Spira::Base
+      type QB.Observation
+      property :label, predicate: RDFS.label
+      property :dataset, predicate: QB.dataSet
+
+      def load_properties
+        comps = dataset.as(DataSet).structure.as(DataStructureDefinition).component.map{|comp| comp.as(Component)}
+        props = comps.map{|comp| comp.dimension ? comp.dimension.as(Dimension) : comp.measure.as(Measure) }
+        props.each{|prop|
+          ss = strip_uri(prop.subject.to_s)
+
+          self.class.property ss.to_sym, predicate: prop.subject
+        }
       end
-
+
+      # for testing; DRY up eventually
+      def strip_uri(uri)
+        uri = uri.to_s.dup
+        uri[-1] = '' if uri[-1] == '>'
+        uri.to_s.split('/').last.split('#').last
+      end
+
+      # def method_missing(meth, *args, &block)
+      #   if meth.to_s =~ /^find_by_(.+)$/
+      #     run_find_by_method($1, *args, &block)
+      #   else
+      #     super # You *must* call super if you don't handle the
+      #           # method, otherwise you'll mess up Ruby's method
+      #           # lookup.
+      #   end
+      # end
     end

     def reload_observation
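Note on the model changes above: the Spira resources now mirror the Data Cube vocabulary (qb:DataSet, qb:DataStructureDefinition, qb:Observation), and Observation#load_properties walks dataset -> structure -> component to define one property per dimension or measure at runtime. A rough sketch of driving these models, assuming a cube has already been serialized to turtle; the fixture path and format option are illustrative:

    require 'bio-publisci'
    require 'spira'

    # register a repository containing an existing Data Cube graph as Spira's default
    Spira.add_repository :default, RDF::Repository.load('spec/turtle/bacon', format: :ttl)

    obs = PubliSci::ORM::Observation.each.first  # enumerate qb:Observation resources
    obs.load_properties                          # adds an accessor per dimension/measure in the DSD
    puts obs.label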
data/lib/bio-publisci/dataset/ORM/data_cube_orm.rb
CHANGED
@@ -1,238 +1,240 @@
 module PubliSci
-  module
+  module DataSet
+    module ORM
+      class DataCube
+        extend PubliSci::Dataset::DataCube
+        extend PubliSci::Analyzer
+        # extend PubliSci::Metadata
+        extend PubliSci::Query
+        extend PubliSci::Parser
+
+        include PubliSci::Dataset::DataCube
+        include PubliSci::Analyzer
+        include PubliSci::Metadata::Generator
+        include PubliSci::Query
+        include PubliSci::Parser
+
+        attr_accessor :labels
+        attr_accessor :dimensions
+        attr_accessor :measures
+        attr_accessor :obs
+        attr_accessor :meta
+
+        def initialize(options={},do_parse = true)
+          @dimensions = {}
+          @measures = []
+          @obs = []
+          @generator_options = {}
+          @options = {}
+
+          @meta = {}
+
+          parse_options options if do_parse
+        end

+        def self.load(graph,options={},verbose=false)


+          graph = load_string(graph) unless graph =~ /^http/

-        codes = execute_from_file('code_resources.rq',graph).to_h.map{|sol|
-          [sol[:dimension].to_s, sol[:codeList].to_s, sol[:class].to_s]
-        }
-        puts "codes: #{codes}" if verbose
-
-        measures = execute_from_file('measures.rq',graph).to_h.map{|m| m[:measure].to_s}
-        puts "measures: #{measures}" if verbose
-
-        name = execute_from_file('dataset.rq',graph).to_h.first[:label]
-        puts "dataset: #{name}" if verbose
-
-        obs = execute_from_file('observations.rq',graph)
-        observations = observation_hash(obs)
-        puts "observations: #{observations}" if verbose
-
-        # simple_observations = observation_hash(obs,true)
-
-        labels = execute_from_file('observation_labels.rq', graph)
-        labels = Hash[labels.map{|sol|
-          [sol[:observation].to_s, sol[:label].to_s]
-        }]
-
-        new_opts = {
-          measures: measures,
-          dimensions: dimensions,
-          observations: observations.values,
-          name: name,
-          labels: labels.values,
-          codes: codes
-        }
-
-        options = options.merge(new_opts)
-        puts "creating #{options}" if verbose
-        self.new(options)
-      end
+          # puts get_hashes(execute_from_file('dimension_ranges.rq',graph))
+          dimensions = Hash[get_hashes(execute_from_file('dimension_ranges.rq',graph),"to_s").map{|solution|
+            #TODO coded properties should be found via SPARQL queries
+            if solution[:range].split('/')[-2] == "code"
+              type = :coded
+            else
+              type = solution[:range].to_s
+            end
+            [solution[:dimension], {type: type}]
+          }]
+          puts "dimensions: #{dimensions}" if verbose

-        options[:dimensions].each{|name,details|
-          add_dimension(name, details[:type] || :coded)
+          codes = execute_from_file('code_resources.rq',graph).to_h.map{|sol|
+            [sol[:dimension].to_s, sol[:codeList].to_s, sol[:class].to_s]
           }
+          puts "codes: #{codes}" if verbose

-        end
+          measures = execute_from_file('measures.rq',graph).to_h.map{|m| m[:measure].to_s}
+          puts "measures: #{measures}" if verbose

-        end
+          name = execute_from_file('dataset.rq',graph).to_h.first[:label]
+          puts "dataset: #{name}" if verbose

+          obs = execute_from_file('observations.rq',graph)
+          observations = observation_hash(obs)
+          puts "observations: #{observations}" if verbose

-          @name = options[:name]
-        else
-          raise "No dataset name specified!"
-        end
+          # simple_observations = observation_hash(obs,true)

+          labels = execute_from_file('observation_labels.rq', graph)
+          labels = Hash[labels.map{|sol|
+            [sol[:observation].to_s, sol[:label].to_s]
+          }]

+          new_opts = {
+            measures: measures,
+            dimensions: dimensions,
+            observations: observations.values,
+            name: name,
+            labels: labels.values,
+            codes: codes
+          }

+          options = options.merge(new_opts)
+          puts "creating #{options}" if verbose
+          self.new(options)
         end
-      end

+        def parse_options(options)
+          if options[:dimensions]
+            options[:dimensions].each{|name,details|
+              add_dimension(name, details[:type] || :coded)
+            }
+          end
+
+          if options[:measures]
+            options[:measures].each{|m| @measures << m}
+          end
+
+          if options[:observations]
+            options[:observations].each{|obs_data| add_observation obs_data}
+          end
+
+          @generator_options = options[:generator_options] if options[:generator_options]
+          @options[:skip_metadata] = options[:skip_metadata] if options[:skip_metadata]

-        if @labels.is_a? Symbol
-          #define some automatic labeling methods
+          if options[:name]
+            @name = options[:name]
           else
-
+            raise "No dataset name specified!"
+          end
+
+          if options[:validate_each]
+            @options[:validate_each] = options[:validate_each]
+          end
+
+          if options[:labels]
+            @labels = options[:labels]
+          end
+
+          if options[:codes]
+            @codes = options[:codes]
           end
         end
-        data = {}

+        def to_n3

-          date: date(),
-          var: @name,
+          #create labels if not specified
+          unless @labels.is_a?(Array) && @labels.size == @obs.size
+            if @labels.is_a? Symbol
+              #define some automatic labeling methods
+            else
+              @labels = (1..@obs.size).to_a.map(&:to_s)
+            end
+          end
+          data = {}
+
+
+          #collect observation data
+          check_integrity(@obs.map{|o| o.data}, @dimensions.keys, @measures)
+          @obs.map{|obs|
+            (@measures | @dimensions.keys).map{ |component|
+              (data[component] ||= []) << obs.data[component]
+            }
           }
+
+
+          @codes = @dimensions.map{|d,v| d if v[:type] == :coded}.compact unless @codes
+          str = generate(@measures, @dimensions.keys, @codes, data, @labels, @name, @generator_options)
+          unless @options[:skip_metadata]
+            fields = {
+              publishers: publishers(),
+              subject: subjects(),
+              author: author(),
+              description: description(),
+              date: date(),
+              var: @name,
+            }
+            # puts basic(fields,@generator_options)
+            str += "\n" + basic(fields)
+          end
+          str
         end
-        str
-      end

+        def add_dimension(name, type=:coded)
+          @dimensions[name.to_s] = {type: type}
+        end

+        def add_measure(name)
+          @measures << name
+        end

+        def add_observation(data)
+          data = Hash[data.map{|k,v| [k.to_s, v]}]
+          obs = Observation.new(data)
+          check_integrity([obs.data],@dimensions.keys,@measures) if @options[:validate_each]
+          @obs << obs
+        end

+        def insert(observation)
+          @obs << observation
+        end

+        def publishers
+          @meta[:publishers] ||= []
+        end

+        def publishers=(publishers)
+          @meta[:publishers] = publishers
+        end

+        def subjects
+          @meta[:subject] ||= []
+        end

+        def subjects=(subjects)
+          @meta[:subject]=subjects
+        end

+        def add_publisher(label,uri)
+          publishers << {label: label, uri: uri}
+        end

+        def add_subject(id)
+          subject << id
+        end

+        def author
+          @meta[:creator] ||= ""
+        end

+        def author=(author)
+          @meta[:creator] = author
+        end

+        def description
+          @meta[:description] ||= ""
+        end

+        def description=(description)
+          @meta[:description] = description
+        end

+        def date
+          @meta[:date] ||= "#{Time.now.day}-#{Time.now.month}-#{Time.now.year}"
+        end

+        def date=(date)
+          @meta[:date] = date
+        end

+        def to_h
+          {
+            measures: @measures,
+            dimensions: @dimensions,
+            observations: @obs.map{|o| o.data}
+          }
+        end
       end
     end
 end