dbd 0.0.1

Files changed (58)
  1. data/.gitignore +17 -0
  2. data/.rspec +2 -0
  3. data/.rvmrc +1 -0
  4. data/.travis.yml +10 -0
  5. data/Gemfile +8 -0
  6. data/Guardfile +7 -0
  7. data/LICENSE.txt +22 -0
  8. data/README.md +97 -0
  9. data/Rakefile +1 -0
  10. data/dbd.gemspec +30 -0
  11. data/docs/rationale.md +17 -0
  12. data/docs/stories/001_create_a_fact.txt +15 -0
  13. data/docs/stories/002_create_a_facts_collection.txt +14 -0
  14. data/docs/stories/003_create_a_fact_origin.txt +15 -0
  15. data/docs/stories/004_create_fact_origins_collection.txt +8 -0
  16. data/docs/stories/005_CSV_export_the_graph.txt +18 -0
  17. data/docs/stories/006_refactor_fact_origin_to_provenance_fact.txt +20 -0
  18. data/docs/stories/007_rename_property_to_predicate.txt +6 -0
  19. data/docs/stories/008_testing_different_ruby_versions.txt +7 -0
  20. data/docs/stories/009_build_and_store_resources_with_provenance.txt +38 -0
  21. data/docs/stories/010_provenance_fact_properties_from_provenance_ontology.txt +10 -0
  22. data/docs/test.rb +32 -0
  23. data/lib/dbd.rb +13 -0
  24. data/lib/dbd/errors.rb +11 -0
  25. data/lib/dbd/fact.rb +182 -0
  26. data/lib/dbd/fact/collection.rb +60 -0
  27. data/lib/dbd/fact/id.rb +19 -0
  28. data/lib/dbd/fact/subject.rb +21 -0
  29. data/lib/dbd/graph.rb +47 -0
  30. data/lib/dbd/helpers/ordered_set_collection.rb +86 -0
  31. data/lib/dbd/helpers/uuid.rb +33 -0
  32. data/lib/dbd/provenance_fact.rb +76 -0
  33. data/lib/dbd/provenance_resource.rb +54 -0
  34. data/lib/dbd/rdf.rb +9 -0
  35. data/lib/dbd/repo.rb +8 -0
  36. data/lib/dbd/repo/neo4j_repo.rb +4 -0
  37. data/lib/dbd/repo/neo4j_repo/base.rb +55 -0
  38. data/lib/dbd/resource.rb +117 -0
  39. data/lib/dbd/version.rb +3 -0
  40. data/spec/factories/fact.rb +76 -0
  41. data/spec/factories/provenance_fact.rb +34 -0
  42. data/spec/factories/provenance_resource.rb +16 -0
  43. data/spec/factories/resource.rb +17 -0
  44. data/spec/lib/dbd/fact/collection_spec.rb +236 -0
  45. data/spec/lib/dbd/fact/id_spec.rb +19 -0
  46. data/spec/lib/dbd/fact/subject_spec.rb +19 -0
  47. data/spec/lib/dbd/fact_spec.rb +217 -0
  48. data/spec/lib/dbd/graph_spec.rb +214 -0
  49. data/spec/lib/dbd/helpers/ordered_set_collection_spec.rb +88 -0
  50. data/spec/lib/dbd/helpers/uuid_spec.rb +15 -0
  51. data/spec/lib/dbd/provenance_fact_spec.rb +108 -0
  52. data/spec/lib/dbd/provenance_resource_spec.rb +77 -0
  53. data/spec/lib/dbd/rdf_base_spec.rb +39 -0
  54. data/spec/lib/dbd/repo/neo4j_repo/base_spec.rb +85 -0
  55. data/spec/lib/dbd/repo/neo4j_repo/performance_spec.rb +40 -0
  56. data/spec/lib/dbd/resource_spec.rb +166 -0
  57. data/spec/spec_helper.rb +19 -0
  58. metadata +272 -0
data/.gitignore ADDED
@@ -0,0 +1,17 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
data/.rspec ADDED
@@ -0,0 +1,2 @@
+ --color
+ --format progress
data/.rvmrc ADDED
@@ -0,0 +1 @@
+ rvm --create use 2.0.0@dbd
data/.travis.yml ADDED
@@ -0,0 +1,10 @@
+ language: ruby
+ bundler_args: --without debug
+ script: "bundle exec rspec spec"
+ rvm:
+ - 2.0.0
+ - 1.9.3
+ - 1.9.2
+ - jruby-19mode
+ - ruby-head
+ - jruby-head
data/Gemfile ADDED
@@ -0,0 +1,8 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in dbd.gemspec
+ gemspec
+
+ #gem 'rdf', '1.1.0',
+ #  :git => 'https://github.com/ruby-rdf/rdf',
+ #  :branch => '1.1'
data/Guardfile ADDED
@@ -0,0 +1,7 @@
+ guard 'rspec' do
+   watch(%r{^spec/.+_spec\.rb$})
+   watch(%r{^spec/factories}) { "spec" }
+   watch(%r{^lib/dbd/helpers}) { "spec" }
+   watch(%r{^lib/(.+)\.rb$}) { |m| "spec/lib/#{m[1]}_spec.rb" }
+   watch('spec/spec_helper.rb') { "spec" }
+ end
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2013 Peter Vandenabeele
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,97 @@
+ # Dbd: A data store that (almost) never forgets
+
+ This is a fact-based data store, inspired by [RDF] concepts, but adding a log-based structure and fine-grained provenance.
+
+ * [Why?][Rationale]
+ * <http://github.com/petervandenabeele/dbd>
+
+ [![Gem Version](https://badge.fury.io/rb/dbd.png)](http://badge.fury.io/rb/dbd)
+ [![Build Status](https://travis-ci.org/petervandenabeele/dbd.png?branch=master)](http://travis-ci.org/petervandenabeele/dbd)
+
+ ## Features
+
+ * Facts are immutable and ordered (a log-structured "fact stream")
+   * "Big Data" mantra: base facts are immutable (store now, analyse later)
+   * only 1 backup file needed: the fact stream contains the full history
+   * synchronisation between remote sources is cheap (easy caching)
+   * 1 data source has _all_ my data: never lose stuff again :-)
+   * facts can be invalidated (and replaced) later on
+ * Privacy
+   * a "hard delete" is possible: all downstream readers of the fact stream
+     must remove this fact and replace the back-up
+   * since one single back-up file suffices, replacing the *single* back-up
+     file will actually remove the hard-deleted fact(s) for good
+ * Fine-grained provenance
+   * each base Fact points to a ProvenanceResource, so separate provenance
+     is possible for different facts about 1 resource
+   * can keep the original_source reference, creator, date, …
+   * can have a context that allows filtering data (e.g. private, professional, …)
+   * separate encryption schemes per context are possible
+   * a ProvenanceResource is flexible, since it is itself built from Facts
+ * Schemaless
+   * uses the [RDF] (subject, predicate, object) concepts
+   * predicates and types can be defined in an ontology for declaring meaning
+ * Graph based
+   * the object of each Fact can be another Resource
+   * aimed at exporting to a graph database (e.g. Neo4j) for analysis
+
+
+ ## License
+
+ Open Source [MIT]
+
+ ## Installation
+
+     $ gem install dbd # Ruby 1.9.2, 1.9.3, 2.0.x, jruby (see .travis.yml)
+
+ ## Examples
+
+     require 'dbd'
+
+     provenance = Dbd::ProvenanceResource.new
+
+     # PREFIX provenance: <https://data.vandenabeele.com/ontologies/provenance#>
+     # PREFIX dcterms: <http://purl.org/dc/terms/>
+     fact_context_public = Dbd::ProvenanceFact.new(predicate: "provenance:context", object: "public")
+     fact_creator_peter_v = Dbd::ProvenanceFact.new(predicate: "dcterms:creator", object: "@peter_v")
+     fact_created_at_now = Dbd::ProvenanceFact.new(predicate: "provenance:created_at", object: Time.now.utc)
+     provenance << fact_context_public
+     provenance << fact_creator_peter_v
+     provenance << fact_created_at_now
+
+     nobel_peace_2012 = Dbd::Resource.new(provenance_subject: provenance.subject)
+
+     # PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+     # PREFIX base: <https://data.vandenabeele.com/ontologies/base#>
+     fact_nobel_peace_2012 = Dbd::Fact.new(predicate: "base:nobelPeacePriceWinner", object: "2012")
+     fact_EU_label = Dbd::Fact.new(predicate: "rdfs:label", object: "EU") # this will use some RDF predicates in future
+     fact_EU_comment = Dbd::Fact.new(predicate: "rdfs:comment", object: "European Union")
+     fact_EU_story = Dbd::Fact.new(predicate: "base:story", object: "A long period of peace,\n that is a \"bliss\".")
+     nobel_peace_2012 << fact_nobel_peace_2012
+     nobel_peace_2012 << fact_EU_label
+     nobel_peace_2012 << fact_EU_comment
+     nobel_peace_2012 << fact_EU_story
+
+     graph = Dbd::Graph.new
+
+     provenance.each {|provenance_fact| graph << provenance_fact}
+     nobel_peace_2012.each {|fact| graph << fact}
+
+     puts graph.to_CSV
+
+ results in
+
+     $ ruby test.rb
+     "611dbc31-6961-4a86-9259-4a2700add783","2013-05-12 21:50:19 UTC","","98b7bb17-9921-4d52-a08a-39667c2abb4c","provenance:context","public"
+     "79e9c0e7-b6fd-4735-817c-8c21c97c9575","2013-05-12 21:50:19 UTC","","98b7bb17-9921-4d52-a08a-39667c2abb4c","dcterms:creator","@peter_v"
+     "7d143a50-8a63-4637-8ab8-c2aa7fc6e12e","2013-05-12 21:50:19 UTC","","98b7bb17-9921-4d52-a08a-39667c2abb4c","provenance:created_at","2013-05-12 21:50:19 UTC"
+     "fd121b00-7934-4e22-81c8-8e810760c686","2013-05-12 21:50:19 UTC","98b7bb17-9921-4d52-a08a-39667c2abb4c","477a2e10-5e34-434d-8fc1-969277f61f9f","base:nobelPeacePriceWinner","2012"
+     "2d852fe1-911f-497d-9485-6c24a6000fbb","2013-05-12 21:50:19 UTC","98b7bb17-9921-4d52-a08a-39667c2abb4c","477a2e10-5e34-434d-8fc1-969277f61f9f","rdfs:label","EU"
+     "ab00b092-65a3-47c0-b10b-837cb0a5ad81","2013-05-12 21:50:19 UTC","98b7bb17-9921-4d52-a08a-39667c2abb4c","477a2e10-5e34-434d-8fc1-969277f61f9f","rdfs:comment","European Union"
+     "a8d6b34b-6e02-4a5e-8529-4785f090866a","2013-05-12 21:50:19 UTC","98b7bb17-9921-4d52-a08a-39667c2abb4c","477a2e10-5e34-434d-8fc1-969277f61f9f","base:story","A long period of peace,
+      that is a ""bliss""."
+
+ [RDF]: http://www.w3.org/RDF/
+ [Rationale]: http://github.com/petervandenabeele/dbd/blob/master/docs/rationale.md
+ [MIT]: https://github.com/petervandenabeele/dbd/blob/master/LICENSE.txt
data/Rakefile ADDED
@@ -0,0 +1 @@
+ require "bundler/gem_tasks"
data/dbd.gemspec ADDED
@@ -0,0 +1,30 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'dbd/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "dbd"
+   spec.version       = Dbd::VERSION
+   spec.authors       = ["Peter Vandenabeele"]
+   spec.email         = ["peter@vandenabeele.com"]
+   spec.description   = %q{A data store that (almost) never forgets}
+   spec.summary       = %q{A data store that (almost) never forgets}
+   spec.homepage      = ""
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files`.split($/)
+   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files    = spec.files.grep(%r{^(spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_development_dependency 'bundler', '> 1.2.4'
+   spec.add_development_dependency 'rake'
+   spec.add_development_dependency 'guard-rspec'
+   spec.add_development_dependency 'rb-fsevent', '~> 0.9'
+   spec.add_development_dependency 'terminal-notifier-guard'
+   spec.add_development_dependency 'yard'
+   spec.add_runtime_dependency 'neography'
+   spec.add_runtime_dependency 'rdf', '~> 1.0.6'
+   spec.add_runtime_dependency 'ruby_peter_v'
+ end
data/docs/rationale.md ADDED
@@ -0,0 +1,17 @@
+ # A data store that (almost) never forgets
+
+ I am tired of keeping data in many different data stores for different purposes and different contexts, and of losing long-term data (old phone numbers, old contacts, meeting notes, etc.). I want to add new data from personal and online sources and then combine, query, and reason on all that data in different contexts (private, professional, for a certain customer, ...).
+
+ For _all_ facts in the data store, I want to know the "original source" of the data (a very fine-grained provenance), who added it, when, etc., so I can check back the sources, filter on the source origin to query and reason only in a certain context, and also use it as a basis for a soft or hard delete of certain data. Typical provenance attributes can be: *context*, *original_source*, *license*, *created_by*, *created_at*, *valid_from*, *valid_until*, etc. An experimental version of a fine-grained provenance is at https://data.vandenabeele.com/ontologies/.
+
+ One feature of the "context" in the provenance is that a "partial export" of the data in a certain context is possible (only private, only professional, only for a certain customer, etc.). To enforce the separation between the contexts, an advanced feature is to have separate encryption schemes or keys for different contexts (initially an overall external encryption is possible, e.g. by mounting the back-end store on encrypted partitions and encrypting the separate "fact stream" files).
+
+ To allow easy (incremental) backup, synchronization, distributed processing and caching, the low-level "fact stream" must be of a "logging" type and the operations must be idempotent. I see this as the fundamental aspect of "Big Data". Older facts are never removed or modified, although they can be "invalidated" by newer facts (and then possibly replaced on a logical level by new facts). Another advantage of "invalidation" at a certain time stamp is that it becomes possible to go back to the logical state at a certain historical moment.
+
+ One exception is the **"human right to be forgotten"**, with which I fully agree. So the possibility of a "hard delete" on certain older facts in the data store is a requirement, implemented in such a way that the resulting data graph after the new "hard delete" of older facts is always valid and exactly equivalent to the "soft delete" case. Because all other data (except "hard deleted" data) is always retained, a newer export will function as a _complete_ back-up, so it can replace the older back-ups and thereby remove the "hard deleted" entries from all back-ups (to fully implement the "right to be forgotten", also in the back-ups).
+
+ The logical structure of the data must be defined by a descriptive ontology, so no hard programming is needed to set up or modify a schema. For the provenance (meta) level, the facts of a provenance entry are also stored in the same fact stream. A specific requirement is that all provenance facts for a provenance subject must be in the fact stream before any fact uses this provenance subject (the reason is that when a fact is processed, e.g. to be loaded in a back-end for analytics, the full provenance for the fact must be known).
+
+ Based on the ontology of the data, clients can then query this data store and build automated interfaces for inserting and viewing the logical data graph.
+
+ (written in Breskens, on 6 April 2013, after a good day of sailing on the Westerschelde)
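
Editor's note: as a minimal sketch of the "go back to the logical state at a certain historical moment" idea from the rationale above — the attribute names `time_stamp` and `invalidated_at` are assumptions for illustration, not the gem's API:

    # Replay the immutable fact stream and keep only the facts that were
    # created at or before time t and not yet invalidated at time t.
    def state_at(fact_stream, t)
      fact_stream.select do |fact|
        fact.time_stamp <= t &&
          (fact.invalidated_at.nil? || fact.invalidated_at > t)
      end
    end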
data/docs/stories/001_create_a_fact.txt ADDED
@@ -0,0 +1,15 @@
+ 001_store_a_fact
+
+ As a client
+ I can create a data_fact
+
+ A data_fact has:
+ * a very fine grained time stamp
+   (creating a de facto order)
+ * a guaranteed unique UUID
+ * a fact_origin UUID
+
+ * a subject ID (a UUID)
+ * a property (in a correct semantic format,
+   as used in an ontology)
+ * an object of format String
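
Editor's note: for illustration, a minimal sketch of the data_fact fields this story lists, as a plain Struct — the names are assumptions mirroring the story, not the gem's actual Dbd::Fact class:

    require 'securerandom'

    # Assumed field names, mirroring the story; for illustration only.
    DataFact = Struct.new(
      :time_stamp,  # very fine grained, creates a de facto order
      :id,          # guaranteed unique UUID
      :fact_origin, # UUID of the fact_origin
      :subject,     # subject ID (a UUID)
      :property,    # semantic format, e.g. "rdfs:label"
      :object)      # a String

    fact = DataFact.new(Time.now.utc, SecureRandom.uuid, SecureRandom.uuid,
                        SecureRandom.uuid, "rdfs:label", "EU")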
data/docs/stories/002_create_a_facts_collection.txt ADDED
@@ -0,0 +1,14 @@
+ 002_create_a_facts_collection
+
+ As a client
+ I can create a collection of facts
+
+ A facts_collection has:
+ * an ordered list of facts
+   * ordered by time_stamp
+ * a << function that adds one fact
+ * the Enumerable functions allow looping over the facts
+ * other array functions (e.g. []) do not work (no random access)
+ * the << function validates that the time_stamp is strictly larger than the most recent time_stamp in the collection
+ * there is no way (yet) to add "older" or "same date" facts
+   (e.g. from a merge) to a facts_collection
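
Editor's note: a minimal sketch of the << validation this story describes — assumed behaviour for illustration, not the gem's actual Fact::Collection:

    # An Enumerable collection whose << enforces strictly increasing
    # time_stamps, per story 002; no random access is exposed.
    class FactsCollection
      include Enumerable

      def initialize
        @facts = []
      end

      # Appends a fact; its time_stamp must be strictly larger than
      # the most recent time_stamp already in the collection.
      def <<(fact)
        last = @facts.last
        if last && fact.time_stamp <= last.time_stamp
          raise ArgumentError, "time_stamp is not strictly increasing"
        end
        @facts << fact
        self
      end

      # Enumerable supplies each/map/select etc.; [] is not defined.
      def each(&block)
        @facts.each(&block)
      end
    end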
data/docs/stories/003_create_a_fact_origin.txt ADDED
@@ -0,0 +1,15 @@
+ 003_create_a_fact_origin
+
+ As a client
+ I can create a fact origin
+
+ A fact_origin has:
+ * id
+ * context
+ * original_source
+ * created_by
+ * created_at
+ * entered_by
+ * entered_at
+ * valid_from
+ * valid_until
data/docs/stories/004_create_fact_origins_collection.txt ADDED
@@ -0,0 +1,8 @@
+ 004_create_fact_origins_collection
+
+ As a client
+ I can add and retrieve fact_origins to and from a collection
+
+ * I create a fact_origin
+ * I add the fact_origins to the fact_origins_collection
+ * I can efficiently (O(1)) find a fact_origin by id
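
Editor's note: the O(1) lookup requirement maps naturally onto a Ruby Hash keyed by id; a hypothetical sketch (not the gem's implementation):

    # A collection that indexes fact_origins by id for O(1) retrieval.
    class FactOriginsCollection
      def initialize
        @by_id = {}
      end

      def <<(fact_origin)
        @by_id[fact_origin.id] = fact_origin
        self
      end

      # Hash lookup is O(1) on average.
      def by_id(id)
        @by_id.fetch(id)
      end
    end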
data/docs/stories/005_CSV_export_the_graph.txt ADDED
@@ -0,0 +1,18 @@
+ 005_CSV_export_the_graph
+
+ As a client
+ I can ask for a CSV export of the graph
+
+ * a graph is currently built as an (ordered) set of
+   fact_origin_collections and fact_collections
+   (currently limited to 1 fact_origin_collection
+   and 1 fact_collection)
+
+ * use UTF-8
+ * use commas (tabs are too easy to confuse with spaces)
+ * use "" around strings
+ * use a newline at the end of a row
+ * allow newlines in the strings between ""
+ * try to use the default Ruby CSV library
+
+ * facts are ordered by time_stamp
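
Editor's note: the stdlib CSV library does cover these rules; a small demonstration:

    require 'csv'

    # force_quotes wraps every field in ""; embedded quotes are doubled
    # and embedded newlines stay inside the quoted string, with a
    # newline ending the row.
    row = CSV.generate(force_quotes: true) do |csv|
      csv << ["base:story", "A long period of peace,\n that is a \"bliss\"."]
    end
    puts row
    # "base:story","A long period of peace,
    #  that is a ""bliss""."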
data/docs/stories/006_refactor_fact_origin_to_provenance_fact.txt ADDED
@@ -0,0 +1,20 @@
+ 006_refactor_fact_origin_to_provenance_fact
+
+ As a client
+ I can use a ProvenanceFact
+
+ * a ProvenanceFact is a subclass of a Fact
+   with additional features:
+   * its time_stamp must be earlier than any Fact that
+     refers to it through its provenance_fact_subject
+     (otherwise said: the collection of ProvenanceFacts
+     about a subject must be complete before it is used;
+     no provenance_fact may be added about a fact that is
+     already in the fact stream)
+ * a graph is represented internally as a single
+   fact_collection
+ * for the provenance_facts, efficient access to all
+   provenance_facts by_subject is possible with a
+   Ruby Hash (internally coded in O(1) in C)
+
+ * Facts are ordered by time_stamp
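
Editor's note: a sketch of the ordering rule in this story — a provenance subject must be fully described before any fact refers to it. The class and method names here are assumptions for illustration only:

    # Reject a fact whose provenance subject has not been seen yet.
    class FactStream
      def initialize
        @provenance_subjects = {}
        @facts = []
      end

      def <<(fact)
        if fact.provenance_fact?
          # A ProvenanceFact introduces (or extends) a provenance subject.
          @provenance_subjects[fact.subject] = true
        elsif !@provenance_subjects[fact.provenance_fact_subject]
          raise "provenance facts for #{fact.provenance_fact_subject} must come first"
        end
        @facts << fact
        self
      end
    end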
data/docs/stories/007_rename_property_to_predicate.txt ADDED
@@ -0,0 +1,6 @@
+ 007_rename_property_to_predicate
+
+ As a client
+ I can use a Fact with standard terms "subject, predicate, object"
+
+ * refactor property into predicate
data/docs/stories/008_testing_different_ruby_versions.txt ADDED
@@ -0,0 +1,7 @@
+ 008_testing_different_ruby_versions
+
+ As a client
+ I can use the library on Ruby 1.9.2, 1.9.3, 2.0.0 and JRuby
+
+ * test once on these versions
+ * find a way for automated testing (Travis?)
data/docs/stories/009_build_and_store_resources_with_provenance.txt ADDED
@@ -0,0 +1,38 @@
+ 009_build_and_store_resources_with_provenance
+
+ As a client
+ I can build and store a group of resources with provenance
+
+ * rename ArrayCollection to OrderedSetCollection
+
+ * clean up the dependency on the external UUID gem
+
+ * make a FactsBySubject model
+   * add subject and facts related methods
+
+ * make a FactsWithProvenance model
+   * add public build methods on it
+
+ * add validations on Fact::Collection << for Fact#complete?
+
+ * rename FactsBySubject to Resource
+
+ * rename FactsWithProvenance to ResourcesWithProvenance
+
+ * on Resource, enforce the presence of a subject and set it on the facts
+
+ * on Resource, enforce the presence of a provenance_subject and set it on the facts
+
+ * on ProvenanceResource, enforce the presence of a subject and set it on the facts
+
+ * rename provenance_fact_subject to provenance_subject
+
+ * change the arguments for (Provenance)Resource to an options hash
+
+ * remove the ResourcesWithProvenance
+
+ * change the arguments for (Provenance)Fact to an options hash
+
+ * add a store method on Graph
+   * that will store a (Provenance)Resource instance
+   * this will now set the time_stamps (enforcing the strictly monotonic order)
data/docs/stories/010_provenance_fact_properties_from_provenance_ontology.txt ADDED
@@ -0,0 +1,10 @@
+ 010_provenance_fact_predicates_from_provenance_ontology
+
+ As a client
+ I can read the predicates for a provenance_fact from the provenance ontology
+
+ * make an ontology in a separate gem
+ * import that gem
+ * use this ontology to build a validated Provenance Resource
+ * use this validated Provenance Resource instance to better build
+   a ProvenanceResource
data/docs/test.rb ADDED
@@ -0,0 +1,32 @@
+ require 'dbd'
+
+ provenance = Dbd::ProvenanceResource.new
+
+ # PREFIX provenance: <https://data.vandenabeele.com/ontologies/provenance#>
+ # PREFIX dcterms: <http://purl.org/dc/terms/>
+ fact_context_public = Dbd::ProvenanceFact.new(predicate: "provenance:context", object: "public")
+ fact_creator_peter_v = Dbd::ProvenanceFact.new(predicate: "dcterms:creator", object: "@peter_v")
+ fact_created_at_now = Dbd::ProvenanceFact.new(predicate: "provenance:created_at", object: Time.now.utc)
+ provenance << fact_context_public
+ provenance << fact_creator_peter_v
+ provenance << fact_created_at_now
+
+ nobel_peace_2012 = Dbd::Resource.new(provenance_subject: provenance.subject)
+
+ # PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+ # PREFIX base: <https://data.vandenabeele.com/ontologies/base#>
+ fact_nobel_peace_2012 = Dbd::Fact.new(predicate: "base:nobelPeacePriceWinner", object: "2012")
+ fact_EU_label = Dbd::Fact.new(predicate: "rdfs:label", object: "EU") # this will use some RDF predicates in future
+ fact_EU_comment = Dbd::Fact.new(predicate: "rdfs:comment", object: "European Union")
+ fact_EU_story = Dbd::Fact.new(predicate: "base:story", object: "A long period of peace,\n that is a \"bliss\".")
+ nobel_peace_2012 << fact_nobel_peace_2012
+ nobel_peace_2012 << fact_EU_label
+ nobel_peace_2012 << fact_EU_comment
+ nobel_peace_2012 << fact_EU_story
+
+ graph = Dbd::Graph.new
+
+ provenance.each {|provenance_fact| graph << provenance_fact}
+ nobel_peace_2012.each {|fact| graph << fact}
+
+ puts graph.to_CSV
data/lib/dbd.rb ADDED
@@ -0,0 +1,13 @@
+ require 'ruby_peter_v'
+
+ require 'dbd/version'
+
+ require 'dbd/errors'
+ require 'dbd/fact'
+ require 'dbd/provenance_fact'
+ require 'dbd/resource'
+ require 'dbd/provenance_resource'
+ require 'dbd/graph'
+
+ require 'dbd/rdf'
+ require 'dbd/repo'