rpbertp13-dm-core 0.9.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. data/.autotest +26 -0
  2. data/.gitignore +18 -0
  3. data/CONTRIBUTING +51 -0
  4. data/FAQ +92 -0
  5. data/History.txt +52 -0
  6. data/MIT-LICENSE +22 -0
  7. data/Manifest.txt +130 -0
  8. data/QUICKLINKS +11 -0
  9. data/README.txt +143 -0
  10. data/Rakefile +32 -0
  11. data/SPECS +62 -0
  12. data/TODO +1 -0
  13. data/dm-core.gemspec +40 -0
  14. data/lib/dm-core.rb +217 -0
  15. data/lib/dm-core/adapters.rb +16 -0
  16. data/lib/dm-core/adapters/abstract_adapter.rb +209 -0
  17. data/lib/dm-core/adapters/data_objects_adapter.rb +716 -0
  18. data/lib/dm-core/adapters/in_memory_adapter.rb +87 -0
  19. data/lib/dm-core/adapters/mysql_adapter.rb +138 -0
  20. data/lib/dm-core/adapters/postgres_adapter.rb +189 -0
  21. data/lib/dm-core/adapters/sqlite3_adapter.rb +105 -0
  22. data/lib/dm-core/associations.rb +207 -0
  23. data/lib/dm-core/associations/many_to_many.rb +147 -0
  24. data/lib/dm-core/associations/many_to_one.rb +107 -0
  25. data/lib/dm-core/associations/one_to_many.rb +315 -0
  26. data/lib/dm-core/associations/one_to_one.rb +61 -0
  27. data/lib/dm-core/associations/relationship.rb +221 -0
  28. data/lib/dm-core/associations/relationship_chain.rb +81 -0
  29. data/lib/dm-core/auto_migrations.rb +105 -0
  30. data/lib/dm-core/collection.rb +670 -0
  31. data/lib/dm-core/dependency_queue.rb +32 -0
  32. data/lib/dm-core/hook.rb +11 -0
  33. data/lib/dm-core/identity_map.rb +42 -0
  34. data/lib/dm-core/is.rb +16 -0
  35. data/lib/dm-core/logger.rb +232 -0
  36. data/lib/dm-core/migrations/destructive_migrations.rb +17 -0
  37. data/lib/dm-core/migrator.rb +29 -0
  38. data/lib/dm-core/model.rb +526 -0
  39. data/lib/dm-core/naming_conventions.rb +84 -0
  40. data/lib/dm-core/property.rb +676 -0
  41. data/lib/dm-core/property_set.rb +169 -0
  42. data/lib/dm-core/query.rb +676 -0
  43. data/lib/dm-core/repository.rb +167 -0
  44. data/lib/dm-core/resource.rb +671 -0
  45. data/lib/dm-core/scope.rb +58 -0
  46. data/lib/dm-core/support.rb +7 -0
  47. data/lib/dm-core/support/array.rb +13 -0
  48. data/lib/dm-core/support/assertions.rb +8 -0
  49. data/lib/dm-core/support/errors.rb +23 -0
  50. data/lib/dm-core/support/kernel.rb +11 -0
  51. data/lib/dm-core/support/symbol.rb +41 -0
  52. data/lib/dm-core/transaction.rb +252 -0
  53. data/lib/dm-core/type.rb +160 -0
  54. data/lib/dm-core/type_map.rb +80 -0
  55. data/lib/dm-core/types.rb +19 -0
  56. data/lib/dm-core/types/boolean.rb +7 -0
  57. data/lib/dm-core/types/discriminator.rb +34 -0
  58. data/lib/dm-core/types/object.rb +24 -0
  59. data/lib/dm-core/types/paranoid_boolean.rb +34 -0
  60. data/lib/dm-core/types/paranoid_datetime.rb +33 -0
  61. data/lib/dm-core/types/serial.rb +9 -0
  62. data/lib/dm-core/types/text.rb +10 -0
  63. data/lib/dm-core/version.rb +3 -0
  64. data/script/all +4 -0
  65. data/script/performance.rb +282 -0
  66. data/script/profile.rb +87 -0
  67. data/spec/integration/association_spec.rb +1382 -0
  68. data/spec/integration/association_through_spec.rb +203 -0
  69. data/spec/integration/associations/many_to_many_spec.rb +449 -0
  70. data/spec/integration/associations/many_to_one_spec.rb +163 -0
  71. data/spec/integration/associations/one_to_many_spec.rb +188 -0
  72. data/spec/integration/auto_migrations_spec.rb +413 -0
  73. data/spec/integration/collection_spec.rb +1073 -0
  74. data/spec/integration/data_objects_adapter_spec.rb +32 -0
  75. data/spec/integration/dependency_queue_spec.rb +46 -0
  76. data/spec/integration/model_spec.rb +197 -0
  77. data/spec/integration/mysql_adapter_spec.rb +85 -0
  78. data/spec/integration/postgres_adapter_spec.rb +731 -0
  79. data/spec/integration/property_spec.rb +253 -0
  80. data/spec/integration/query_spec.rb +514 -0
  81. data/spec/integration/repository_spec.rb +61 -0
  82. data/spec/integration/resource_spec.rb +513 -0
  83. data/spec/integration/sqlite3_adapter_spec.rb +352 -0
  84. data/spec/integration/sti_spec.rb +273 -0
  85. data/spec/integration/strategic_eager_loading_spec.rb +156 -0
  86. data/spec/integration/transaction_spec.rb +60 -0
  87. data/spec/integration/type_spec.rb +275 -0
  88. data/spec/lib/logging_helper.rb +18 -0
  89. data/spec/lib/mock_adapter.rb +27 -0
  90. data/spec/lib/model_loader.rb +100 -0
  91. data/spec/lib/publicize_methods.rb +28 -0
  92. data/spec/models/content.rb +16 -0
  93. data/spec/models/vehicles.rb +34 -0
  94. data/spec/models/zoo.rb +48 -0
  95. data/spec/spec.opts +3 -0
  96. data/spec/spec_helper.rb +91 -0
  97. data/spec/unit/adapters/abstract_adapter_spec.rb +133 -0
  98. data/spec/unit/adapters/adapter_shared_spec.rb +15 -0
  99. data/spec/unit/adapters/data_objects_adapter_spec.rb +632 -0
  100. data/spec/unit/adapters/in_memory_adapter_spec.rb +98 -0
  101. data/spec/unit/adapters/postgres_adapter_spec.rb +133 -0
  102. data/spec/unit/associations/many_to_many_spec.rb +32 -0
  103. data/spec/unit/associations/many_to_one_spec.rb +159 -0
  104. data/spec/unit/associations/one_to_many_spec.rb +393 -0
  105. data/spec/unit/associations/one_to_one_spec.rb +7 -0
  106. data/spec/unit/associations/relationship_spec.rb +71 -0
  107. data/spec/unit/associations_spec.rb +242 -0
  108. data/spec/unit/auto_migrations_spec.rb +111 -0
  109. data/spec/unit/collection_spec.rb +182 -0
  110. data/spec/unit/data_mapper_spec.rb +35 -0
  111. data/spec/unit/identity_map_spec.rb +126 -0
  112. data/spec/unit/is_spec.rb +80 -0
  113. data/spec/unit/migrator_spec.rb +33 -0
  114. data/spec/unit/model_spec.rb +321 -0
  115. data/spec/unit/naming_conventions_spec.rb +36 -0
  116. data/spec/unit/property_set_spec.rb +90 -0
  117. data/spec/unit/property_spec.rb +753 -0
  118. data/spec/unit/query_spec.rb +571 -0
  119. data/spec/unit/repository_spec.rb +93 -0
  120. data/spec/unit/resource_spec.rb +649 -0
  121. data/spec/unit/scope_spec.rb +142 -0
  122. data/spec/unit/transaction_spec.rb +469 -0
  123. data/spec/unit/type_map_spec.rb +114 -0
  124. data/spec/unit/type_spec.rb +119 -0
  125. data/tasks/ci.rb +36 -0
  126. data/tasks/dm.rb +63 -0
  127. data/tasks/doc.rb +20 -0
  128. data/tasks/gemspec.rb +23 -0
  129. data/tasks/hoe.rb +46 -0
  130. data/tasks/install.rb +20 -0
  131. metadata +215 -0
@@ -0,0 +1,32 @@
1
module DataMapper
  ##
  # DataMapper's DependencyQueue is used to store callbacks for classes which
  # may or may not be loaded already.
  #
  # Callbacks registered via #add are invoked (exactly once) with the resolved
  # class as soon as the class name can be resolved to a constant.
  class DependencyQueue
    def initialize
      # class name (String) => Array of callbacks still waiting to run
      @dependencies = {}
    end

    ##
    # Register +callback+ to be run once the constant named +class_name+ is
    # defined. Resolution is attempted immediately.
    #
    # @param [String] class_name name of the (possibly not yet loaded) class
    def add(class_name, &callback)
      (@dependencies[class_name] ||= []) << callback
      resolve!
    end

    ##
    # Attempt to resolve every pending class name.
    #
    # Names that cannot be resolved yet (NameError) stay queued for a later
    # attempt. Fully-resolved entries are removed from the queue so they are
    # not re-scanned on every subsequent #add.
    def resolve!
      # iterate over a key snapshot so entries can be deleted safely
      @dependencies.keys.each do |class_name|
        begin
          klass     = Object.find_const(class_name)
          callbacks = @dependencies[class_name]

          # run-then-shift: if a callback raises NameError it stays queued
          # for retry, but callbacks that already ran are removed so they
          # can never be invoked a second time on a later resolve!
          until callbacks.empty?
            callbacks.first.call(klass)
            callbacks.shift
          end

          @dependencies.delete(class_name)
        rescue NameError
          # constant (or something a callback needs) is not defined yet;
          # leave the remaining callbacks queued and retry later
        end
      end
    end
  end # class DependencyQueue
end # module DataMapper
@@ -0,0 +1,11 @@
1
+ module DataMapper
2
+ # Wires Extlib::Hook into models so that the persistence entry points
+ # (:save, :create, :update, :destroy) are registered as hookable.
+ module Hook
3
+ # Evaluates the heredoc in +model+'s own class context (__FILE__ and
+ # __LINE__ keep backtraces pointing at this file): mixes in Extlib::Hook
+ # and registers the four persistence methods as instance hooks.
+ def self.included(model)
4
+ model.class_eval <<-EOS, __FILE__, __LINE__
5
+ include Extlib::Hook
6
+ register_instance_hooks :save, :create, :update, :destroy
7
+ EOS
8
+ end
9
+ end
10
+ # Ensure Hook is pulled into every class that includes DataMapper::Resource.
+ DataMapper::Resource.append_inclusions Hook
11
+ end # module DataMapper
@@ -0,0 +1,42 @@
1
module DataMapper

  # Tracks objects to help ensure that each object gets loaded only once.
  # See: http://www.martinfowler.com/eaaCatalog/identityMap.html
  class IdentityMap
    # Get a resource from the IdentityMap.
    #
    # Falls back to the (optional) second level cache when the key is not
    # present in the in-memory cache.
    #
    # @param [Array] key the key values identifying the resource
    # @return [Object, NilClass] the cached resource, or nil
    def get(key)
      @cache[key] || (@second_level_cache && @second_level_cache.get(key))
    end

    alias [] get

    # Add a resource to the IdentityMap.
    #
    # Writes through to the second level cache when one is configured.
    #
    # @param [Array] key the key values identifying the resource
    # @param [Object] resource the resource to cache
    # @return [Object] resource
    def set(key, resource)
      @second_level_cache.set(key, resource) if @second_level_cache
      @cache[key] = resource
    end

    alias []= set

    # Remove a resource from the IdentityMap (and the second level cache).
    #
    # @param [Array] key the key values identifying the resource
    # @return [Object, NilClass] the resource removed from the primary cache
    def delete(key)
      @second_level_cache.delete(key) if @second_level_cache
      @cache.delete(key)
    end

    private

    # @param [#get,#set,#delete] second_level_cache optional backing cache
    def initialize(second_level_cache = nil)
      @cache = {}
      @second_level_cache = second_level_cache
    end

    def cache
      @cache
    end

    # Delegate any other message (size, each, key?, ...) to the Hash cache.
    def method_missing(method, *args, &block)
      cache.__send__(method, *args, &block)
    end

    # Keep respond_to? truthful for the messages handled via method_missing.
    def respond_to_missing?(method, include_private = false)
      cache.respond_to?(method, include_private) || super
    end
  end # class IdentityMap
end # module DataMapper
data/lib/dm-core/is.rb ADDED
@@ -0,0 +1,16 @@
1
module DataMapper
  module Is

    # Dispatches to a plugin generator method: `is :list, ...` invokes
    # #is_list, which a plugin is expected to have defined on the model.
    #
    # @param [Symbol, String] plugin name of the plugin to activate
    # @raise [PluginNotFoundError] when no is_<plugin> generator exists
    def is(plugin, *pars, &block)
      generator_method = :"is_#{plugin}"

      unless respond_to?(generator_method)
        raise PluginNotFoundError, "could not find plugin named #{plugin}"
      end

      send(generator_method, *pars, &block)
    end

    Model.send(:include, self)
  end # module Is
end # module DataMapper
@@ -0,0 +1,232 @@
1
+ require "time" # httpdate
2
+ # ==== Public DataMapper Logger API
3
+ #
4
+ # Logger taken from Merb :)
5
+ #
6
+ # To replace an existing logger with a new one:
7
+ # DataMapper.logger.set_log(log{String, IO},level{Symbol, String})
8
+ #
9
+ # Available logging levels are:
10
+ # :off, :fatal, :error, :warn, :info, :debug
11
+ #
12
+ # Logging via:
13
+ # DataMapper.logger.fatal(message<String>)
14
+ # DataMapper.logger.error(message<String>)
15
+ # DataMapper.logger.warn(message<String>)
16
+ # DataMapper.logger.info(message<String>)
17
+ # DataMapper.logger.debug(message<String>)
18
+ #
19
+ # Flush the buffer to
20
+ # DataMapper.logger.flush
21
+ #
22
+ # Remove the current log object
23
+ # DataMapper.logger.close
24
+ #
25
+ # ==== Private DataMapper Logger API
26
+ #
27
+ # To initialize the logger you create a new object, proxies to set_log.
28
+ # DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
29
+ #
30
+ # Logger will not create the file until something is actually logged
31
+ # This avoids file creation on DataMapper init when it creates the
32
+ # default logger.
33
+ module DataMapper
34
+
35
+ class << self #:nodoc:
36
+ attr_accessor :logger
37
+ end
38
+
39
+ class Logger
40
+
41
+ attr_accessor :aio
42
+ attr_accessor :delimiter
43
+ attr_reader :level
44
+ attr_reader :buffer
45
+ attr_reader :log
46
+
47
+ # @note
48
+ # Ruby (standard) logger levels:
49
+ # off: absolutely nothing
50
+ # fatal: an unhandleable error that results in a program crash
51
+ # error: a handleable error condition
52
+ # warn: a warning
53
+ # info: generic (useful) information about system operation
54
+ # debug: low-level information for developers
55
+ #
56
+ # DataMapper::Logger::LEVELS[:off, :fatal, :error, :warn, :info, :debug]
57
+ LEVELS =
58
+ {
59
+ :off => 99999,
60
+ :fatal => 7,
61
+ :error => 6,
62
+ :warn => 4,
63
+ :info => 3,
64
+ :debug => 0
65
+ }
66
+
67
+ def level=(new_level)
68
+ @level = LEVELS[new_level.to_sym]
69
+ reset_methods(:close)
70
+ end
71
+
72
+ private
73
+
74
+ # The idea here is that instead of performing an 'if' conditional check on
75
+ # each logging we do it once when the log object is setup
76
+ # Installs a singleton #write_method on the @log IO object so that the
+ # "can we use async IO?" decision is made once at setup time rather than
+ # on every logging call.
+ def set_write_method
77
+ @log.instance_eval do
78
+
79
+ # Determine if asynchronous IO can be used
80
+ # NOTE(review): inside instance_eval, @aio and @log are instance
+ # variables of the log IO object itself (normally unset), not of the
+ # Logger -- so the !(@log == STDOUT) check is likely always true
+ # here; confirm this is intended.
+ def aio?
81
+ @aio = !RUBY_PLATFORM.match(/java|mswin/) &&
82
+ !(@log == STDOUT) &&
83
+ @log.respond_to?(:write_nonblock)
84
+ end
85
+
86
+ # Define the write method based on whether aio can be used
87
+ undef write_method if defined? write_method
88
+ if aio?
89
+ alias :write_method :write_nonblock
90
+ else
91
+ alias :write_method :write
92
+ end
93
+ end
94
+ end
95
+
96
+ def initialize_log(log)
97
+ close if @log # be sure that we don't leave open files laying around.
98
+ @log = log || "log/dm.log"
99
+ end
100
+
101
+ # Swap the target of #internal_push between the opened/closed variants.
+ #
+ # NOTE(review): `alias` is a keyword; even though it sits inside a method
+ # body it rebinds the method on the Logger class when this method runs,
+ # so the switch affects every Logger instance, not just the receiver.
+ def reset_methods(o_or_c)
102
+ if o_or_c == :open
103
+ alias internal_push push_opened
104
+ elsif o_or_c == :close
105
+ alias internal_push push_closed
106
+ end
107
+ end
108
+
109
+ def push_opened(string)
110
+ message = Time.now.httpdate
111
+ message << delimiter
112
+ message << string
113
+ message << "\n" unless message[-1] == ?\n
114
+ @buffer << message
115
+ flush # Force a flush for now until we figure out where we want to use the buffering.
116
+ end
117
+
118
+ # First push after the log has been closed (or never opened): lazily
+ # opens the log, installs the write method, switches internal_push over
+ # to the opened variant, then re-dispatches the message through #push.
+ def push_closed(string)
119
+ unless @log.respond_to?(:write)
120
+ # @log is a path rather than an IO: create the directory and open
+ # the file in append mode with sync enabled
+ log = Pathname(@log)
121
+ log.dirname.mkpath
122
+ @log = log.open('a')
123
+ @log.sync = true
124
+ end
125
+ set_write_method
126
+ reset_methods(:open)
127
+ push(string)
128
+ end
129
+
130
+ alias internal_push push_closed
131
+
132
+ # Prefix +message+ with its level name and the delimiter.
+ #
+ # NOTE(review): << mutates the +level+ string in place; the generated
+ # level methods below pass a freshly built "#{name}" string each call,
+ # so no shared literal is modified -- confirm before passing a cached
+ # or frozen string here.
+ def prep_msg(message, level)
133
+ level << delimiter << message
134
+ end
135
+
136
+ public
137
+
138
+ # To initialize the logger you create a new object, proxies to set_log.
139
+ # DataMapper::Logger.new(log{String, IO},level{Symbol, String})
140
+ #
141
+ # @param log<IO,String> either an IO object or a name of a logfile.
142
+ # @param log_level<String> the message string to be logged
143
+ # @param delimiter<String> delimiter to use between message sections
144
+ # @param log_creation<Boolean> log that the file is being created
145
+ def initialize(*args)
146
+ set_log(*args)
147
+ end
148
+
149
+ # To replace an existing logger with a new one:
150
+ # DataMapper.logger.set_log(log{String, IO},level{Symbol, String})
151
+ #
152
+ # @param log<IO,String> either an IO object or a name of a logfile.
153
+ # @param log_level<Symbol> a symbol representing the log level from
154
+ # {:off, :fatal, :error, :warn, :info, :debug}
155
+ # @param delimiter<String> delimiter to use between message sections
156
+ # @param log_creation<Boolean> log that the file is being created
157
+ def set_log(log, log_level = :off, delimiter = " ~ ", log_creation = false)
158
+ delimiter ||= " ~ "
159
+
160
+ if log_level && LEVELS[log_level.to_sym]
161
+ self.level = log_level.to_sym
162
+ else
163
+ self.level = :debug
164
+ end
165
+
166
+ @buffer = []
167
+ @delimiter = delimiter
168
+
169
+ initialize_log(log)
170
+
171
+ DataMapper.logger = self
172
+
173
+ self.info("Logfile created") if log_creation
174
+ end
175
+
176
+ # Flush the entire buffer to the log object.
177
+ # DataMapper.logger.flush
178
+ #
179
+ def flush
180
+ return unless @buffer.size > 0
181
+ # write_method is the singleton installed on @log by set_write_method
+ # (write_nonblock when async IO is usable, plain write otherwise);
+ # slice!(0..-1) empties the buffer and returns the drained messages.
+ @log.write_method(@buffer.slice!(0..-1).join)
182
+ end
183
+
184
+ # Close and remove the current log object.
185
+ # DataMapper.logger.close
186
+ #
187
+ def close
188
+ flush
189
+ @log.close if @log.respond_to?(:close)
190
+ @log = nil
191
+ end
192
+
193
+ # Appends a string and log level to logger's buffer.
194
+
195
+ # @note
196
+ # Note that the string is discarded if the string's log level less than the
197
+ # logger's log level.
198
+ # @note
199
+ # Note that if the logger is aio capable then the logger will use
200
+ # non-blocking asynchronous writes.
201
+ #
202
+ # @param level<Fixnum> the logging level as an integer
203
+ # @param string<String> the message string to be logged
204
+ def push(string)
205
+ internal_push(string)
206
+ end
207
+ alias << push
208
+
209
+ # Generate the following logging methods for DataMapper.logger as described
210
+ # in the API:
211
+ # :fatal, :error, :warn, :info, :debug
212
+ # :off only gets a off? method
213
+ LEVELS.each_pair do |name, number|
214
+ unless name.to_s == 'off'
215
+ class_eval <<-EOS, __FILE__, __LINE__
216
+ # DOC
217
+ def #{name}(message)
218
+ self.<<( prep_msg(message, "#{name}") ) if #{name}?
219
+ end
220
+ EOS
221
+ end
222
+
223
+ class_eval <<-EOS, __FILE__, __LINE__
224
+ # DOC
225
+ def #{name}?
226
+ #{number} >= level
227
+ end
228
+ EOS
229
+ end
230
+
231
+ end # class Logger
232
+ end # module DataMapper
@@ -0,0 +1,17 @@
1
# TODO: move to dm-more/dm-migrations

module DataMapper
  # Including this module registers the model with DestructiveMigrator,
  # whose strategy is simply to auto_migrate! each registered model.
  module DestructiveMigrations
    def self.included(model)
      DestructiveMigrator.models << model
    end
  end # module DestructiveMigrations

  # Migrator that destructively recreates storage for every registered
  # model via auto_migrate!.
  class DestructiveMigrator < Migrator
    def self.migrate(repository_name)
      models.each { |model| model.auto_migrate! }
    end
  end # class DestructiveMigrator
end # module DataMapper
@@ -0,0 +1,29 @@
1
# TODO: move to dm-more/dm-migrations

module DataMapper
  # Base class for migration strategies. Inheriting from Migrator
  # registers the subclass, and each subclass gets its own +models+
  # registry for the models it is responsible for migrating.
  class Migrator
    # All Migrator subclasses registered via inheritance.
    def self.subclasses
      @@subclasses ||= []
    end

    def self.subclasses=(obj)
      @@subclasses = obj
    end

    def self.inherited(klass)
      subclasses << klass

      class << klass
        # Models registered with this migrator subclass.
        def models
          @models ||= []
        end
      end
    end

    # Run every registered migrator against +repository_name+ and return
    # the flattened list of their results.
    def self.migrate(repository_name)
      subclasses.collect { |migrator| migrator.migrate(repository_name) }.flatten
    end
  end # class Migrator
end # module DataMapper
@@ -0,0 +1,526 @@
1
+ require 'set'
2
+
3
+ module DataMapper
4
+ module Model
5
+ ##
6
+ #
7
+ # Extends the model with this module after DataMapper::Resource has been
8
+ # included.
9
+ #
10
+ # This is a useful way to extend DataMapper::Model while
11
+ # still retaining a self.extended method.
12
+ #
13
+ # @param [Module] extensions the module that is to be extend the model after
14
+ # after DataMapper::Model
15
+ #
16
+ # @return [TrueClass, FalseClass] whether or not the inclusions have been
17
+ # successfully appended to the list
18
+ #-
19
+ # @api public
20
+ #
21
+ # TODO: Move this do DataMapper::Model when DataMapper::Model is created
22
+ def self.append_extensions(*extensions)
23
+ extra_extensions.concat extensions
24
+ true
25
+ end
26
+
27
+ def self.extra_extensions
28
+ @extra_extensions ||= []
29
+ end
30
+
31
+ def self.extended(model)
32
+ model.instance_variable_set(:@storage_names, {})
33
+ model.instance_variable_set(:@properties, {})
34
+ model.instance_variable_set(:@field_naming_conventions, {})
35
+ extra_extensions.each { |extension| model.extend(extension) }
36
+ end
37
+
38
+ def inherited(target)
39
+ target.instance_variable_set(:@storage_names, @storage_names.dup)
40
+ target.instance_variable_set(:@properties, {})
41
+ target.instance_variable_set(:@base_model, self.base_model)
42
+ target.instance_variable_set(:@paranoid_properties, @paranoid_properties)
43
+ target.instance_variable_set(:@field_naming_conventions, @field_naming_conventions.dup)
44
+
45
+ if self.respond_to?(:validators)
46
+ @validations.contexts.each do |context, validators|
47
+ validators.each { |validator| target.validators.context(context) << validator }
48
+ end
49
+ end
50
+
51
+ @properties.each do |repository_name,properties|
52
+ repository(repository_name) do
53
+ properties.each do |property|
54
+ next if target.properties(repository_name).has_property?(property.name)
55
+ target.property(property.name, property.type, property.options.dup)
56
+ end
57
+ end
58
+ end
59
+
60
+ if @relationships
61
+ duped_relationships = {}
62
+ @relationships.each do |repository_name,relationships|
63
+ relationships.each do |name, relationship|
64
+ dup = relationship.dup
65
+ dup.instance_variable_set(:@child_model, target) if dup.instance_variable_get(:@child_model) == self
66
+ dup.instance_variable_set(:@parent_model, target) if dup.instance_variable_get(:@parent_model) == self
67
+ duped_relationships[repository_name] ||= {}
68
+ duped_relationships[repository_name][name] = dup
69
+ end
70
+ end
71
+ target.instance_variable_set(:@relationships, duped_relationships)
72
+ end
73
+ end
74
+
75
+ def self.new(storage_name, &block)
76
+ model = Class.new
77
+ model.send(:include, Resource)
78
+ model.class_eval <<-EOS, __FILE__, __LINE__
79
+ def self.default_storage_name
80
+ #{Extlib::Inflection.classify(storage_name).inspect}
81
+ end
82
+ EOS
83
+ model.instance_eval(&block) if block_given?
84
+ model
85
+ end
86
+
87
+ def base_model
88
+ @base_model ||= self
89
+ end
90
+
91
# Name of the repository currently in effect: the innermost active
# repository context if there is one, otherwise this model's default
# repository.
def repository_name
  context = Repository.context
  if context.any?
    context.last.name
  else
    default_repository_name
  end
end
94
+
95
+ ##
96
+ # Get the repository with a given name, or the default one for the current
97
+ # context, or the default one for this class.
98
+ #
99
+ # @param name<Symbol> the name of the repository wanted
100
+ # @param block<Block> block to execute with the fetched repository as parameter
101
+ #
102
+ # @return <Object, DataMapper::Respository> whatever the block returns,
103
+ # if given a block, otherwise the requested repository.
104
+ #-
105
+ # @api public
106
+ def repository(name = nil)
107
+ #
108
+ # There has been a couple of different strategies here, but me (zond) and dkubb are at least
109
+ # united in the concept of explicitness over implicitness. That is - the explicit wish of the
110
+ # caller (+name+) should be given more priority than the implicit wish of the caller (Repository.context.last).
111
+ #
112
+ if block_given?
113
+ DataMapper.repository(name || repository_name) { |*block_args| yield(*block_args) }
114
+ else
115
+ DataMapper.repository(name || repository_name)
116
+ end
117
+ end
118
+
119
+ ##
120
+ # the name of the storage recepticle for this resource. IE. table name, for database stores
121
+ #
122
+ # @return <String> the storage name (IE table name, for database stores) associated with this resource in the given repository
123
+ def storage_name(repository_name = default_repository_name)
124
+ @storage_names[repository_name] ||= repository(repository_name).adapter.resource_naming_convention.call(base_model.send(:default_storage_name))
125
+ end
126
+
127
+ ##
128
+ # the names of the storage recepticles for this resource across all repositories
129
+ #
130
+ # @return <Hash(Symbol => String)> All available names of storage recepticles
131
+ def storage_names
132
+ @storage_names
133
+ end
134
+
135
+ ##
136
+ # The field naming conventions for this resource across all repositories.
137
+ #
138
+ # @return <String> The naming convention for the given repository
139
# The field naming convention for this resource in the given repository.
#
# Fixed: the parameter defaulted to `default_storage_name` (a storage /
# class name such as "User"), which is not a repository name -- it was
# inconsistent with #storage_name's `default_repository_name` default and
# with the parameter's own meaning, and would be passed to #repository.
#
# @param [Symbol] repository_name repository whose convention to return
# @return [#call] the adapter's field naming convention
def field_naming_convention(repository_name = default_repository_name)
  @field_naming_conventions[repository_name] ||= repository(repository_name).adapter.field_naming_convention
end
142
+
143
+ ##
144
+ # defines a property on the resource
145
+ #
146
+ # @param <Symbol> name the name for which to call this property
147
+ # @param <Type> type the type to define this property ass
148
+ # @param <Hash(Symbol => String)> options a hash of available options
149
+ # @see DataMapper::Property
150
+ def property(name, type, options = {})
151
+ property = Property.new(self, name, type, options)
152
+
153
+ create_property_getter(property)
154
+ create_property_setter(property)
155
+
156
+ properties(repository_name)[property.name] = property
157
+ @_valid_relations = false
158
+
159
+ # Add property to the other mappings as well if this is for the default
160
+ # repository.
161
+ if repository_name == default_repository_name
162
+ @properties.each_pair do |repository_name, properties|
163
+ next if repository_name == default_repository_name
164
+ properties << property unless properties.has_property?(property.name)
165
+ end
166
+ end
167
+
168
+ # Add the property to the lazy_loads set for this resources repository
169
+ # only.
170
+ # TODO Is this right or should we add the lazy contexts to all
171
+ # repositories?
172
+ if property.lazy?
173
+ context = options.fetch(:lazy, :default)
174
+ context = :default if context == true
175
+
176
+ Array(context).each do |item|
177
+ properties(repository_name).lazy_context(item) << name
178
+ end
179
+ end
180
+
181
+ # add the property to the child classes only if the property was
182
+ # added after the child classes' properties have been copied from
183
+ # the parent
184
+ if respond_to?(:descendants)
185
+ descendants.each do |model|
186
+ next if model.properties(repository_name).has_property?(name)
187
+ model.property(name, type, options)
188
+ end
189
+ end
190
+
191
+ property
192
+ end
193
+
194
+ def repositories
195
+ [ repository ].to_set + @properties.keys.collect { |repository_name| DataMapper.repository(repository_name) }
196
+ end
197
+
198
+ def properties(repository_name = default_repository_name)
199
+ # We need to check whether all relations are already set up.
200
+ # If this isn't the case, we try to reload them here
201
+ if !@_valid_relations && respond_to?(:many_to_one_relationships)
202
+ @_valid_relations = true
203
+ begin
204
+ many_to_one_relationships.each do |r|
205
+ r.child_key
206
+ end
207
+ rescue NameError
208
+ # Apparently not all relations are loaded,
209
+ # so we will try again later on
210
+ @_valid_relations = false
211
+ end
212
+ end
213
+ @properties[repository_name] ||= repository_name == Repository.default_name ? PropertySet.new : properties(Repository.default_name).dup
214
+ end
215
+
216
+ def eager_properties(repository_name = default_repository_name)
217
+ properties(repository_name).defaults
218
+ end
219
+
220
+ # @api private
221
+ def properties_with_subclasses(repository_name = default_repository_name)
222
+ properties = PropertySet.new
223
+ ([ self ].to_set + (respond_to?(:descendants) ? descendants : [])).each do |model|
224
+ model.relationships(repository_name).each_value { |relationship| relationship.child_key }
225
+ model.many_to_one_relationships.each do |relationship| relationship.child_key end
226
+ model.properties(repository_name).each do |property|
227
+ properties << property unless properties.has_property?(property.name)
228
+ end
229
+ end
230
+ properties
231
+ end
232
+
233
+ def key(repository_name = default_repository_name)
234
+ properties(repository_name).key
235
+ end
236
+
237
+ def inheritance_property(repository_name = default_repository_name)
238
+ @properties[repository_name].detect { |property| property.type == DataMapper::Types::Discriminator }
239
+ end
240
+
241
+ def default_order(repository_name = default_repository_name)
242
+ @default_order ||= {}
243
+ @default_order[repository_name] ||= key(repository_name).map { |property| Query::Direction.new(property) }
244
+ end
245
+
246
+ def get(*key)
247
+ key = typecast_key(key)
248
+ repository.identity_map(self).get(key) || first(to_query(repository, key))
249
+ end
250
+
251
# Like #get, but raises when no resource with the given key exists.
#
# @raise [ObjectNotFoundError] if the key matches no resource
def get!(*key)
  resource = get(*key)
  unless resource
    raise ObjectNotFoundError, "Could not find #{self.name} with key #{key.inspect}"
  end
  resource
end
254
+
255
+ def all(query = {})
256
+ query = scoped_query(query)
257
+ query.repository.read_many(query)
258
+ end
259
+
260
+ def first(*args)
261
+ query = args.last.respond_to?(:merge) ? args.pop : {}
262
+ query = scoped_query(query.merge(:limit => args.first || 1))
263
+
264
+ if args.any?
265
+ query.repository.read_many(query)
266
+ else
267
+ query.repository.read_one(query)
268
+ end
269
+ end
270
+
271
+ def [](*key)
272
+ warn("#{name}[] is deprecated. Use #{name}.get! instead.")
273
+ get!(*key)
274
+ end
275
+
276
+ def first_or_create(query, attributes = {})
277
+ first(query) || begin
278
+ resource = allocate
279
+ query = query.dup
280
+
281
+ properties(repository_name).key.each do |property|
282
+ if value = query.delete(property.name)
283
+ resource.send("#{property.name}=", value)
284
+ end
285
+ end
286
+
287
+ resource.attributes = query.merge(attributes)
288
+ resource.save
289
+ resource
290
+ end
291
+ end
292
+
293
+ ##
294
+ # Create an instance of Resource with the given attributes
295
+ #
296
+ # @param <Hash(Symbol => Object)> attributes hash of attributes to set
297
##
# Build a new Resource from +attributes+ and attempt to save it. The
# resource is returned whether or not the save succeeded.
#
# @param <Hash(Symbol => Object)> attributes hash of attributes to set
def create(attributes = {})
  new(attributes).tap { |resource| resource.save }
end
302
+
303
+ ##
304
+ # This method is deprecated, and will be removed from dm-core.
305
+ #
306
+ def create!(attributes = {})
307
+ warn("Model#create! is deprecated. It is moving to dm-validations, and will be used to create a record without validations")
308
+ resource = create(attributes)
309
+ raise PersistenceError, "Resource not saved: :new_record => #{resource.new_record?}, :dirty_attributes => #{resource.dirty_attributes.inspect}" if resource.new_record?
310
+ resource
311
+ end
312
+
313
+ ##
314
+ # Copy a set of records from one repository to another.
315
+ #
316
+ # @param [String] source
317
+ # The name of the Repository the resources should be copied _from_
318
+ # @param [String] destination
319
+ # The name of the Repository the resources should be copied _to_
320
+ # @param [Hash] query
321
+ # The conditions with which to find the records to copy. These
322
+ # conditions are merged with Model.query
323
+ #
324
+ # @return [DataMapper::Collection]
325
+ # A Collection of the Resource instances created in the operation
326
+ #
327
+ # @api public
328
+ def copy(source, destination, query = {})
329
+
330
+ # get the list of properties that exist in the source and destination
331
+ destination_properties = properties(destination)
332
+ fields = query[:fields] ||= properties(source).select { |p| destination_properties.has_property?(p.name) }
333
+
334
+ repository(destination) do
335
+ all(query.merge(:repository => repository(source))).map do |resource|
336
+ create(fields.map { |p| [ p.name, p.get(resource) ] }.to_hash)
337
+ end
338
+ end
339
+ end
340
+
341
+ # @api private
342
+ # TODO: spec this
343
+ def load(values, query)
344
+ repository = query.repository
345
+ model = self
346
+
347
+ if inheritance_property_index = query.inheritance_property_index
348
+ model = values.at(inheritance_property_index) || model
349
+ end
350
+
351
+ key_values = nil
352
+ identity_map = nil
353
+
354
+ if key_property_indexes = query.key_property_indexes(repository)
355
+ key_values = values.values_at(*key_property_indexes)
356
+ identity_map = repository.identity_map(model)
357
+
358
+ if resource = identity_map.get(key_values)
359
+ return resource unless query.reload?
360
+ else
361
+ resource = model.allocate
362
+ resource.instance_variable_set(:@repository, repository)
363
+ end
364
+ else
365
+ resource = model.allocate
366
+ resource.readonly!
367
+ end
368
+
369
+ resource.instance_variable_set(:@new_record, false)
370
+
371
+ query.fields.zip(values) do |property,value|
372
+ value = property.custom? ? property.type.load(value, property) : property.typecast(value)
373
+ property.set!(resource, value)
374
+
375
+ if track = property.track
376
+ case track
377
+ when :hash
378
+ resource.original_values[property.name] = value.dup.hash unless resource.original_values.has_key?(property.name) rescue value.hash
379
+ when :load
380
+ resource.original_values[property.name] = value unless resource.original_values.has_key?(property.name)
381
+ end
382
+ end
383
+ end
384
+
385
+ if key_values && identity_map
386
+ identity_map.set(key_values, resource)
387
+ end
388
+
389
+ resource
390
+ end
391
+
392
+ # TODO: spec this
393
+ def to_query(repository, key, query = {})
394
+ conditions = Hash[ *self.key(repository.name).zip(key).flatten ]
395
+ Query.new(repository, self, query.merge(conditions))
396
+ end
397
+
398
# Rebuild a resource from Marshal data: allocate a bare instance and
# restore each [ivar_name, value] pair onto it.
#
# NOTE(review): Marshal.load on untrusted input is unsafe; presumably
# this is only fed data the application itself marshalled -- confirm.
# @api private
def _load(marshalled)
  instance = allocate
  Marshal.load(marshalled).each do |name, value|
    instance.instance_variable_set(name, value)
  end
  instance
end
405
+
406
+ def typecast_key(key)
407
+ self.key(repository_name).zip(key).map { |k, v| k.typecast(v) }
408
+ end
409
+
410
+ def default_repository_name
411
+ Repository.default_name
412
+ end
413
+
414
# Registry of paranoid properties for this model, keyed by property name
# (populated via #set_paranoid_property).
#
# @return [Hash] the lazily initialized registry
def paranoid_properties
  # ||= both initializes and returns the hash; the separate trailing
  # read in the original was redundant
  @paranoid_properties ||= {}
end
418
+
419
+ private
420
+
421
+ def default_storage_name
422
+ self.name
423
+ end
424
+
425
+ def scoped_query(query = self.query)
426
+ assert_kind_of 'query', query, Query, Hash
427
+
428
+ return self.query if query == self.query
429
+
430
+ query = if query.kind_of?(Hash)
431
+ Query.new(query.has_key?(:repository) ? query.delete(:repository) : self.repository, self, query)
432
+ else
433
+ query
434
+ end
435
+
436
+ if self.query
437
+ self.query.merge(query)
438
+ else
439
+ merge_with_default_scope(query)
440
+ end
441
+ end
442
+
443
+ def set_paranoid_property(name, &block)
444
+ self.paranoid_properties[name] = block
445
+ end
446
+
447
+ # defines the getter for the property
448
+ def create_property_getter(property)
449
+ class_eval <<-EOS, __FILE__, __LINE__
450
+ #{property.reader_visibility}
451
+ def #{property.getter}
452
+ attribute_get(#{property.name.inspect})
453
+ end
454
+ EOS
455
+
456
+ if property.primitive == TrueClass && !instance_methods.map { |m| m.to_s }.include?(property.name.to_s)
457
+ class_eval <<-EOS, __FILE__, __LINE__
458
+ #{property.reader_visibility}
459
+ alias #{property.name} #{property.getter}
460
+ EOS
461
+ end
462
+ end
463
+
464
+ # defines the setter for the property
465
+ def create_property_setter(property)
466
+ unless instance_methods.map { |m| m.to_s }.include?("#{property.name}=")
467
+ class_eval <<-EOS, __FILE__, __LINE__
468
+ #{property.writer_visibility}
469
+ def #{property.name}=(value)
470
+ attribute_set(#{property.name.inspect}, value)
471
+ end
472
+ EOS
473
+ end
474
+ end
475
+
476
+ # Stub that fails loudly; presumably replaced by the associations
+ # module when it is loaded -- the stub exists only so that
+ # method_missing (below) has a well-defined failure mode.
+ #
+ # @raise [NotImplementedError] always, while the real implementation
+ # is not loaded
+ def relationships(*args)
477
+ # DO NOT REMOVE!
478
+ # method_missing depends on these existing. Without this stub,
479
+ # a missing module can cause misleading recursive errors.
480
+ raise NotImplementedError.new
481
+ end
482
+
483
+ # Resolves bare relationship / property references on the model class.
+ #
+ # 1. A relationship name returns a DataMapper::Query::Path rooted at
+ # that relationship, pointing at the model on the other side.
+ # 2. A property name returns the Property object itself.
+ # 3. Anything else falls through to super (normal NoMethodError).
+ def method_missing(method, *args, &block)
484
+ if relationship = self.relationships(repository_name)[method]
485
+ # pick whichever model of the relationship pair is NOT self
+ klass = self == relationship.child_model ? relationship.parent_model : relationship.child_model
486
+ return DataMapper::Query::Path.new(repository, [ relationship ], klass)
487
+ end
488
+
489
+ property_set = properties(repository_name)
490
+ if property_set.has_property?(method)
491
+ return property_set[method]
492
+ end
493
+
494
+ super
495
+ end
496
+
497
+ # TODO: move to dm-more/dm-transactions
498
+ module Transaction
499
+ #
500
+ # Produce a new Transaction for this Resource class
501
+ #
502
+ # @return <DataMapper::Adapters::Transaction
503
+ # a new DataMapper::Adapters::Transaction with all DataMapper::Repositories
504
+ # of the class of this DataMapper::Resource added.
505
+ #-
506
+ # @api public
507
+ #
508
+ # TODO: move to dm-more/dm-transactions
509
+ def transaction
510
+ DataMapper::Transaction.new(self) { |block_args| yield(*block_args) }
511
+ end
512
+ end # module Transaction
513
+
514
+ include Transaction
515
+
516
+ # TODO: move to dm-more/dm-migrations
517
+ module Migration
518
+ # TODO: move to dm-more/dm-migrations
519
+ def storage_exists?(repository_name = default_repository_name)
520
+ repository(repository_name).storage_exists?(storage_name(repository_name))
521
+ end
522
+ end # module Migration
523
+
524
+ include Migration
525
+ end # module Model
526
+ end # module DataMapper