sam-dm-core 0.9.6

Files changed (126)
  1. data/.autotest +26 -0
  2. data/CONTRIBUTING +51 -0
  3. data/FAQ +92 -0
  4. data/History.txt +145 -0
  5. data/MIT-LICENSE +22 -0
  6. data/Manifest.txt +125 -0
  7. data/QUICKLINKS +12 -0
  8. data/README.txt +143 -0
  9. data/Rakefile +30 -0
  10. data/SPECS +63 -0
  11. data/TODO +1 -0
  12. data/lib/dm-core.rb +224 -0
  13. data/lib/dm-core/adapters.rb +4 -0
  14. data/lib/dm-core/adapters/abstract_adapter.rb +202 -0
  15. data/lib/dm-core/adapters/data_objects_adapter.rb +707 -0
  16. data/lib/dm-core/adapters/mysql_adapter.rb +136 -0
  17. data/lib/dm-core/adapters/postgres_adapter.rb +188 -0
  18. data/lib/dm-core/adapters/sqlite3_adapter.rb +105 -0
  19. data/lib/dm-core/associations.rb +199 -0
  20. data/lib/dm-core/associations/many_to_many.rb +147 -0
  21. data/lib/dm-core/associations/many_to_one.rb +107 -0
  22. data/lib/dm-core/associations/one_to_many.rb +309 -0
  23. data/lib/dm-core/associations/one_to_one.rb +61 -0
  24. data/lib/dm-core/associations/relationship.rb +218 -0
  25. data/lib/dm-core/associations/relationship_chain.rb +81 -0
  26. data/lib/dm-core/auto_migrations.rb +113 -0
  27. data/lib/dm-core/collection.rb +638 -0
  28. data/lib/dm-core/dependency_queue.rb +31 -0
  29. data/lib/dm-core/hook.rb +11 -0
  30. data/lib/dm-core/identity_map.rb +45 -0
  31. data/lib/dm-core/is.rb +16 -0
  32. data/lib/dm-core/logger.rb +232 -0
  33. data/lib/dm-core/migrations/destructive_migrations.rb +17 -0
  34. data/lib/dm-core/migrator.rb +29 -0
  35. data/lib/dm-core/model.rb +471 -0
  36. data/lib/dm-core/naming_conventions.rb +84 -0
  37. data/lib/dm-core/property.rb +673 -0
  38. data/lib/dm-core/property_set.rb +162 -0
  39. data/lib/dm-core/query.rb +625 -0
  40. data/lib/dm-core/repository.rb +159 -0
  41. data/lib/dm-core/resource.rb +637 -0
  42. data/lib/dm-core/scope.rb +58 -0
  43. data/lib/dm-core/support.rb +7 -0
  44. data/lib/dm-core/support/array.rb +13 -0
  45. data/lib/dm-core/support/assertions.rb +8 -0
  46. data/lib/dm-core/support/errors.rb +23 -0
  47. data/lib/dm-core/support/kernel.rb +7 -0
  48. data/lib/dm-core/support/symbol.rb +41 -0
  49. data/lib/dm-core/transaction.rb +267 -0
  50. data/lib/dm-core/type.rb +160 -0
  51. data/lib/dm-core/type_map.rb +80 -0
  52. data/lib/dm-core/types.rb +19 -0
  53. data/lib/dm-core/types/boolean.rb +7 -0
  54. data/lib/dm-core/types/discriminator.rb +34 -0
  55. data/lib/dm-core/types/object.rb +24 -0
  56. data/lib/dm-core/types/paranoid_boolean.rb +34 -0
  57. data/lib/dm-core/types/paranoid_datetime.rb +33 -0
  58. data/lib/dm-core/types/serial.rb +9 -0
  59. data/lib/dm-core/types/text.rb +10 -0
  60. data/lib/dm-core/version.rb +3 -0
  61. data/script/all +5 -0
  62. data/script/performance.rb +203 -0
  63. data/script/profile.rb +87 -0
  64. data/spec/integration/association_spec.rb +1371 -0
  65. data/spec/integration/association_through_spec.rb +203 -0
  66. data/spec/integration/associations/many_to_many_spec.rb +449 -0
  67. data/spec/integration/associations/many_to_one_spec.rb +163 -0
  68. data/spec/integration/associations/one_to_many_spec.rb +151 -0
  69. data/spec/integration/auto_migrations_spec.rb +398 -0
  70. data/spec/integration/collection_spec.rb +1069 -0
  71. data/spec/integration/data_objects_adapter_spec.rb +32 -0
  72. data/spec/integration/dependency_queue_spec.rb +58 -0
  73. data/spec/integration/model_spec.rb +127 -0
  74. data/spec/integration/mysql_adapter_spec.rb +85 -0
  75. data/spec/integration/postgres_adapter_spec.rb +731 -0
  76. data/spec/integration/property_spec.rb +233 -0
  77. data/spec/integration/query_spec.rb +506 -0
  78. data/spec/integration/repository_spec.rb +57 -0
  79. data/spec/integration/resource_spec.rb +475 -0
  80. data/spec/integration/sqlite3_adapter_spec.rb +352 -0
  81. data/spec/integration/sti_spec.rb +208 -0
  82. data/spec/integration/strategic_eager_loading_spec.rb +138 -0
  83. data/spec/integration/transaction_spec.rb +75 -0
  84. data/spec/integration/type_spec.rb +271 -0
  85. data/spec/lib/logging_helper.rb +18 -0
  86. data/spec/lib/mock_adapter.rb +27 -0
  87. data/spec/lib/model_loader.rb +91 -0
  88. data/spec/lib/publicize_methods.rb +28 -0
  89. data/spec/models/vehicles.rb +34 -0
  90. data/spec/models/zoo.rb +47 -0
  91. data/spec/spec.opts +3 -0
  92. data/spec/spec_helper.rb +86 -0
  93. data/spec/unit/adapters/abstract_adapter_spec.rb +133 -0
  94. data/spec/unit/adapters/adapter_shared_spec.rb +15 -0
  95. data/spec/unit/adapters/data_objects_adapter_spec.rb +628 -0
  96. data/spec/unit/adapters/postgres_adapter_spec.rb +133 -0
  97. data/spec/unit/associations/many_to_many_spec.rb +17 -0
  98. data/spec/unit/associations/many_to_one_spec.rb +152 -0
  99. data/spec/unit/associations/one_to_many_spec.rb +393 -0
  100. data/spec/unit/associations/one_to_one_spec.rb +7 -0
  101. data/spec/unit/associations/relationship_spec.rb +71 -0
  102. data/spec/unit/associations_spec.rb +242 -0
  103. data/spec/unit/auto_migrations_spec.rb +111 -0
  104. data/spec/unit/collection_spec.rb +182 -0
  105. data/spec/unit/data_mapper_spec.rb +35 -0
  106. data/spec/unit/identity_map_spec.rb +126 -0
  107. data/spec/unit/is_spec.rb +80 -0
  108. data/spec/unit/migrator_spec.rb +33 -0
  109. data/spec/unit/model_spec.rb +339 -0
  110. data/spec/unit/naming_conventions_spec.rb +36 -0
  111. data/spec/unit/property_set_spec.rb +83 -0
  112. data/spec/unit/property_spec.rb +753 -0
  113. data/spec/unit/query_spec.rb +530 -0
  114. data/spec/unit/repository_spec.rb +93 -0
  115. data/spec/unit/resource_spec.rb +626 -0
  116. data/spec/unit/scope_spec.rb +142 -0
  117. data/spec/unit/transaction_spec.rb +493 -0
  118. data/spec/unit/type_map_spec.rb +114 -0
  119. data/spec/unit/type_spec.rb +119 -0
  120. data/tasks/ci.rb +68 -0
  121. data/tasks/dm.rb +63 -0
  122. data/tasks/doc.rb +20 -0
  123. data/tasks/gemspec.rb +23 -0
  124. data/tasks/hoe.rb +46 -0
  125. data/tasks/install.rb +20 -0
  126. metadata +216 -0
data/lib/dm-core/dependency_queue.rb
@@ -0,0 +1,31 @@
+ module DataMapper
+   ##
+   #
+   # DataMapper's DependencyQueue is used to store callbacks for classes which
+   # may or may not be loaded already.
+   #
+   class DependencyQueue
+     def initialize
+       @dependencies = Hash.new { |h,k| h[k] = [] }
+     end
+
+     def add(class_name, &callback)
+       @dependencies[class_name] << callback
+       resolve!
+     end
+
+     def resolve!
+       @dependencies.each do |class_name, callbacks|
+         begin
+           klass = Object.find_const(class_name)
+           callbacks.each do |callback|
+             callback.call(klass)
+           end
+           callbacks.clear
+         rescue NameError
+         end
+       end
+     end
+
+   end # class DependencyQueue
+ end # module DataMapper
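As an aside from the diff itself, a minimal usage sketch of the DependencyQueue added above (the Article constant and the callback body are illustrative; Object.find_const comes from extlib, which dm-core depends on):

    queue = DataMapper::DependencyQueue.new

    # Register a callback for a class that may not be defined yet.
    # add calls resolve!, which silently rescues NameError until the
    # constant can actually be found.
    queue.add('Article') { |klass| puts "#{klass} has been loaded" }

    # Once the constant exists, the next resolve! finds it, fires the
    # callback, and clears it from the queue.
    class Article; end
    queue.resolve!   # prints "Article has been loaded"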
data/lib/dm-core/hook.rb
@@ -0,0 +1,11 @@
+ module DataMapper
+   module Hook
+     def self.included(model)
+       model.class_eval <<-EOS, __FILE__, __LINE__
+         include Extlib::Hook
+         register_instance_hooks :save, :create, :update, :destroy
+       EOS
+     end
+   end
+   DataMapper::Resource.append_inclusions Hook
+ end # module DataMapper
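Because Resource appends this Hook module, every model picks up Extlib::Hook's advice around save, create, update and destroy. A rough sketch of what that enables (the Article model and the hook body are illustrative, and assume Extlib::Hook's before/after class methods):

    class Article
      include DataMapper::Resource

      property :id,    Serial
      property :title, String

      # Available because Hook registered :save as an instance hook above.
      before :save do
        self.title = 'untitled' if title.nil?
      end
    end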
data/lib/dm-core/identity_map.rb
@@ -0,0 +1,45 @@
+ module DataMapper
+
+   # Tracks objects to help ensure that each object gets loaded only once.
+   # See: http://www.martinfowler.com/eaaCatalog/identityMap.html
+   class IdentityMap
+     # Get a resource from the IdentityMap
+     def get(key)
+       @cache[key]
+     end
+
+     alias [] get
+
+     # Add a resource to the IdentityMap
+     def set(key, resource)
+       @second_level_cache.set(key, resource) if @second_level_cache
+       @cache[key] = resource
+     end
+
+     alias []= set
+
+     # Remove a resource from the IdentityMap
+     def delete(key)
+       @second_level_cache.delete(key) if @second_level_cache
+       @cache.delete(key)
+     end
+
+     private
+
+     def initialize(second_level_cache = nil)
+       @cache = if @second_level_cache = second_level_cache
+         Hash.new { |h,key| h[key] = @second_level_cache.get(key) }
+       else
+         Hash.new
+       end
+     end
+
+     def cache
+       @cache
+     end
+
+     def method_missing(method, *args, &block)
+       cache.__send__(method, *args, &block)
+     end
+   end # class IdentityMap
+ end # module DataMapper
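The IdentityMap is normally driven by Repository#identity_map, but it can be read in isolation; keys are arrays because resources may have composite keys. A short sketch using only the methods shown above (the values are illustrative placeholders, not real resources):

    map = DataMapper::IdentityMap.new

    map.set([1], 'first resource')   # equivalent to map[[1]] = 'first resource'
    map.get([1])                     # => 'first resource'
    map[[2]]                         # => nil (cache miss; no second-level cache given)
    map.delete([1])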
data/lib/dm-core/is.rb
@@ -0,0 +1,16 @@
+ module DataMapper
+   module Is
+
+     def is(plugin, *pars, &block)
+       generator_method = "is_#{plugin}".to_sym
+
+       if self.respond_to?(generator_method)
+         self.send(generator_method, *pars, &block)
+       else
+         raise PluginNotFoundError, "could not find plugin named #{plugin}"
+       end
+     end
+
+     Model.send(:include, self)
+   end # module Is
+ end # module DataMapper
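The `is` method simply dispatches to an `is_<plugin>` generator that a plugin defines on the model. A hedged sketch of how a plugin could hook in, using the append_extensions API from model.rb later in this diff (the :timestamped name and Timestamped module are hypothetical, not a real dm-more plugin):

    module Timestamped
      def is_timestamped(options = {})
        property :updated_at, DateTime
      end
    end

    # Make the generator available on models defined from now on.
    DataMapper::Model.append_extensions(Timestamped)

    class Post
      include DataMapper::Resource
      property :id, Serial

      is :timestamped   # dispatches to is_timestamped; unknown names raise PluginNotFoundError
    end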
data/lib/dm-core/logger.rb
@@ -0,0 +1,232 @@
+ require "time" # httpdate
+ # ==== Public DataMapper Logger API
+ #
+ # Logger taken from Merb :)
+ #
+ # To replace an existing logger with a new one:
+ #   DataMapper.logger.set_log(log{String, IO}, level{Symbol, String})
+ #
+ # Available logging levels are:
+ #   :off, :fatal, :error, :warn, :info, :debug
+ #
+ # Logging via:
+ #   DataMapper.logger.fatal(message<String>)
+ #   DataMapper.logger.error(message<String>)
+ #   DataMapper.logger.warn(message<String>)
+ #   DataMapper.logger.info(message<String>)
+ #   DataMapper.logger.debug(message<String>)
+ #
+ # Flush the buffer to the log object:
+ #   DataMapper.logger.flush
+ #
+ # Remove the current log object:
+ #   DataMapper.logger.close
+ #
+ # ==== Private DataMapper Logger API
+ #
+ # To initialize the logger you create a new object; it proxies to set_log.
+ #   DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
+ #
+ # Logger will not create the file until something is actually logged.
+ # This avoids file creation on DataMapper init when it creates the
+ # default logger.
+ module DataMapper
+
+   class << self #:nodoc:
+     attr_accessor :logger
+   end
+
+   class Logger
+
+     attr_accessor :aio
+     attr_accessor :delimiter
+     attr_reader :level
+     attr_reader :buffer
+     attr_reader :log
+
+     # @note
+     #   Ruby (standard) logger levels:
+     #     off:   absolutely nothing
+     #     fatal: an unhandleable error that results in a program crash
+     #     error: a handleable error condition
+     #     warn:  a warning
+     #     info:  generic (useful) information about system operation
+     #     debug: low-level information for developers
+     #
+     #   DataMapper::Logger::LEVELS[:off, :fatal, :error, :warn, :info, :debug]
+     LEVELS =
+     {
+       :off   => 99999,
+       :fatal => 7,
+       :error => 6,
+       :warn  => 4,
+       :info  => 3,
+       :debug => 0
+     }
+
+     def level=(new_level)
+       @level = LEVELS[new_level.to_sym]
+       reset_methods(:close)
+     end
+
+     private
+
+     # The idea here is that instead of performing an 'if' conditional check on
+     # each logging call, we do it once when the log object is set up.
+     def set_write_method
+       @log.instance_eval do
+
+         # Determine if asynchronous IO can be used
+         def aio?
+           @aio = !RUBY_PLATFORM.match(/java|mswin/) &&
+             !(@log == STDOUT) &&
+             @log.respond_to?(:write_nonblock)
+         end
+
+         # Define the write method based on whether aio can be used
+         undef write_method if defined? write_method
+         if aio?
+           alias :write_method :write_nonblock
+         else
+           alias :write_method :write
+         end
+       end
+     end
+
+     def initialize_log(log)
+       close if @log # be sure that we don't leave open files laying around.
+       @log = log || "log/dm.log"
+     end
+
+     def reset_methods(o_or_c)
+       if o_or_c == :open
+         alias internal_push push_opened
+       elsif o_or_c == :close
+         alias internal_push push_closed
+       end
+     end
+
+     def push_opened(string)
+       message = Time.now.httpdate
+       message << delimiter
+       message << string
+       message << "\n" unless message[-1] == ?\n
+       @buffer << message
+       flush # Force a flush for now until we figure out where we want to use the buffering.
+     end
+
+     def push_closed(string)
+       unless @log.respond_to?(:write)
+         log = Pathname(@log)
+         log.dirname.mkpath
+         @log = log.open('a')
+         @log.sync = true
+       end
+       set_write_method
+       reset_methods(:open)
+       push(string)
+     end
+
+     alias internal_push push_closed
+
+     def prep_msg(message, level)
+       level << delimiter << message
+     end
+
+     public
+
+     # To initialize the logger you create a new object; it proxies to set_log.
+     #   DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
+     #
+     # @param log<IO,String> either an IO object or a name of a logfile.
+     # @param log_level<Symbol,String> the log level to set
+     # @param delimiter<String> delimiter to use between message sections
+     # @param log_creation<Boolean> log that the file is being created
+     def initialize(*args)
+       set_log(*args)
+     end
+
+     # To replace an existing logger with a new one:
+     #   DataMapper.logger.set_log(log{String, IO}, level{Symbol, String})
+     #
+     # @param log<IO,String> either an IO object or a name of a logfile.
+     # @param log_level<Symbol> a symbol representing the log level from
+     #   {:off, :fatal, :error, :warn, :info, :debug}
+     # @param delimiter<String> delimiter to use between message sections
+     # @param log_creation<Boolean> log that the file is being created
+     def set_log(log, log_level = :off, delimiter = " ~ ", log_creation = false)
+       delimiter ||= " ~ "
+
+       if log_level && LEVELS[log_level.to_sym]
+         self.level = log_level.to_sym
+       else
+         self.level = :debug
+       end
+
+       @buffer    = []
+       @delimiter = delimiter
+
+       initialize_log(log)
+
+       DataMapper.logger = self
+
+       self.info("Logfile created") if log_creation
+     end
+
+     # Flush the entire buffer to the log object.
+     #   DataMapper.logger.flush
+     #
+     def flush
+       return unless @buffer.size > 0
+       @log.write_method(@buffer.slice!(0..-1).to_s)
+     end
+
+     # Close and remove the current log object.
+     #   DataMapper.logger.close
+     #
+     def close
+       flush
+       @log.close if @log.respond_to?(:close)
+       @log = nil
+     end
+
+     # Appends a string and log level to the logger's buffer.
+     #
+     # @note
+     #   Note that the string is discarded if the string's log level is less than
+     #   the logger's log level.
+     # @note
+     #   Note that if the logger is aio capable then the logger will use
+     #   non-blocking asynchronous writes.
+     #
+     # @param level<Fixnum> the logging level as an integer
+     # @param string<String> the message string to be logged
+     def push(string)
+       internal_push(string)
+     end
+     alias << push
+
+     # Generate the following logging methods for DataMapper.logger as described
+     # in the API:
+     #   :fatal, :error, :warn, :info, :debug
+     #   :off only gets an off? method
+     LEVELS.each_pair do |name, number|
+       unless name.to_s == 'off'
+         class_eval <<-EOS, __FILE__, __LINE__
+           # DOC
+           def #{name}(message)
+             self.<<( prep_msg(message, "#{name}") ) if #{name}?
+           end
+         EOS
+       end
+
+       class_eval <<-EOS, __FILE__, __LINE__
+         # DOC
+         def #{name}?
+           #{number} >= level
+         end
+       EOS
+     end
+
+   end # class Logger
+ end # module DataMapper
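Following the public API documented at the top of this file, attaching and using a logger looks roughly like this (the log path and messages are illustrative):

    DataMapper::Logger.new('log/dm.log', :debug)   # proxies to set_log

    DataMapper.logger.info('connected to the repository')
    DataMapper.logger.debug('SELECT * FROM articles')
    DataMapper.logger.error('boom')

    DataMapper.logger.flush   # write any buffered messages
    DataMapper.logger.close   # flush and drop the current log object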
data/lib/dm-core/migrations/destructive_migrations.rb
@@ -0,0 +1,17 @@
+ # TODO: move to dm-more/dm-migrations
+
+ module DataMapper
+   module DestructiveMigrations
+     def self.included(model)
+       DestructiveMigrator.models << model
+     end
+   end # module DestructiveMigrations
+
+   class DestructiveMigrator < Migrator
+     def self.migrate(repository_name)
+       models.each do |model|
+         model.auto_migrate!
+       end
+     end
+   end # class DestructiveMigrator
+ end # module DataMapper
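Sketch of the intended use: including the module registers the model with DestructiveMigrator, so a later Migrator.migrate call runs auto_migrate! on it, dropping and recreating its storage (the Article model is illustrative):

    class Article
      include DataMapper::Resource
      include DataMapper::DestructiveMigrations

      property :id, Serial
    end

    # Fans out to DestructiveMigrator, which auto-migrates Article.
    DataMapper::Migrator.migrate(:default)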
data/lib/dm-core/migrator.rb
@@ -0,0 +1,29 @@
+ # TODO: move to dm-more/dm-migrations
+
+ module DataMapper
+   class Migrator
+     def self.subclasses
+       @@subclasses ||= []
+     end
+
+     def self.subclasses=(obj)
+       @@subclasses = obj
+     end
+
+     def self.inherited(klass)
+       subclasses << klass
+
+       class << klass
+         def models
+           @models ||= []
+         end
+       end
+     end
+
+     def self.migrate(repository_name)
+       subclasses.collect do |migrator|
+         migrator.migrate(repository_name)
+       end.flatten
+     end
+   end # class Migrator
+ end # module DataMapper
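The inherited hook above is what makes that fan-out work: every Migrator subclass is collected in Migrator.subclasses and gets its own models registry. A small sketch (MyMigrator is illustrative):

    class MyMigrator < DataMapper::Migrator
      def self.migrate(repository_name)
        models.map { |model| "migrated #{model} in #{repository_name}" }
      end
    end

    DataMapper::Migrator.subclasses.include?(MyMigrator)   # => true
    DataMapper::Migrator.migrate(:default)                 # includes MyMigrator's results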
data/lib/dm-core/model.rb
@@ -0,0 +1,471 @@
+ require 'set'
+
+ module DataMapper
+   module Model
+     ##
+     #
+     # Extends the model with this module after DataMapper::Resource has been
+     # included.
+     #
+     # This is a useful way to extend DataMapper::Model while
+     # still retaining a self.extended method.
+     #
+     # @param [Module] extensions the module that is to extend the model after
+     #   DataMapper::Model
+     #
+     # @return [TrueClass, FalseClass] whether or not the inclusions have been
+     #   successfully appended to the list
+     #-
+     # @api public
+     #
+     # TODO: Move this to DataMapper::Model when DataMapper::Model is created
+     def self.append_extensions(*extensions)
+       extra_extensions.concat extensions
+       true
+     end
+
+     def self.extra_extensions
+       @extra_extensions ||= []
+     end
+
+     def self.extended(model)
+       model.instance_variable_set(:@storage_names, Hash.new { |h,k| h[k] = repository(k).adapter.resource_naming_convention.call(model.send(:default_storage_name)) })
+       model.instance_variable_set(:@properties, Hash.new { |h,k| h[k] = k == Repository.default_name ? PropertySet.new : h[Repository.default_name].dup })
+       model.instance_variable_set(:@field_naming_conventions, Hash.new { |h,k| h[k] = repository(k).adapter.field_naming_convention })
+       extra_extensions.each { |extension| model.extend(extension) }
+     end
+
+     def inherited(target)
+       target.instance_variable_set(:@storage_names, @storage_names.dup)
+       target.instance_variable_set(:@properties, Hash.new { |h,k| h[k] = k == Repository.default_name ? PropertySet.new : h[Repository.default_name].dup })
+       target.instance_variable_set(:@base_model, self.base_model)
+       target.instance_variable_set(:@paranoid_properties, @paranoid_properties)
+       target.instance_variable_set(:@field_naming_conventions, @field_naming_conventions.dup)
+
+       if self.respond_to?(:validators)
+         @validations.contexts.each do |context, validators|
+           validators.each { |validator| target.validators.context(context) << validator }
+         end
+       end
+
+       @properties.each do |repository_name,properties|
+         repository(repository_name) do
+           properties.each do |property|
+             next if target.properties(repository_name).has_property?(property.name)
+             target.property(property.name, property.type, property.options.dup)
+           end
+         end
+       end
+
+       if @relationships
+         duped_relationships = Hash.new { |h,k| h[k] = {} }
+         @relationships.each do |repository_name,relationships|
+           relationships.each do |name, relationship|
+             dup = relationship.dup
+             dup.instance_variable_set(:@child_model, target) if dup.instance_variable_get(:@child_model) == self
+             dup.instance_variable_set(:@parent_model, target) if dup.instance_variable_get(:@parent_model) == self
+             duped_relationships[repository_name][name] = dup
+           end
+         end
+         target.instance_variable_set(:@relationships, duped_relationships)
+       end
+     end
+
+     def self.new(storage_name, &block)
+       model = Class.new
+       model.send(:include, Resource)
+       model.class_eval <<-EOS, __FILE__, __LINE__
+         def self.default_storage_name
+           #{Extlib::Inflection.classify(storage_name).inspect}
+         end
+       EOS
+       model.instance_eval(&block) if block_given?
+       model
+     end
+
+     def base_model
+       @base_model ||= self
+     end
+
+     def repository_name
+       Repository.context.any? ? Repository.context.last.name : default_repository_name
+     end
+
+     ##
+     # Get the repository with a given name, or the default one for the current
+     # context, or the default one for this class.
+     #
+     # @param name<Symbol> the name of the repository wanted
+     # @param block<Block> block to execute with the fetched repository as parameter
+     #
+     # @return <Object, DataMapper::Repository> whatever the block returns,
+     #   if given a block, otherwise the requested repository.
+     #-
+     # @api public
+     def repository(name = nil, &block)
+       #
+       # There have been a couple of different strategies here, but me (zond) and dkubb are at least
+       # united in the concept of explicitness over implicitness. That is - the explicit wish of the
+       # caller (+name+) should be given more priority than the implicit wish of the caller (Repository.context.last).
+       #
+       DataMapper.repository(name || repository_name, &block)
+     end
+
+     ##
+     # the name of the storage receptacle for this resource, i.e. the table name for database stores
+     #
+     # @return <String> the storage name (i.e. table name, for database stores) associated with this resource in the given repository
+     def storage_name(repository_name = default_repository_name)
+       @storage_names[repository_name]
+     end
+
+     ##
+     # the names of the storage receptacles for this resource across all repositories
+     #
+     # @return <Hash(Symbol => String)> All available names of storage receptacles
+     def storage_names
+       @storage_names
+     end
+
+     ##
+     # The field naming conventions for this resource across all repositories.
+     #
+     # @return <Hash(Symbol => String)> All available field naming conventions
+     def field_naming_conventions
+       @field_naming_conventions
+     end
+
+     ##
+     # defines a property on the resource
+     #
+     # @param <Symbol> name the name for which to call this property
+     # @param <Type> type the type to define this property as
+     # @param <Hash(Symbol => String)> options a hash of available options
+     # @see DataMapper::Property
+     def property(name, type, options = {})
+       property = Property.new(self, name, type, options)
+
+       create_property_getter(property)
+       create_property_setter(property)
+
+       @properties[repository_name][property.name] = property
+
+       # Add property to the other mappings as well if this is for the default
+       # repository.
+       if repository_name == default_repository_name
+         @properties.each_pair do |repository_name, properties|
+           next if repository_name == default_repository_name
+           properties << property unless properties.has_property?(property.name)
+         end
+       end
+
+       # Add the property to the lazy_loads set for this resource's repository
+       # only.
+       # TODO: Is this right or should we add the lazy contexts to all
+       # repositories?
+       if property.lazy?
+         context = options.fetch(:lazy, :default)
+         context = :default if context == true
+
+         Array(context).each do |item|
+           @properties[repository_name].lazy_context(item) << name
+         end
+       end
+
+       # add the property to the child classes only if the property was
+       # added after the child classes' properties have been copied from
+       # the parent
+       if respond_to?(:descendants)
+         descendants.each do |model|
+           next if model.properties(repository_name).has_property?(name)
+           model.property(name, type, options)
+         end
+       end
+
+       property
+     end
+
+     def repositories
+       [ repository ].to_set + @properties.keys.collect { |repository_name| DataMapper.repository(repository_name) }
+     end
+
+     def properties(repository_name = default_repository_name)
+       @properties[repository_name]
+     end
+
+     def eager_properties(repository_name = default_repository_name)
+       @properties[repository_name].defaults
+     end
+
+     # @api private
+     def properties_with_subclasses(repository_name = default_repository_name)
+       properties = PropertySet.new
+       ([ self ].to_set + (respond_to?(:descendants) ? descendants : [])).each do |model|
+         model.relationships(repository_name).each_value { |relationship| relationship.child_key }
+         model.many_to_one_relationships.each do |relationship| relationship.child_key end
+         model.properties(repository_name).each do |property|
+           properties << property unless properties.has_property?(property.name)
+         end
+       end
+       properties
+     end
+
+     def key(repository_name = default_repository_name)
+       @properties[repository_name].key
+     end
+
+     def inheritance_property(repository_name = default_repository_name)
+       @properties[repository_name].inheritance_property
+     end
+
+     def default_order(repository_name = default_repository_name)
+       key(repository_name).map { |property| Query::Direction.new(property) }
+     end
+
+     def get(*key)
+       key = typecast_key(key)
+       repository.identity_map(self).get(key) || first(to_query(repository, key))
+     end
+
+     def get!(*key)
+       get(*key) || raise(ObjectNotFoundError, "Could not find #{self.name} with key #{key.inspect}")
+     end
+
+     def all(query = {})
+       query = scoped_query(query)
+       query.repository.read_many(query)
+     end
+
+     def first(*args)
+       query = args.last.respond_to?(:merge) ? args.pop : {}
+       query = scoped_query(query.merge(:limit => args.first || 1))
+
+       if args.any?
+         query.repository.read_many(query)
+       else
+         query.repository.read_one(query)
+       end
+     end
+
+     def [](*key)
+       warn("#{name}[] is deprecated. Use #{name}.get! instead.")
+       get!(*key)
+     end
+
+     def first_or_create(query, attributes = {})
+       first(query) || begin
+         resource = allocate
+         query = query.dup
+
+         properties(repository_name).key.each do |property|
+           if value = query.delete(property.name)
+             resource.send("#{property.name}=", value)
+           end
+         end
+
+         resource.attributes = query.merge(attributes)
+         resource.save
+         resource
+       end
+     end
+
+     ##
+     # Create an instance of Resource with the given attributes
+     #
+     # @param <Hash(Symbol => Object)> attributes hash of attributes to set
+     def create(attributes = {})
+       resource = new(attributes)
+       resource.save
+       resource
+     end
+
+     ##
+     # This method is deprecated, and will be removed from dm-core.
+     #
+     def create!(attributes = {})
+       warn("Model#create! is deprecated. It is moving to dm-validations, and will be used to create a record without validations")
+       resource = create(attributes)
+       raise PersistenceError, "Resource not saved: :new_record => #{resource.new_record?}, :dirty_attributes => #{resource.dirty_attributes.inspect}" if resource.new_record?
+       resource
+     end
+
+     # TODO: SPEC
+     def copy(source, destination, query = {})
+       repository(destination) do
+         repository(source).read_many(query).each do |resource|
+           self.create(resource)
+         end
+       end
+     end
+
+     # @api private
+     # TODO: spec this
+     def load(values, query)
+       repository = query.repository
+       model = self
+
+       if inheritance_property_index = query.inheritance_property_index(repository)
+         model = values.at(inheritance_property_index) || model
+       end
+
+       if key_property_indexes = query.key_property_indexes(repository)
+         key_values = values.values_at(*key_property_indexes)
+         identity_map = repository.identity_map(model)
+
+         if resource = identity_map.get(key_values)
+           return resource unless query.reload?
+         else
+           resource = model.allocate
+           resource.instance_variable_set(:@repository, repository)
+           identity_map.set(key_values, resource)
+         end
+       else
+         resource = model.allocate
+         resource.readonly!
+       end
+
+       resource.instance_variable_set(:@new_record, false)
+
+       query.fields.zip(values) do |property,value|
+         value = property.custom? ? property.type.load(value, property) : property.typecast(value)
+         property.set!(resource, value)
+
+         if track = property.track
+           case track
+           when :hash
+             resource.original_values[property.name] = value.dup.hash unless resource.original_values.has_key?(property.name) rescue value.hash
+           when :load
+             resource.original_values[property.name] = value unless resource.original_values.has_key?(property.name)
+           end
+         end
+       end
+
+       resource
+     end
+
+     # TODO: spec this
+     def to_query(repository, key, query = {})
+       conditions = Hash[ *self.key(repository_name).zip(key).flatten ]
+       Query.new(repository, self, query.merge(conditions))
+     end
+
+     def typecast_key(key)
+       self.key(repository_name).zip(key).map { |k, v| k.typecast(v) }
+     end
+
+     def default_repository_name
+       Repository.default_name
+     end
+
+     def paranoid_properties
+       @paranoid_properties ||= {}
+       @paranoid_properties
+     end
+
+     private
+
+     def default_storage_name
+       self.name
+     end
+
+     def scoped_query(query = self.query)
+       assert_kind_of 'query', query, Query, Hash
+
+       return self.query if query == self.query
+
+       query = if query.kind_of?(Hash)
+         Query.new(query.has_key?(:repository) ? query.delete(:repository) : self.repository, self, query)
+       else
+         query
+       end
+
+       if self.query
+         self.query.merge(query)
+       else
+         merge_with_default_scope(query)
+       end
+     end
+
+     def set_paranoid_property(name, &block)
+       self.paranoid_properties[name] = block
+     end
+
+     # defines the getter for the property
+     def create_property_getter(property)
+       class_eval <<-EOS, __FILE__, __LINE__
+         #{property.reader_visibility}
+         def #{property.getter}
+           attribute_get(#{property.name.inspect})
+         end
+       EOS
+
+       if property.primitive == TrueClass && !instance_methods.include?(property.name.to_s)
+         class_eval <<-EOS, __FILE__, __LINE__
+           #{property.reader_visibility}
+           alias #{property.name} #{property.getter}
+         EOS
+       end
+     end
+
+     # defines the setter for the property
+     def create_property_setter(property)
+       unless instance_methods.include?("#{property.name}=")
+         class_eval <<-EOS, __FILE__, __LINE__
+           #{property.writer_visibility}
+           def #{property.name}=(value)
+             attribute_set(#{property.name.inspect}, value)
+           end
+         EOS
+       end
+     end
+
+     def relationships(*args)
+       # DO NOT REMOVE!
+       # method_missing depends on these existing. Without this stub,
+       # a missing module can cause misleading recursive errors.
+       raise NotImplementedError.new
+     end
+
+     def method_missing(method, *args, &block)
+       if relationship = self.relationships(repository_name)[method]
+         klass = self == relationship.child_model ? relationship.parent_model : relationship.child_model
+         return DataMapper::Query::Path.new(repository, [ relationship ], klass)
+       end
+
+       if property = properties(repository_name)[method]
+         return property
+       end
+
+       super
+     end
+
+     # TODO: move to dm-more/dm-transactions
+     module Transaction
+       #
+       # Produce a new Transaction for this Resource class
+       #
+       # @return <DataMapper::Adapters::Transaction>
+       #   a new DataMapper::Adapters::Transaction with all DataMapper::Repositories
+       #   of the class of this DataMapper::Resource added.
+       #-
+       # @api public
+       #
+       # TODO: move to dm-more/dm-transactions
+       def transaction(&block)
+         DataMapper::Transaction.new(self, &block)
+       end
+     end # module Transaction
+
+     include Transaction
+
+     # TODO: move to dm-more/dm-migrations
+     module Migration
+       # TODO: move to dm-more/dm-migrations
+       def storage_exists?(repository_name = default_repository_name)
+         repository(repository_name).storage_exists?(storage_name(repository_name))
+       end
+     end # module Migration
+
+     include Migration
+   end # module Model
+ end # module DataMapper
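Taken together, the Model module above is what gives a class its property and finder API once DataMapper::Resource is included. A usage sketch against the methods defined in this file (the Article model and attribute values are illustrative):

    class Article
      include DataMapper::Resource

      property :id,    Serial
      property :title, String
    end

    Article.storage_name                 # derived via the repository's naming convention
    Article.properties[:title]           # the Property object defined above

    article = Article.create(:title => 'DataMapper')
    Article.get(article.id)              # identity map hit, or a query by key
    Article.first(:title => 'DataMapper')
    Article.all(:limit => 10)
    Article.first_or_create(:title => 'DataMapper')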