dm-core 0.9.2

Sign up to get free protection for your applications and to get access to all the features.
Files changed (101)
  1. data/CHANGELOG +144 -0
  2. data/FAQ +74 -0
  3. data/MIT-LICENSE +22 -0
  4. data/QUICKLINKS +12 -0
  5. data/README +143 -0
  6. data/lib/dm-core.rb +213 -0
  7. data/lib/dm-core/adapters.rb +4 -0
  8. data/lib/dm-core/adapters/abstract_adapter.rb +202 -0
  9. data/lib/dm-core/adapters/data_objects_adapter.rb +701 -0
  10. data/lib/dm-core/adapters/mysql_adapter.rb +132 -0
  11. data/lib/dm-core/adapters/postgres_adapter.rb +179 -0
  12. data/lib/dm-core/adapters/sqlite3_adapter.rb +105 -0
  13. data/lib/dm-core/associations.rb +172 -0
  14. data/lib/dm-core/associations/many_to_many.rb +138 -0
  15. data/lib/dm-core/associations/many_to_one.rb +101 -0
  16. data/lib/dm-core/associations/one_to_many.rb +275 -0
  17. data/lib/dm-core/associations/one_to_one.rb +61 -0
  18. data/lib/dm-core/associations/relationship.rb +116 -0
  19. data/lib/dm-core/associations/relationship_chain.rb +74 -0
  20. data/lib/dm-core/auto_migrations.rb +64 -0
  21. data/lib/dm-core/collection.rb +604 -0
  22. data/lib/dm-core/hook.rb +11 -0
  23. data/lib/dm-core/identity_map.rb +45 -0
  24. data/lib/dm-core/is.rb +16 -0
  25. data/lib/dm-core/logger.rb +233 -0
  26. data/lib/dm-core/migrations/destructive_migrations.rb +17 -0
  27. data/lib/dm-core/migrator.rb +29 -0
  28. data/lib/dm-core/model.rb +399 -0
  29. data/lib/dm-core/naming_conventions.rb +52 -0
  30. data/lib/dm-core/property.rb +611 -0
  31. data/lib/dm-core/property_set.rb +158 -0
  32. data/lib/dm-core/query.rb +590 -0
  33. data/lib/dm-core/repository.rb +159 -0
  34. data/lib/dm-core/resource.rb +618 -0
  35. data/lib/dm-core/scope.rb +35 -0
  36. data/lib/dm-core/support.rb +7 -0
  37. data/lib/dm-core/support/array.rb +13 -0
  38. data/lib/dm-core/support/assertions.rb +8 -0
  39. data/lib/dm-core/support/errors.rb +23 -0
  40. data/lib/dm-core/support/kernel.rb +7 -0
  41. data/lib/dm-core/support/symbol.rb +41 -0
  42. data/lib/dm-core/transaction.rb +267 -0
  43. data/lib/dm-core/type.rb +160 -0
  44. data/lib/dm-core/type_map.rb +80 -0
  45. data/lib/dm-core/types.rb +19 -0
  46. data/lib/dm-core/types/boolean.rb +7 -0
  47. data/lib/dm-core/types/discriminator.rb +32 -0
  48. data/lib/dm-core/types/object.rb +20 -0
  49. data/lib/dm-core/types/paranoid_boolean.rb +23 -0
  50. data/lib/dm-core/types/paranoid_datetime.rb +22 -0
  51. data/lib/dm-core/types/serial.rb +9 -0
  52. data/lib/dm-core/types/text.rb +10 -0
  53. data/spec/integration/association_spec.rb +1215 -0
  54. data/spec/integration/association_through_spec.rb +150 -0
  55. data/spec/integration/associations/many_to_many_spec.rb +171 -0
  56. data/spec/integration/associations/many_to_one_spec.rb +123 -0
  57. data/spec/integration/associations/one_to_many_spec.rb +66 -0
  58. data/spec/integration/auto_migrations_spec.rb +398 -0
  59. data/spec/integration/collection_spec.rb +1015 -0
  60. data/spec/integration/data_objects_adapter_spec.rb +32 -0
  61. data/spec/integration/model_spec.rb +68 -0
  62. data/spec/integration/mysql_adapter_spec.rb +85 -0
  63. data/spec/integration/postgres_adapter_spec.rb +732 -0
  64. data/spec/integration/property_spec.rb +224 -0
  65. data/spec/integration/query_spec.rb +376 -0
  66. data/spec/integration/repository_spec.rb +57 -0
  67. data/spec/integration/resource_spec.rb +324 -0
  68. data/spec/integration/sqlite3_adapter_spec.rb +352 -0
  69. data/spec/integration/sti_spec.rb +185 -0
  70. data/spec/integration/transaction_spec.rb +75 -0
  71. data/spec/integration/type_spec.rb +149 -0
  72. data/spec/lib/mock_adapter.rb +27 -0
  73. data/spec/spec_helper.rb +112 -0
  74. data/spec/unit/adapters/abstract_adapter_spec.rb +133 -0
  75. data/spec/unit/adapters/adapter_shared_spec.rb +15 -0
  76. data/spec/unit/adapters/data_objects_adapter_spec.rb +627 -0
  77. data/spec/unit/adapters/postgres_adapter_spec.rb +125 -0
  78. data/spec/unit/associations/many_to_many_spec.rb +14 -0
  79. data/spec/unit/associations/many_to_one_spec.rb +138 -0
  80. data/spec/unit/associations/one_to_many_spec.rb +385 -0
  81. data/spec/unit/associations/one_to_one_spec.rb +7 -0
  82. data/spec/unit/associations/relationship_spec.rb +67 -0
  83. data/spec/unit/associations_spec.rb +205 -0
  84. data/spec/unit/auto_migrations_spec.rb +110 -0
  85. data/spec/unit/collection_spec.rb +174 -0
  86. data/spec/unit/data_mapper_spec.rb +21 -0
  87. data/spec/unit/identity_map_spec.rb +126 -0
  88. data/spec/unit/is_spec.rb +80 -0
  89. data/spec/unit/migrator_spec.rb +33 -0
  90. data/spec/unit/model_spec.rb +339 -0
  91. data/spec/unit/naming_conventions_spec.rb +28 -0
  92. data/spec/unit/property_set_spec.rb +96 -0
  93. data/spec/unit/property_spec.rb +447 -0
  94. data/spec/unit/query_spec.rb +485 -0
  95. data/spec/unit/repository_spec.rb +93 -0
  96. data/spec/unit/resource_spec.rb +557 -0
  97. data/spec/unit/scope_spec.rb +131 -0
  98. data/spec/unit/transaction_spec.rb +493 -0
  99. data/spec/unit/type_map_spec.rb +114 -0
  100. data/spec/unit/type_spec.rb +119 -0
  101. metadata +187 -0
module DataMapper
  # Wires Extlib::Hook into every model so that before/after hooks can be
  # registered around the core persistence methods.
  module Hook
    # Ruby callback fired when this module is included into a model class:
    # mixes in Extlib::Hook and registers the persistence methods as
    # hookable instance methods.
    def self.included(model)
      model.class_eval do
        include Extlib::Hook
        register_instance_hooks :save, :create, :update, :destroy
      end
    end
  end

  DataMapper::Resource.append_inclusions Hook
end # module DataMapper
module DataMapper

  # Tracks objects to help ensure that each object gets loaded only once.
  # See: http://www.martinfowler.com/eaaCatalog/identityMap.html
  class IdentityMap
    # Get a resource from the IdentityMap.
    #
    # @param key [Object] the key the resource was stored under
    # @return [Object, NilClass] the cached resource, or nil when absent
    #   (when a second-level cache is configured, a miss falls through to
    #   it via the Hash default block set up in #initialize)
    def get(key)
      @cache[key]
    end

    alias [] get

    # Add a resource to the IdentityMap (and the second-level cache, if any).
    #
    # @param key [Object] the key to store the resource under
    # @param resource [Object] the resource to cache
    # @return [Object] the stored resource
    def set(key, resource)
      @second_level_cache.set(key, resource) if @second_level_cache
      @cache[key] = resource
    end

    alias []= set

    # Remove a resource from the IdentityMap (and the second-level cache,
    # if any).
    #
    # @param key [Object] the key to remove
    # @return [Object, NilClass] the removed resource, or nil
    def delete(key)
      @second_level_cache.delete(key) if @second_level_cache
      @cache.delete(key)
    end

    private

    # @param second_level_cache [Object, NilClass] optional backing cache
    #   responding to #get/#set/#delete
    def initialize(second_level_cache = nil)
      @cache = if @second_level_cache = second_level_cache
        # On a first-level miss, consult the second-level cache and
        # memoize whatever it returns (including nil).
        Hash.new { |h,key| h[key] = @second_level_cache.get(key) }
      else
        Hash.new
      end
    end

    def cache
      @cache
    end

    # FIX: keep respond_to? consistent with the method_missing delegation
    # below, so callers can feature-detect the delegated Hash interface.
    def respond_to_missing?(method, include_private = false)
      cache.respond_to?(method, include_private) || super
    end

    # Delegate any other message (each, size, keys, ...) straight to the
    # underlying Hash.
    def method_missing(method, *args, &block)
      cache.__send__(method, *args, &block)
    end
  end # class IdentityMap
end # module DataMapper
module DataMapper
  # Entry point for "is" style plugins: calling Model.is(:list, ...)
  # dispatches to a generator method such as is_list defined by a plugin.
  module Is

    # Invoke the plugin generator named "is_#{plugin}" with the given
    # arguments, raising when no such plugin has been loaded.
    def is(plugin, *pars)
      generator_method = :"is_#{plugin}"

      unless respond_to?(generator_method)
        raise PluginNotFoundError, "could not find plugin named #{plugin}"
      end

      send(generator_method, *pars)
    end

    Model.send(:include, self)
  end # module Is
end # module DataMapper
require "time" # httpdate
# ==== Public DataMapper Logger API
#
# Logger taken from Merb :)
#
# To replace an existing logger with a new one:
#   DataMapper::Logger.set_log(log{String, IO}, level{Symbol, String})
#
# Available logging levels are
#   DataMapper::Logger::{ Fatal, Error, Warn, Info, Debug }
#
# Logging via:
#   DataMapper.logger.fatal(message<String>)
#   DataMapper.logger.error(message<String>)
#   DataMapper.logger.warn(message<String>)
#   DataMapper.logger.info(message<String>)
#   DataMapper.logger.debug(message<String>)
#
# Flush the buffer to
#   DataMapper.logger.flush
#
# Remove the current log object
#   DataMapper.logger.close
#
# ==== Private DataMapper Logger API
#
# To initialize the logger you create a new object, proxies to set_log.
#   DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
#
# Logger will not create the file until something is actually logged
# This avoids file creation on DataMapper init when it creates the
# default logger.
module DataMapper

  class << self #:nodoc:
    attr_accessor :logger
  end

  class Logger

    attr_accessor :aio
    attr_accessor :delimiter
    attr_reader :level
    attr_reader :buffer
    attr_reader :log

    # @note
    #   Ruby (standard) logger levels:
    #     off:   absolutely nothing
    #     fatal: an unhandleable error that results in a program crash
    #     error: a handleable error condition
    #     warn:  a warning
    #     info:  generic (useful) information about system operation
    #     debug: low-level information for developers
    #
    #   DataMapper::Logger::LEVELS[:off, :fatal, :error, :warn, :info, :debug]
    LEVELS =
    {
      :off   => 99999,
      :fatal => 7,
      :error => 6,
      :warn  => 4,
      :info  => 3,
      :debug => 0
    }

    # Set the log level; messages below this severity are discarded.
    #
    # @param new_level<Symbol,String> one of the LEVELS keys
    def level=(new_level)
      @level = LEVELS[new_level.to_sym]
      reset_methods(:close)
    end

    private

    # The idea here is that instead of performing an 'if' conditional check on
    # each logging we do it once when the log object is setup
    def set_write_method
      @log.instance_eval do

        # Determine if asynchronous IO can be used.
        #
        # BUGFIX: this method runs inside the log object's own
        # instance_eval, so instance variables here belong to the IO
        # object, not the Logger. The original code tested `@log`, which
        # is always nil in this context, meaning the STDOUT/write_nonblock
        # checks ran against nil and AIO was never detected. Test `self`
        # (the log IO itself) instead.
        def aio?
          @aio = !RUBY_PLATFORM.match(/java|mswin/) &&
            !(self == STDOUT) &&
            respond_to?(:write_nonblock)
        end

        # Define the write method based on if aio can be used
        undef write_method if defined? write_method
        if aio?
          alias :write_method :write_nonblock
        else
          alias :write_method :write
        end
      end
    end

    # Remember the log sink (IO or file path); a path is only opened
    # lazily, on the first push (see push_closed).
    def initialize_log(log)
      close if @log # be sure that we don't leave open files laying around.
      @log = log || "log/dm.log"
    end

    # Swap the internal_push implementation depending on whether the log
    # sink has been opened yet.
    def reset_methods(o_or_c)
      if o_or_c == :open
        alias internal_push push_opened
      elsif o_or_c == :close
        alias internal_push push_closed
      end
    end

    # Append a timestamped message to the buffer and flush immediately.
    def push_opened(string)
      message = Time.now.httpdate
      message << delimiter
      message << string
      message << "\n" unless message[-1] == ?\n
      @buffer << message
      flush # Force a flush for now until we figure out where we want to use the buffering.
    end

    # First push after (re)configuration: open the log file if the sink is
    # a path, pick the write method, then switch to the "opened" push
    # implementation and retry the push.
    def push_closed(string)
      unless @log.respond_to?(:write)
        log = Pathname(@log)
        log.dirname.mkpath
        @log = log.open('a')
        @log.sync = true
      end
      set_write_method
      reset_methods(:open)
      push(string)
    end

    alias internal_push push_closed

    # Prefix a message with its level name and the delimiter.
    def prep_msg(message, level)
      level << delimiter << message
    end

    public

    # To initialize the logger you create a new object, proxies to set_log.
    #   DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
    #
    # @param log<IO,String> either an IO object or a name of a logfile.
    # @param log_level<String> the message string to be logged
    # @param delimiter<String> delimiter to use between message sections
    # @param log_creation<Boolean> log that the file is being created
    def initialize(*args)
      set_log(*args)
    end

    # To replace an existing logger with a new one:
    #   DataMapper::Logger.set_log(log{String, IO}, level{Symbol, String})
    #
    # @param log<IO,String> either an IO object or a name of a logfile.
    # @param log_level<Symbol> a symbol representing the log level from
    #   {:off, :fatal, :error, :warn, :info, :debug}
    # @param delimiter<String> delimiter to use between message sections
    # @param log_creation<Boolean> log that the file is being created
    def set_log(log, log_level = :off, delimiter = " ~ ", log_creation = false)
      delimiter ||= " ~ "

      # Unknown or nil levels fall back to :debug (log everything).
      if log_level && LEVELS[log_level.to_sym]
        self.level = log_level.to_sym
      else
        self.level = :debug
      end

      @buffer    = []
      @delimiter = delimiter

      initialize_log(log)

      # The most recently configured logger becomes the global one.
      DataMapper.logger = self

      self.info("Logfile created") if log_creation
    end

    # Flush the entire buffer to the log object.
    #   DataMapper.logger.flush
    def flush
      return unless @buffer.size > 0
      @log.write_method(@buffer.slice!(0..-1).to_s)
    end

    # Close and remove the current log object.
    #   DataMapper.logger.close
    def close
      flush
      @log.close if @log.respond_to?(:close)
      @log = nil
    end

    # Appends a string and log level to logger's buffer.
    #
    # @note
    #   Note that the string is discarded if the string's log level less than the
    #   logger's log level.
    # @note
    #   Note that if the logger is aio capable then the logger will use
    #   non-blocking asynchronous writes.
    #
    # @param string<String> the message string to be logged
    def push(string)
      internal_push(string)
    end
    alias << push

    # Generate the following logging methods for DataMapper.logger as described
    # in the API:
    #   :fatal, :error, :warn, :info, :debug
    #   :off only gets a off? method
    LEVELS.each_pair do |name, number|
      unless name.to_s == 'off'
        class_eval <<-EOS
          # Log +message+ at the #{name} level, if that level is enabled.
          def #{name}(message)
            self.<<( prep_msg(message, "#{name}") ) if #{name}?
          end
        EOS
      end

      class_eval <<-EOS
        # Is the #{name} level (or above) currently enabled?
        def #{name}?
          #{number} >= level
        end
      EOS
    end

  end # class Logger
end # module DataMapper
# TODO: move to dm-more/dm-migrations

module DataMapper
  # Including this module registers the model with DestructiveMigrator,
  # whose migration strategy is simply to auto-migrate (drop and recreate)
  # each registered model's storage.
  module DestructiveMigrations
    # Ruby callback: record the including model on the migrator's registry.
    def self.included(model)
      DestructiveMigrator.models << model
    end
  end # module DestructiveMigrations

  # Migrator that destructively auto-migrates every registered model.
  class DestructiveMigrator < Migrator
    def self.migrate(repository_name)
      models.each { |model| model.auto_migrate! }
    end
  end # class DestructiveMigrator
end # module DataMapper
# TODO: move to dm-more/dm-migrations

module DataMapper
  # Base class and registry for migrators. Every subclass is tracked via
  # the inherited hook and given its own per-class models list; migrating
  # fans out to each registered subclass.
  class Migrator
    # All Migrator subclasses registered so far.
    def self.subclasses
      @@subclasses ||= []
    end

    # Replace the subclass registry (used by specs to reset state).
    def self.subclasses=(migrators)
      @@subclasses = migrators
    end

    # Ruby callback: register the new subclass and give it a lazily
    # initialized, per-class models registry.
    def self.inherited(klass)
      subclasses << klass

      def klass.models
        @models ||= []
      end
    end

    # Run every registered migrator against the named repository and
    # return the flattened results.
    def self.migrate(repository_name)
      subclasses.map { |migrator| migrator.migrate(repository_name) }.flatten
    end
  end # class Migrator
end # module DataMapper
module DataMapper
  # Class-level behaviour shared by every DataMapper model. A class becomes
  # a model by including DataMapper::Resource, which extends it with this
  # module (see self.extended below).
  module Model
    ##
    #
    # Extends the model with this module after DataMapper::Resource has been
    # included.
    #
    # This is a useful way to extend DataMapper::Model while
    # still retaining a self.extended method.
    #
    # @param [Module] extensions the module that is to be extend the model after
    #   after DataMapper::Model
    #
    # @return [TrueClass, FalseClass] whether or not the inclusions have been
    #   successfully appended to the list
    #-
    # @api public
    #
    # TODO: Move this do DataMapper::Model when DataMapper::Model is created
    def self.append_extensions(*extensions)
      extra_extensions.concat extensions
      true
    end

    # Modules queued up by append_extensions; applied to each model in
    # self.extended.
    def self.extra_extensions
      @extra_extensions ||= []
    end

    # Ruby callback: initialize the per-repository storage-name and
    # property registries on the model, then apply queued extensions.
    # Both hashes are lazily populated via Hash default blocks; the
    # default repository gets a fresh PropertySet while other repositories
    # start as a dup of the default one.
    def self.extended(model)
      model.instance_variable_set(:@storage_names, Hash.new { |h,k| h[k] = repository(k).adapter.resource_naming_convention.call(model.instance_eval { default_storage_name }) })
      model.instance_variable_set(:@properties, Hash.new { |h,k| h[k] = k == Repository.default_name ? PropertySet.new : h[Repository.default_name].dup })
      extra_extensions.each { |extension| model.extend(extension) }
    end

    # Ruby callback: copy storage names, properties and relationships down
    # to a subclass so STI children start from the parent's definitions.
    def inherited(target)
      target.instance_variable_set(:@storage_names, @storage_names.dup)
      target.instance_variable_set(:@properties, Hash.new { |h,k| h[k] = k == Repository.default_name ? self.properties(Repository.default_name).dup(target) : h[Repository.default_name].dup })

      if @relationships
        duped_relationships = {}; @relationships.each_pair{ |repos, rels| duped_relationships[repos] = rels.dup}
        target.instance_variable_set(:@relationships, duped_relationships)
      end
    end

    # Build an anonymous model class for the given storage name, including
    # Resource and defining default_storage_name from the classified name.
    # The optional block is instance_eval'd for property definitions, etc.
    def self.new(storage_name, &block)
      model = Class.new
      model.send(:include, Resource)
      model.class_eval <<-EOS, __FILE__, __LINE__
        def self.default_storage_name
          #{Extlib::Inflection.classify(storage_name).inspect}
        end
      EOS
      model.instance_eval(&block) if block_given?
      model
    end

    ##
    # Get the repository with a given name, or the default one for the current
    # context, or the default one for this class.
    #
    # @param name<Symbol> the name of the repository wanted
    # @param block<Block> block to execute with the fetched repository as parameter
    #
    # @return <Object, DataMapper::Respository> whatever the block returns,
    #   if given a block, otherwise the requested repository.
    #-
    # @api public
    def repository(name = nil, &block)
      #
      # There has been a couple of different strategies here, but me (zond) and dkubb are at least
      # united in the concept of explicitness over implicitness. That is - the explicit wish of the
      # caller (+name+) should be given more priority than the implicit wish of the caller (Repository.context.last).
      #
      DataMapper.repository(*Array(name || (Repository.context.last ? nil : default_repository_name)), &block)
    end

    ##
    # the name of the storage recepticle for this resource. IE. table name, for database stores
    #
    # @return <String> the storage name (IE table name, for database stores) associated with this resource in the given repository
    def storage_name(repository_name = default_repository_name)
      @storage_names[repository_name]
    end

    ##
    # the names of the storage recepticles for this resource across all repositories
    #
    # @return <Hash(Symbol => String)> All available names of storage recepticles
    def storage_names
      @storage_names
    end

    ##
    # defines a property on the resource
    #
    # @param <Symbol> name the name for which to call this property
    # @param <Type> type the type to define this property ass
    # @param <Hash(Symbol => String)> options a hash of available options
    # @see DataMapper::Property
    def property(name, type, options = {})
      property = Property.new(self, name, type, options)

      create_property_getter(property)
      create_property_setter(property)

      @properties[repository.name] << property

      # Add property to the other mappings as well if this is for the default
      # repository.
      if repository.name == default_repository_name
        @properties.each_pair do |repository_name, properties|
          next if repository_name == default_repository_name
          properties << property
        end
      end

      # Add the property to the lazy_loads set for this resources repository
      # only.
      # TODO Is this right or should we add the lazy contexts to all
      # repositories?
      if property.lazy?
        context = options.fetch(:lazy, :default)
        context = :default if context == true

        Array(context).each do |item|
          @properties[repository.name].lazy_context(item) << name
        end
      end

      property
    end

    # All repositories this model has properties in, plus the current one.
    # TODO: make this a Set?
    def repositories
      [ repository ] + @properties.keys.collect { |repository_name| DataMapper.repository(repository_name) }
    end

    # The PropertySet for the given repository.
    def properties(repository_name = default_repository_name)
      @properties[repository_name]
    end

    # Properties for this model merged with those of its STI subclasses
    # (deduplicated by property name).
    def properties_with_subclasses(repository_name = default_repository_name)
      #return properties if we're not interested in sti
      if @properties[repository_name].inheritance_property.nil?
        @properties[repository_name]
      else
        props = @properties[repository_name].dup
        self.child_classes.each do |subclass|
          subclass.properties(repository_name).each do |subprop|
            props << subprop if not props.any? { |prop| prop.name == subprop.name }
          end
        end
        props
      end
    end

    # The key properties for this model in the given repository.
    def key(repository_name = default_repository_name)
      @properties[repository_name].key
    end

    # The discriminator property used for single table inheritance, if any.
    def inheritance_property(repository_name = default_repository_name)
      @properties[repository_name].inheritance_property
    end

    # Default sort order: ascending by each key property (memoized).
    def default_order
      @default_order ||= key.map { |property| Query::Direction.new(property) }
    end

    # Look up a resource by key, consulting the identity map before
    # querying the repository.
    def get(*key)
      repository.identity_map(self).get(key) || first(to_query(repository, key))
    end

    # Like #get, but raises ObjectNotFoundError when no resource matches.
    def get!(*key)
      get(*key) || raise(ObjectNotFoundError, "Could not find #{self.name} with key #{key.inspect}")
    end

    # All resources matching the (scoped) query.
    def all(query = {})
      query = scoped_query(query)
      query.repository.read_many(query)
    end

    # The first resource matching the query, or — when called with a
    # leading numeric argument — that many resources.
    def first(*args)
      query = args.last.respond_to?(:merge) ? args.pop : {}
      query = scoped_query(query.merge(:limit => args.first || 1))

      if args.any?
        query.repository.read_many(query)
      else
        query.repository.read_one(query)
      end
    end

    # Deprecated alias for #get!.
    def [](*key)
      warn("#{name}[] is deprecated. Use #{name}.get! instead.")
      get!(*key)
    end

    # Find the first resource matching +query+, or create one from the
    # query conditions merged with +attributes+. Key values present in the
    # query are assigned directly so generated keys are not clobbered.
    def first_or_create(query, attributes = {})
      first(query) || begin
        resource = allocate
        query = query.dup

        properties(repository.name).key.each do |property|
          if value = query.delete(property.name)
            resource.send("#{property.name}=", value)
          end
        end

        resource.attributes = query.merge(attributes)
        resource.save
        resource
      end
    end

    ##
    # Create an instance of Resource with the given attributes
    #
    # @param <Hash(Symbol => Object)> attributes hash of attributes to set
    def create(attributes = {})
      resource = new(attributes)
      resource.save
      resource
    end

    ##
    # Dangerous version of #create. Raises if there is a failure
    #
    # @see DataMapper::Resource#create
    # @param <Hash(Symbol => Object)> attributes hash of attributes to set
    # @raise <PersistenceError> The resource could not be saved
    def create!(attributes = {})
      resource = create(attributes)
      raise PersistenceError, "Resource not saved: :new_record => #{resource.new_record?}, :dirty_attributes => #{resource.dirty_attributes.inspect}" if resource.new_record?
      resource
    end

    # Copy resources matching +query+ from the +source+ repository into
    # the +destination+ repository.
    # TODO SPEC
    def copy(source, destination, query = {})
      repository(destination) do
        repository(source).read_many(query).each do |resource|
          self.create(resource)
        end
      end
    end

    # Materialize a resource from a row of +values+ returned for +query+:
    # resolves the STI model from the discriminator column (when present),
    # reuses any identity-mapped instance, typecasts each field, and seeds
    # original_values for properties tracked by :hash or :load.
    # @api private
    # TODO: spec this
    def load(values, query)
      repository = query.repository
      model = self

      if inheritance_property_index = query.inheritance_property_index(repository)
        model = values.at(inheritance_property_index) || model
      end

      if key_property_indexes = query.key_property_indexes(repository)
        key_values = values.values_at(*key_property_indexes)
        identity_map = repository.identity_map(model)

        if resource = identity_map.get(key_values)
          return resource unless query.reload?
        else
          resource = model.allocate
          resource.instance_variable_set(:@repository, repository)
          identity_map.set(key_values, resource)
        end
      else
        # No key in the field list: the resource cannot be identity-mapped
        # or saved back, so mark it read-only.
        resource = model.allocate
        resource.readonly!
      end

      resource.instance_variable_set(:@new_record, false)

      query.fields.zip(values) do |property,value|
        value = property.custom? ? property.type.load(value, property) : property.typecast(value)
        property.set!(resource, value)

        if track = property.track
          case track
          when :hash
            resource.original_values[property.name] = value.dup.hash unless resource.original_values.has_key?(property.name) rescue value.hash
          when :load
            resource.original_values[property.name] = value unless resource.original_values.has_key?(property.name)
          end
        end
      end

      resource
    end

    # Build a Query whose conditions pair this model's key properties with
    # the given +key+ values.
    # TODO: spec this
    def to_query(repository, key, query = {})
      conditions = Hash[ *self.key(repository.name).zip(key).flatten ]
      Query.new(repository, self, query.merge(conditions))
    end

    private

    # Fallback storage name; normally overridden per adapter naming
    # convention (see self.extended).
    def default_storage_name
      self.name
    end

    def default_repository_name
      Repository.default_name
    end

    # Merge +query+ into the current scope (self.query), converting a Hash
    # into a Query first. An explicit :repository key in the hash wins over
    # the model's current repository.
    def scoped_query(query = self.query)
      assert_kind_of 'query', query, Query, Hash

      return self.query if query == self.query

      query = if query.kind_of?(Hash)
        Query.new(query.has_key?(:repository) ? query.delete(:repository) : self.repository, self, query)
      else
        query
      end

      self.query ? self.query.merge(query) : query
    end

    # defines the getter for the property
    def create_property_getter(property)
      class_eval <<-EOS, __FILE__, __LINE__
        #{property.reader_visibility}
        def #{property.getter}
          attribute_get(#{property.name.inspect})
        end
      EOS

      # Boolean properties also get a bare-name alias (e.g. admin for
      # admin?) unless it would shadow an existing method.
      if property.primitive == TrueClass && !property.model.instance_methods.include?(property.name.to_s)
        class_eval <<-EOS, __FILE__, __LINE__
          #{property.reader_visibility}
          alias #{property.name} #{property.getter}
        EOS
      end
    end

    # defines the setter for the property
    def create_property_setter(property)
      unless instance_methods.include?(property.name.to_s + '=')
        class_eval <<-EOS, __FILE__, __LINE__
          #{property.writer_visibility}
          def #{property.name}=(value)
            attribute_set(#{property.name.inspect}, value)
          end
        EOS
      end
    end

    def relationships(*args)
      # DO NOT REMOVE!
      # method_missing depends on these existing. Without this stub,
      # a missing module can cause misleading recursive errors.
      raise NotImplementedError.new
    end

    # Expose relationships as Query::Path objects and properties as
    # Property objects when referenced by name on the model class
    # (used when building query conditions).
    def method_missing(method, *args, &block)
      if relationship = self.relationships(repository.name)[method]
        klass = self == relationship.child_model ? relationship.parent_model : relationship.child_model
        return DataMapper::Query::Path.new(repository, [ relationship ], klass)
      end

      if property = properties(repository.name)[method]
        return property
      end

      super
    end

    # TODO: move to dm-more/dm-transactions
    module Transaction
      #
      # Produce a new Transaction for this Resource class
      #
      # @return <DataMapper::Adapters::Transaction
      #   a new DataMapper::Adapters::Transaction with all DataMapper::Repositories
      #   of the class of this DataMapper::Resource added.
      #-
      # @api public
      #
      # TODO: move to dm-more/dm-transactions
      def transaction(&block)
        DataMapper::Transaction.new(self, &block)
      end
    end # module Transaction

    include Transaction

    # TODO: move to dm-more/dm-migrations
    module Migration
      # TODO: move to dm-more/dm-migrations
      def storage_exists?(repository_name = default_repository_name)
        repository(repository_name).storage_exists?(storage_name(repository_name))
      end
    end # module Migration

    include Migration
  end # module Model
end # module DataMapper