pgtools 1.0.0 → 1.0.1

This diff compares the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (84)
  1. checksums.yaml +4 -4
  2. data/LICENSE +25 -25
  3. data/bin/bxm_decoder +2 -2
  4. data/bin/bxm_encoder +2 -2
  5. data/bin/clh_convert +2 -2
  6. data/bin/clp_convert +2 -2
  7. data/bin/clw_convert +2 -2
  8. data/bin/dat_creator +2 -2
  9. data/bin/dat_extractor +2 -2
  10. data/bin/dat_ls +2 -2
  11. data/bin/eff_idd_creator +2 -2
  12. data/bin/eff_idd_extractor +2 -2
  13. data/bin/exp_convert_wiiu_pc +2 -2
  14. data/bin/exp_tool +2 -2
  15. data/bin/mot_convert_wiiu_pc +2 -2
  16. data/bin/mot_tool +2 -2
  17. data/bin/pkz_extractor +2 -2
  18. data/bin/scr_creator +2 -2
  19. data/bin/scr_extractor +2 -2
  20. data/bin/wmb_cleanup +2 -2
  21. data/bin/wmb_common_bones +2 -2
  22. data/bin/wmb_convert_pc_switch +2 -2
  23. data/bin/wmb_convert_wiiu_pc +2 -2
  24. data/bin/wmb_export_assimp +2 -2
  25. data/bin/wmb_get_bone_map +2 -2
  26. data/bin/wmb_import_assimp +2 -2
  27. data/bin/wmb_import_nier +2 -2
  28. data/bin/wmb_import_wiiu +2 -2
  29. data/bin/wtb_convert_wiiu_pc +2 -2
  30. data/bin/wtx_creator +2 -2
  31. data/bin/wtx_extractor +2 -2
  32. data/lib/bayonetta/alignment.rb +0 -0
  33. data/lib/bayonetta/bone.rb +0 -0
  34. data/lib/bayonetta/bxm.rb +180 -180
  35. data/lib/bayonetta/clh.rb +159 -159
  36. data/lib/bayonetta/clp.rb +212 -212
  37. data/lib/bayonetta/clw.rb +166 -166
  38. data/lib/bayonetta/dat.rb +261 -261
  39. data/lib/bayonetta/eff.rb +314 -314
  40. data/lib/bayonetta/endianness.rb +0 -0
  41. data/lib/bayonetta/exp.rb +768 -768
  42. data/lib/bayonetta/linalg.rb +416 -416
  43. data/lib/bayonetta/material_database.yaml +2581 -2581
  44. data/lib/bayonetta/mot.rb +763 -763
  45. data/lib/bayonetta/pkz.rb +63 -63
  46. data/lib/bayonetta/scr.rb +0 -0
  47. data/lib/bayonetta/tools/bxm_decoder.rb +23 -23
  48. data/lib/bayonetta/tools/bxm_encoder.rb +37 -37
  49. data/lib/bayonetta/tools/clh_convert.rb +60 -60
  50. data/lib/bayonetta/tools/clp_convert.rb +70 -70
  51. data/lib/bayonetta/tools/clw_convert.rb +60 -60
  52. data/lib/bayonetta/tools/dat_creator.rb +57 -57
  53. data/lib/bayonetta/tools/dat_extractor.rb +94 -94
  54. data/lib/bayonetta/tools/dat_ls.rb +106 -106
  55. data/lib/bayonetta/tools/eff_idd_creator.rb +66 -66
  56. data/lib/bayonetta/tools/eff_idd_extractor.rb +73 -73
  57. data/lib/bayonetta/tools/exp_convert_wiiu_pc.rb +33 -33
  58. data/lib/bayonetta/tools/exp_tool.rb +48 -48
  59. data/lib/bayonetta/tools/mot_convert_wiiu_pc.rb +33 -33
  60. data/lib/bayonetta/tools/mot_tool.rb +0 -0
  61. data/lib/bayonetta/tools/pkz_extractor.rb +75 -75
  62. data/lib/bayonetta/tools/scr_creator.rb +63 -63
  63. data/lib/bayonetta/tools/scr_extractor.rb +78 -78
  64. data/lib/bayonetta/tools/wmb_cleanup.rb +250 -250
  65. data/lib/bayonetta/tools/wmb_common_bones.rb +45 -45
  66. data/lib/bayonetta/tools/wmb_convert_pc_switch.rb +35 -35
  67. data/lib/bayonetta/tools/wmb_convert_wiiu_pc.rb +33 -33
  68. data/lib/bayonetta/tools/wmb_export_assimp.rb +479 -479
  69. data/lib/bayonetta/tools/wmb_get_bone_map.rb +50 -50
  70. data/lib/bayonetta/tools/wmb_import_assimp.rb +735 -735
  71. data/lib/bayonetta/tools/wmb_import_geometry_wiiu_pc.rb +472 -472
  72. data/lib/bayonetta/tools/wmb_import_nier.rb +309 -309
  73. data/lib/bayonetta/tools/wtb_convert_wiiu_pc.rb +95 -95
  74. data/lib/bayonetta/tools/wtb_import_textures.rb +103 -103
  75. data/lib/bayonetta/tools/wtx_creator.rb +69 -69
  76. data/lib/bayonetta/tools/wtx_extractor.rb +85 -85
  77. data/lib/bayonetta/vertex_types.yaml +0 -0
  78. data/lib/bayonetta/vertex_types2.yaml +0 -0
  79. data/lib/bayonetta/vertex_types_nier.yaml +145 -145
  80. data/lib/bayonetta/wmb.rb +2455 -2443
  81. data/lib/bayonetta/wmb3.rb +759 -759
  82. data/lib/bayonetta/wtb.rb +481 -481
  83. data/lib/bayonetta.rb +60 -60
  84. metadata +2 -2
data/lib/bayonetta/dat.rb CHANGED
@@ -1,261 +1,261 @@
- require 'stringio'
- module Bayonetta
-   class DATFile < LibBin::Structure
-     include Alignment
-     attr_reader :big
-     ALIGNMENTS = {
-       'dat' => 0x2000,
-       'wmb' => 0x1000,
-       'wtb' => 0x1000,
-       'wtp' => 0x1000,
-       'wta' => 0x40,
-       'exp' => 0x1000,
-       'sop' => 0x40,
-       'eff' => 0x1000,
-       'sdx' => 0x1000,
-       'bxm' => 0x40
-     }
-     ALIGNMENTS.default = 0x10
-
-     class Header < LibBin::Structure
-       string :id, 4
-       uint32 :num_files
-       uint32 :offset_file_offsets
-       uint32 :offset_file_extensions
-       uint32 :offset_file_names
-       uint32 :offset_file_sizes
-       uint32 :offset_hash_map
-     end
-
-     class HashMap < LibBin::Structure
-       class Header < LibBin::Structure
-         uint32 :pre_hash_shift
-         uint32 :offset_bucket_ranks
-         uint32 :offset_hashes
-         uint32 :offset_file_indices
-       end
-       register_field :header, Header
-       int16 :bucket_ranks, count: '(1<<(31 - header\pre_hash_shift))', offset: 'header\offset_bucket_ranks', relative_offset: true
-       uint32 :hashes, count: '..\header\num_files', offset: 'header\offset_hashes', relative_offset: true
-       uint16 :file_indices, count: '..\header\num_files', offset: 'header\offset_file_indices', relative_offset: true
-
-       def get
-         {
-           pre_hash_shift: @header.pre_hash_shift,
-           hashes: file_indices.zip(hashes).sort { |(i1 ,h1), (i2, h2)|
-             i1 <=> i2
-           }.collect { |i, h|
-             h
-           }
-         }
-       end
-
-       def initialize
-         super
-         @header = Header::new
-       end
-
-       def set(hash_map)
-         bit_shift = hash_map[:pre_hash_shift]
-         hash_list = hash_map[:hashes]
-         num_files = hash_list.size
-         @header.pre_hash_shift = bit_shift
-         buckets = Hash::new { |h, k| h[k] = [] }
-         hash_list.each_with_index { |h, i|
-           bucket_index = h >> @header.pre_hash_shift
-           buckets[bucket_index].push [h, i]
-         }
-         @bucket_ranks = []
-         @hashes = []
-         @file_indices = []
-         bucket_rank = 0
-         num_buckets = (1 << (31 - header.pre_hash_shift))
-         num_buckets.times { |i|
-           if buckets.has_key?(i)
-             @bucket_ranks.push bucket_rank
-             bucket_rank += buckets[i].size
-             buckets[i].each { |h, ind|
-               @hashes.push h
-               @file_indices.push ind
-             }
-           else
-             @bucket_ranks.push -1
-           end
-         }
-         @header.offset_bucket_ranks = 0x10
-         @header.offset_hashes = header.offset_bucket_ranks + num_buckets * 2
-         @header.offset_file_indices = header.offset_hashes + num_files * 4
-         self
-       end
-     end
-
-     register_field :header, Header
-     uint32 :file_offsets, count: 'header\num_files', offset: 'header\offset_file_offsets'
-     string :file_extensions, 4, count: 'header\num_files', offset: 'header\offset_file_extensions'
-     uint32 :file_name_length, offset: 'header\offset_file_names'
-     string :file_names, count: 'header\num_files', offset: 'header\offset_file_names + 4 + __iterator * file_name_length', sequence: true
-     uint32 :file_sizes, count: 'header\num_files', offset: 'header\offset_file_sizes'
-     register_field :hash_map, HashMap, offset: 'header\offset_hash_map'
-     string :files, 'file_sizes[__iterator]', count: 'header\num_files', offset: 'file_offsets[__iterator]', sequence: true
-
-     def self.is_big?(f)
-       f.rewind
-       block = lambda { |big|
-         h = Header::load(f, big)
-         h.offset_file_offsets < f.size &&
-         h.offset_file_names < f.size &&
-         h.offset_file_sizes < f.size &&
-         h.offset_hash_map < f.size
-       }
-       big = block.call(true)
-       f.rewind
-       small = block.call(false)
-       f.rewind
-       raise "Invalid data!" unless big ^ small
-       return big
-     end
-
-     def initialize(big = false)
-       @big = big
-       super()
-       @header = Header::new
-       @header.id = "DAT\x00".b
-       @header.num_files = 0
-       @header.offset_file_offsets = 0
-       @header.offset_file_extensions = 0
-       @header.offset_file_names = 0
-       @header.offset_file_sizes = 0
-       @header.offset_hash_map = 0
-
-       @file_offsets = []
-       @file_extensions = []
-       @file_name_length = 0
-       @file_names = []
-       @file_sizes = []
-       @files = []
-
-       @hash_map = nil
-     end
-
-     def invalidate_layout
-       @header.offset_file_offsets = 0
-       @header.offset_file_extensions = 0
-       @header.offset_file_names = 0
-       @header.offset_file_sizes = 0
-       @header.offset_hash_map = 0
-       @file_offsets = []
-       @hash_map = nil
-       self
-     end
-
-     def layout
-       @file_names.collect { |name| name[0..-2] }
-     end
-
-     def each
-       if block_given? then
-         @header.num_files.times { |i|
-           yield @file_names[i][0..-2], StringIO::new(@files[i] ? @files[i] : "", "rb")
-         }
-       else
-         to_enum(:each)
-       end
-     end
-
-     def [](i)
-       return [@file_names[i][0..-2], StringIO::new(@files[i] ? @files[i] : "", "rb")]
-     end
-
-     def push(name, file)
-       invalidate_layout
-       @file_names.push name+"\x00"
-       if file.kind_of?(StringIO)
-         data = file.string
-       else
-         file.rewind
-         data = file.read
-       end
-       @files.push data
-       @file_sizes.push file.size
-       extname = File.extname(name)
-       raise "Invalid name, missing extension!" if extname == ""
-       @file_extensions.push extname[1..-1]+"\x00"
-       @header.num_files += 1
-       self
-     end
-
-     def compute_layout
-       @header.offset_file_offsets = 0x20
-       @header.offset_file_extensions = @header.offset_file_offsets + 4 * @header.num_files
-       @header.offset_file_names = @header.offset_file_extensions + 4 * @header.num_files
-       max_file_name_length = @file_names.collect(&:length).max
-       @file_name_length = max_file_name_length
-       @header.offset_file_sizes = @header.offset_file_names + 4 + @file_name_length * @header.num_files
-       @header.offset_file_sizes = align(@header.offset_file_sizes, 4)
-       if @hash_map
-         @header.offset_hash_map = @header.offset_file_sizes + 4 * @header.num_files
-         files_offset = @header.offset_hash_map + @hash_map.__size(@header.offset_hash_map, self)
-       else
-         @offset_hash_map = 0
-         files_offset = @header.offset_file_sizes + 4 * @header.num_files
-       end
-       @file_offsets = @header.num_files.times.collect { |i|
-         if @file_sizes[i] > 0
-           tmp = align(files_offset, ALIGNMENTS[@file_extensions[i][0..-2]])
-           files_offset = align(tmp + @file_sizes[i], ALIGNMENTS[@file_extensions[i][0..-2]])
-           tmp
-         else
-           0
-         end
-       }
-       @total_size = align(files_offset, 0x1000)
-       self
-     end
-
-     def set_hash_map(hash)
-       @hash_map = HashMap::new
-       @hash_map.set hash
-     end
-
-     def self.load(input_name)
-       if input_name.respond_to?(:read) && input_name.respond_to?(:seek)
-         input = input_name
-       else
-         File.open(input_name, "rb") { |f|
-           input = StringIO::new(f.read, "rb")
-         }
-       end
-       big = self::is_big?(input)
-       dat = self::new(big)
-       dat.__load(input, big)
-       input.close unless input_name.respond_to?(:read) && input_name.respond_to?(:seek)
-       dat
-     end
-
-     def dump(output_name)
-       compute_layout
-       if output_name.respond_to?(:write) && output_name.respond_to?(:seek)
-         output = output_name
-       else
-         output = StringIO::new("", "wb")#File.open(output_name, "wb")
-         output.write("\x00"*@total_size)
-         output.rewind
-       end
-       output.rewind
-
-       __set_dump_state(output, @big, nil, nil)
-       __dump_fields
-       __unset_dump_state
-
-       unless output_name.respond_to?(:write) && output_name.respond_to?(:seek)
-         File.open(output_name, "wb") { |f|
-           f.write output.string
-         }
-         output.close
-       end
-       self
-     end
-
-   end
-
- end
+ require 'stringio'
+ module Bayonetta
+   class DATFile < LibBin::Structure
+     include Alignment
+     attr_reader :big
+     ALIGNMENTS = {
+       'dat' => 0x2000,
+       'wmb' => 0x1000,
+       'wtb' => 0x1000,
+       'wtp' => 0x1000,
+       'wta' => 0x40,
+       'exp' => 0x1000,
+       'sop' => 0x40,
+       'eff' => 0x1000,
+       'sdx' => 0x1000,
+       'bxm' => 0x40
+     }
+     ALIGNMENTS.default = 0x10
+
+     class Header < LibBin::Structure
+       string :id, 4
+       uint32 :num_files
+       uint32 :offset_file_offsets
+       uint32 :offset_file_extensions
+       uint32 :offset_file_names
+       uint32 :offset_file_sizes
+       uint32 :offset_hash_map
+     end
+
+     class HashMap < LibBin::Structure
+       class Header < LibBin::Structure
+         uint32 :pre_hash_shift
+         uint32 :offset_bucket_ranks
+         uint32 :offset_hashes
+         uint32 :offset_file_indices
+       end
+       register_field :header, Header
+       int16 :bucket_ranks, count: '(1<<(31 - header\pre_hash_shift))', offset: 'header\offset_bucket_ranks', relative_offset: true
+       uint32 :hashes, count: '..\header\num_files', offset: 'header\offset_hashes', relative_offset: true
+       uint16 :file_indices, count: '..\header\num_files', offset: 'header\offset_file_indices', relative_offset: true
+
+       def get
+         {
+           pre_hash_shift: @header.pre_hash_shift,
+           hashes: file_indices.zip(hashes).sort { |(i1 ,h1), (i2, h2)|
+             i1 <=> i2
+           }.collect { |i, h|
+             h
+           }
+         }
+       end
+
+       def initialize
+         super
+         @header = Header::new
+       end
+
+       def set(hash_map)
+         bit_shift = hash_map[:pre_hash_shift]
+         hash_list = hash_map[:hashes]
+         num_files = hash_list.size
+         @header.pre_hash_shift = bit_shift
+         buckets = Hash::new { |h, k| h[k] = [] }
+         hash_list.each_with_index { |h, i|
+           bucket_index = h >> @header.pre_hash_shift
+           buckets[bucket_index].push [h, i]
+         }
+         @bucket_ranks = []
+         @hashes = []
+         @file_indices = []
+         bucket_rank = 0
+         num_buckets = (1 << (31 - header.pre_hash_shift))
+         num_buckets.times { |i|
+           if buckets.has_key?(i)
+             @bucket_ranks.push bucket_rank
+             bucket_rank += buckets[i].size
+             buckets[i].each { |h, ind|
+               @hashes.push h
+               @file_indices.push ind
+             }
+           else
+             @bucket_ranks.push -1
+           end
+         }
+         @header.offset_bucket_ranks = 0x10
+         @header.offset_hashes = header.offset_bucket_ranks + num_buckets * 2
+         @header.offset_file_indices = header.offset_hashes + num_files * 4
+         self
+       end
+     end
+
+     register_field :header, Header
+     uint32 :file_offsets, count: 'header\num_files', offset: 'header\offset_file_offsets'
+     string :file_extensions, 4, count: 'header\num_files', offset: 'header\offset_file_extensions'
+     uint32 :file_name_length, offset: 'header\offset_file_names'
+     string :file_names, count: 'header\num_files', offset: 'header\offset_file_names + 4 + __iterator * file_name_length', sequence: true
+     uint32 :file_sizes, count: 'header\num_files', offset: 'header\offset_file_sizes'
+     register_field :hash_map, HashMap, offset: 'header\offset_hash_map'
+     string :files, 'file_sizes[__iterator]', count: 'header\num_files', offset: 'file_offsets[__iterator]', sequence: true
+
+     def self.is_big?(f)
+       f.rewind
+       block = lambda { |big|
+         h = Header::load(f, big)
+         h.offset_file_offsets < f.size &&
+         h.offset_file_names < f.size &&
+         h.offset_file_sizes < f.size &&
+         h.offset_hash_map < f.size
+       }
+       big = block.call(true)
+       f.rewind
+       small = block.call(false)
+       f.rewind
+       raise "Invalid data!" unless big ^ small
+       return big
+     end
+
+     def initialize(big = false)
+       @big = big
+       super()
+       @header = Header::new
+       @header.id = "DAT\x00".b
+       @header.num_files = 0
+       @header.offset_file_offsets = 0
+       @header.offset_file_extensions = 0
+       @header.offset_file_names = 0
+       @header.offset_file_sizes = 0
+       @header.offset_hash_map = 0
+
+       @file_offsets = []
+       @file_extensions = []
+       @file_name_length = 0
+       @file_names = []
+       @file_sizes = []
+       @files = []
+
+       @hash_map = nil
+     end
+
+     def invalidate_layout
+       @header.offset_file_offsets = 0
+       @header.offset_file_extensions = 0
+       @header.offset_file_names = 0
+       @header.offset_file_sizes = 0
+       @header.offset_hash_map = 0
+       @file_offsets = []
+       @hash_map = nil
+       self
+     end
+
+     def layout
+       @file_names.collect { |name| name[0..-2] }
+     end
+
+     def each
+       if block_given? then
+         @header.num_files.times { |i|
+           yield @file_names[i][0..-2], StringIO::new(@files[i] ? @files[i] : "", "rb")
+         }
+       else
+         to_enum(:each)
+       end
+     end
+
+     def [](i)
+       return [@file_names[i][0..-2], StringIO::new(@files[i] ? @files[i] : "", "rb")]
+     end
+
+     def push(name, file)
+       invalidate_layout
+       @file_names.push name+"\x00"
+       if file.kind_of?(StringIO)
+         data = file.string
+       else
+         file.rewind
+         data = file.read
+       end
+       @files.push data
+       @file_sizes.push file.size
+       extname = File.extname(name)
+       raise "Invalid name, missing extension!" if extname == ""
+       @file_extensions.push extname[1..-1]+"\x00"
+       @header.num_files += 1
+       self
+     end
+
+     def compute_layout
+       @header.offset_file_offsets = 0x20
+       @header.offset_file_extensions = @header.offset_file_offsets + 4 * @header.num_files
+       @header.offset_file_names = @header.offset_file_extensions + 4 * @header.num_files
+       max_file_name_length = @file_names.collect(&:length).max
+       @file_name_length = max_file_name_length
+       @header.offset_file_sizes = @header.offset_file_names + 4 + @file_name_length * @header.num_files
+       @header.offset_file_sizes = align(@header.offset_file_sizes, 4)
+       if @hash_map
+         @header.offset_hash_map = @header.offset_file_sizes + 4 * @header.num_files
+         files_offset = @header.offset_hash_map + @hash_map.__size(@header.offset_hash_map, self)
+       else
+         @offset_hash_map = 0
+         files_offset = @header.offset_file_sizes + 4 * @header.num_files
+       end
+       @file_offsets = @header.num_files.times.collect { |i|
+         if @file_sizes[i] > 0
+           tmp = align(files_offset, ALIGNMENTS[@file_extensions[i][0..-2]])
+           files_offset = align(tmp + @file_sizes[i], ALIGNMENTS[@file_extensions[i][0..-2]])
+           tmp
+         else
+           0
+         end
+       }
+       @total_size = align(files_offset, 0x1000)
+       self
+     end
+
+     def set_hash_map(hash)
+       @hash_map = HashMap::new
+       @hash_map.set hash
+     end
+
+     def self.load(input_name)
+       if input_name.respond_to?(:read) && input_name.respond_to?(:seek)
+         input = input_name
+       else
+         File.open(input_name, "rb") { |f|
+           input = StringIO::new(f.read, "rb")
+         }
+       end
+       big = self::is_big?(input)
+       dat = self::new(big)
+       dat.__load(input, big)
+       input.close unless input_name.respond_to?(:read) && input_name.respond_to?(:seek)
+       dat
+     end
+
+     def dump(output_name)
+       compute_layout
+       if output_name.respond_to?(:write) && output_name.respond_to?(:seek)
+         output = output_name
+       else
+         output = StringIO::new("", "wb")#File.open(output_name, "wb")
+         output.write("\x00"*@total_size)
+         output.rewind
+       end
+       output.rewind
+
+       __set_dump_state(output, @big, nil, nil)
+       __dump_fields
+       __unset_dump_state
+
+       unless output_name.respond_to?(:write) && output_name.respond_to?(:seek)
+         File.open(output_name, "wb") { |f|
+           f.write output.string
+         }
+         output.close
+       end
+       self
+     end
+
+   end
+
+ end
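
For orientation, here is a minimal usage sketch of the DATFile API visible in the diff above (load, each, push, dump are taken directly from the code shown). It assumes the pgtools gem is installed and that require 'bayonetta' loads Bayonetta::DATFile via data/lib/bayonetta.rb from the file list; the .dat paths are hypothetical.

require 'bayonetta'

# Unpack every entry of an existing archive; each yields [name, StringIO].
# "pl0010.dat" is a hypothetical input path.
dat = Bayonetta::DATFile.load("pl0010.dat")
dat.each { |name, io|
  File.binwrite(name, io.read)
}

# Repack the same entries into a new archive. dump calls compute_layout,
# which re-derives per-extension alignment, offsets and total size.
repacked = Bayonetta::DATFile.new
dat.each { |name, io| repacked.push(name, io) }
repacked.dump("pl0010_repacked.dat")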