artbase 0.1.0 → 0.2.0

@@ -1,484 +1,297 @@
-
-
- class Collection ## todo/check - change to OpenseaCollection or such - why? why not?
-
-   attr_reader :slug, :count
-
-   # check: rename count to items or such - why? why not?
-   # default format to '24x24' - why? why not?
-   def initialize( slug, count,
-                   meta_slugify: nil,
-                   image_pixelate: nil,
-                   patch: nil,
-                   exclude: [],
-                   format:,
-                   source: )
-     @slug = slug
-     @count = count
-
-     @meta_slugify = meta_slugify
-     @image_pixelate = image_pixelate
-
-     @patch = patch
-
-     @exclude = exclude
-
-     @width, @height = _parse_dimension( format )
-
-
-     ## note: allow multiple source formats / dimensions
-     ### e.g. convert 512x512 into [ [512,512] ]
-     ##
-     source = [source] unless source.is_a?( Array )
-     @sources = source.map { |dimension| _parse_dimension( dimension ) }
-   end
-
-   ## e.g. convert dimension (width x height) "24x24" or "24 x 24" to [24,24]
-   def _parse_dimension( str )
-     str.split( /x/i ).map { |str| str.strip.to_i }
-   end
-
-
-   def _image_pixelate( img )
-     if @image_pixelate
-       @image_pixelate.call( img )
-     else
-       @sources.each do |source_width, source_height|
-         if img.width == source_width && img.height == source_height
-           return img.pixelate( from: "#{source_width}x#{source_height}",
-                                to: "#{@width}x#{@height}" )
-         end
-       end
-
-       puts "!! ERROR - unknown image dimension #{img.width}x#{img.height}; sorry"
-       puts " supported source dimensions include: #{@sources.inspect}"
-       exit 1
-     end
-   end
-
-
-
-
-   def download_meta( range=(0...@count) )
-     self.class.download_meta( range, @slug )
-   end
-
-   def download_images( range=(0...@count) )
-     self.class.download_images( range, @slug )
-   end
-
-   def download( range=(0...@count) )
-     download_meta( range )
-     download_images( range )
-   end
-
-
-
-
-
-   def make_composite
-     ### use well-known / pre-defined (default) grids (cols x rows) for now - why? why not?
-
-     composite_count = @count - @exclude.size
-     cols, rows = if composite_count == 100 then [10, 10]
-                  elsif composite_count == 500 then [25, 20]
-                  elsif composite_count == 10000 then [100, 100]
-                  else
-                    raise ArgumentError, "sorry - unknown composite count #{composite_count}/#{@count} for now"
-                  end
-
-     composite = ImageComposite.new( cols, rows,
-                                     width: @width,
-                                     height: @height )
-
-     ## note: for now as a convention (auto-)add all .pngs
-     ## in saved in (default) pixelate output dir
-     composite.add_glob( "./#{@slug}/ii/*.png" )
-
-     composite.save( "./#{@slug}/tmp/#{@slug}-#{@width}x#{@height}.png" )
-   end
-
-
-
-   def _meta_slugify_match( regex, meta, index )
-     if m=regex.match( meta.name )
-       captures = m.named_captures ## get named captures in match data as hash (keys as strings)
-       # e.g.
-       #=> {"num"=>"3"}
-       #=> {"num"=>"498", "name"=>"Doge"}
-       pp captures
-
-       num = captures['num'] ? captures['num'].to_i( 10 ) : nil ## note: add base 10 (e.g. 015=>15)
-       name = captures['name'] ? captures['name'].strip : nil
-
-       slug = ''
-       if num
-         slug << "%06d" % num ## todo/check: always fill/zero-pad with six 000000's - why? why not?
-       end
-
-       if name
-         slug << "-" if num ## add separator
-         slug << slugify( name )
-       end
-       slug
-     else
-       nil ## note: return nil if no match / slug
-     end
-   end
-
-   def _do_meta_slugify( meta_slugify, meta, index )
-     if meta_slugify.is_a?( Regexp )
-       _meta_slugify_match( meta_slugify, meta, index )
-     elsif meta_slugify.is_a?( Proc )
-       meta_slugify.call( meta, index )
-     else
-       raise ArgumentError, "meta_slugify - unsupported type: #{meta_slugify.class.name}"
-     end
-   end
-
-
-   def _meta_slugify( meta, index )
-     slug = nil
-
-     if @meta_slugify.is_a?( Array )
-       @meta_slugify.each do |meta_slugify|
-         slug = _do_meta_slugify( meta_slugify, meta, index )
-         return slug if slug ## note: short-circuit on first match
-         ## use break instead of return - why? why not?
-       end
-     else ## assume object e.g. Regexp, Proc, etc.
-       slug = _do_meta_slugify( @meta_slugify, meta, index )
-     end
-
-     ## do nothing
-     if slug.nil?
-       puts "!! ERROR - cannot find id in >#{meta.name}<:"
-       pp meta
-       exit 1
-     end
-
-     slug
-   end
-
-
-
-   def each_meta( range=(0...@count),
-                  exclude: true, &blk )
-     range.each do |id| ## todo/fix: change id to index
-       meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
-
-       ####
-       # filter out/skip
-       if exclude && @exclude.include?( meta.name )
-         puts " skipping / exclude #{id} >#{meta.name}<..."
-         next
-       end
-
-       blk.call( meta, id )
-     end
-   end
-
-
-   def _normalize_trait_type( trait_type )
-     if @patch && @patch[:trait_types]
-       @patch[:trait_types][ trait_type ] || trait_type
-     else
-       trait_type
-     end
-   end
-
-   def _normalize_trait_value( trait_value )
-     if @patch && @patch[:trait_values]
-       @patch[:trait_values][ trait_value ] || trait_value
-     else
-       trait_value
-     end
-   end
-
-
-
-   def export_attributes
-     ## step 1: get counters
-     stats = calc_attribute_counters
-
-     total = stats[:total]
-     counter = stats[:traits]
-
-     puts
-     puts "attribute usage / counts:"
-     pp total
-     puts
-
-     puts "#{counter.size} attribute(s):"
-     counter.each do |trait_name, trait_rec|
-       puts " #{trait_name} #{trait_rec[:count]} (#{trait_rec[:by_type].size} uniques)"
-     end
-
-     recs = []
-
-
-     ## step 2: get tabular data
-     each_meta do |meta, id| ## todo/fix: change id to index
-
-       traits = meta.traits
-       # print "#{traits.size} - "
-       # pp traits
-
-       print "#{id}.." if id % 100 == 0 ## print progress report
-
-       ## setup empty hash table (with all attributes)
-       rec = {
-         'Slug' => _meta_slugify( meta, id ),
-         'Name' => meta.name,
-       }
-       ## add all attributes
-       counter.keys.reduce( rec ) { |h,value| h[value] = []; h }
-       ## pp rec
-
-       ## note: use an array (to allow multiple values for attributes)
-       traits.each do |trait_type, trait_value|
-         trait_type = _normalize_trait_type( trait_type )
-         trait_value = _normalize_trait_value( trait_value )
-
-         values = rec[ trait_type ]
-         values << trait_value
-       end
-       recs << rec
-     end
-     print "\n"
-
-     ## pp recs
-
-     ## flatten recs
-     data = []
-     recs.each do |rec|
-       row = rec.values.map do |value|
-         if value.is_a?( Array )
-           value.join( ' / ' )
-         else
-           value
-         end
-       end
-       data << row
-     end
-
-
-     ## sort by slug
-     data = data.sort {|l,r| l[0] <=> r[0] }
-     pp data
-
-     ### save dataset
-     ## note: change first colum Slug to ID - only used for "internal" sort etc.
-     headers = ['ID', 'Name'] + counter.keys ## add header row
-
-     path = "./#{@slug}/tmp/#{@slug}.csv"
-     dirname = File.dirname( path )
-     FileUtils.mkdir_p( dirname ) unless Dir.exist?( dirname )
-
-     File.open( path, 'w:utf-8' ) do |f|
-       f.write( headers.join( ', ' ))
-       f.write( "\n" )
-       ## note: replace ID with our own internal running (zero-based) counter
-       data.each_with_index do |row,i|
-         f.write( ([i]+row[1..-1]).join( ', '))
-         f.write( "\n" )
-       end
-     end
-   end
-
-
-   def calc_attribute_counters ## todo/check: use a different name _counts/_stats etc - why? why not?
-
-     attributes_by_count = { count: 0,
-                             by_count: Hash.new(0)
-                           }
-     counter = {}
-
-
-     each_meta do |meta, id| ## todo/fix: change id to index
-       traits = meta.traits
-       # print "#{traits.size} - "
-       # pp traits
-
-       print "#{id}.." if id % 100 == 0 ## print progress report
-
-       attributes_by_count[ :count ] +=1
-       attributes_by_count[ :by_count ][ traits.size ] += 1
-
-       traits.each do |trait_type, trait_value|
-         trait_type = _normalize_trait_type( trait_type )
-         trait_value = _normalize_trait_value( trait_value )
-
-
-         rec = counter[ trait_type ] ||= { count: 0,
-                                           by_type: Hash.new(0)
-                                         }
-         rec[ :count ] +=1
-         rec[ :by_type ][ trait_value ] += 1
-       end
-     end
-
-     print "\n"
-     puts
-
-     ## return all-in-one hash
-     {
-       total: attributes_by_count,
-       traits: counter,
-     }
-   end
-
-
-   def dump_attributes
-     stats = calc_attribute_counters
-
-     total = stats[:total]
-     counter = stats[:traits]
-
-     puts
-     puts "attribute usage / counts:"
-     pp total
-     puts
-
-     puts "#{counter.size} attribute(s):"
-     counter.each do |trait_name, trait_rec|
-       puts " #{trait_name} #{trait_rec[:count]} (#{trait_rec[:by_type].size} uniques)"
-     end
-
-     puts
-     pp counter
-   end
-
-
-
-   def pixelate( range=(0...@count) )
-
-     meta_slugs = Hash.new( 0 ) ## deduplicate (auto-add counter if duplicate)
-
-     ### todo/fix: must read slugs starting at 0
-     ### to work for deduplicate!!!!!!
-
-
-     range.each do |id|
-       meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
-
-       ####
-       # filter out/skip
-       if @exclude.include?( meta.name )
-         puts " skipping / exclude #{id} >#{meta.name}<..."
-         next
-       end
-
-       puts meta.name
-
-
-       meta_slug = _meta_slugify( meta, id )
-       count = meta_slugs[ meta_slug ] += 1
-
-       meta_slug = "#{meta_slug}_(#{count})" if count > 1
-
-
-       img = Image.read( "./#{@slug}/i/#{id}.png" )
-
-       pix = _image_pixelate( img )
-
-       path = "./#{@slug}/ii/#{meta_slug}.png"
-       puts " saving to >#{path}<..."
-       pix.save( path )
-     end
-   end
-
-
-
-   ################################
-   # private (static) helpers
-   #
-
-   def self.download_images( range, collection,
-                             original: false )
-     start = Time.now
-     delay_in_s = 0.3
-
-     range.each do |offset|
-       meta = OpenSea::Meta.read( "./#{collection}/meta/#{offset}.json" )
-
-       puts "==> #{offset}.json - #{meta.name}"
-
-       image_src = if original
-                     meta.image_original_url
-                   else
-                     meta.image_url
-                   end
-
-       puts " >#{image_src}<"
-       if image_src.nil?
-         puts "!! ERROR - no image url found (use original: #{original}):"
-         pp meta
-         exit 1
-       end
-
-       ## note: use a different directory to avoid size confusion!!!
-       img_slug = if original
-                    'i_org'
-                  else
-                    'i'
-                  end
-
-       ## note: will auto-add format file extension (e.g. .png, .jpg)
-       ## depending on http content type!!!!!
-       copy_image( image_src, "./#{collection}/#{img_slug}/#{offset}" )
-
-       stop = Time.now
-       diff = stop - start
-
-       mins = diff / 60 ## todo - use floor or such?
-       secs = diff % 60
-       puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
-
-       puts "sleeping #{delay_in_s}s..."
-       sleep( delay_in_s )
-     end
-   end
-
-
-   def self.download_meta( range, collection )
-     start = Time.now
-     delay_in_s = 0.3
-
-     range.each do |offset|
-
-       dest = "./#{collection}/meta/#{offset}.json"
-       meta = nil
-
-       puts "==> #{offset} / #{collection} (#{dest})..."
-
-       data = OpenSea.assets( collection: collection,
-                              offset: offset )
-       meta = OpenSea::Meta.new( data )
-       puts " name: >#{meta.name}<"
-       puts " image_url: >#{meta.image_url}<"
-
-
-       ## make sure path exists
-       dirname = File.dirname( dest )
-       FileUtils.mkdir_p( dirname ) unless Dir.exist?( dirname )
-
-       File.open( dest, "w:utf-8" ) do |f|
-         f.write( JSON.pretty_generate( data ) )
-       end
-
-
-       stop = Time.now
-       diff = stop - start
-
-       mins = diff / 60 ## todo - use floor or such?
-       secs = diff % 60
-       puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
-
-       puts " sleeping #{delay_in_s}s..."
-       sleep( delay_in_s )
-     end
-   end
-
-
- end # class Collection
+
+
+ class Collection ## todo/check - change to OpenseaCollection or such - why? why not?
+
+   attr_reader :slug, :count
+
+   # check: rename count to items or such - why? why not?
+   # default format to '24x24' - why? why not?
+   def initialize( slug, count,
+                   meta_slugify: nil,
+                   image_pixelate: nil,
+                   patch: nil,
+                   exclude: [],
+                   format:,
+                   source: )
+     @slug = slug
+     @count = count
+
+     @meta_slugify = meta_slugify
+     @image_pixelate = image_pixelate
+
+     @patch = patch
+
+     @exclude = exclude
+
+     @width, @height = _parse_dimension( format )
+
+
+     ## note: allow multiple source formats / dimensions
+     ### e.g. convert 512x512 into [ [512,512] ]
+     ##
+     source = [source] unless source.is_a?( Array )
+     @sources = source.map { |dimension| _parse_dimension( dimension ) }
+   end
+
+   ## e.g. convert dimension (width x height) "24x24" or "24 x 24" to [24,24]
+   def _parse_dimension( str )
+     str.split( /x/i ).map { |str| str.strip.to_i }
+   end
+
+
+   def _image_pixelate( img )
+     if @image_pixelate
+       @image_pixelate.call( img )
+     else
+       @sources.each do |source_width, source_height|
+         if img.width == source_width && img.height == source_height
+           from = "#{source_width}x#{source_height}"
+           to = "#{@width}x#{@height}"
+           steps = (Image::DOwNSAMPLING_STEPS[ to ] || {})[ from ]
+           if steps.nil?
+             puts "!! ERROR - no sampling steps defined for #{from} to #{to}; sorry"
+             exit 1
+           end
+
+           return img.pixelate( steps )
+         end
+       end
+
+       puts "!! ERROR - unknown image dimension #{img.width}x#{img.height}; sorry"
+       puts " supported source dimensions include: #{@sources.inspect}"
+       exit 1
+     end
+   end
+
+
+
+
+   def download_meta( range=(0...@count) )
+     self.class.download_meta( range, @slug )
+   end
+
+   def download_images( range=(0...@count) )
+     self.class.download_images( range, @slug )
+   end
+
+   def download( range=(0...@count) )
+     download_meta( range )
+     download_images( range )
+   end
+
+
+
+
+
+   def _meta_slugify_match( regex, meta, index )
+     if m=regex.match( meta.name )
+       captures = m.named_captures ## get named captures in match data as hash (keys as strings)
+       # e.g.
+       #=> {"num"=>"3"}
+       #=> {"num"=>"498", "name"=>"Doge"}
+       pp captures
+
+       num = captures['num'] ? captures['num'].to_i( 10 ) : nil ## note: add base 10 (e.g. 015=>15)
+       name = captures['name'] ? captures['name'].strip : nil
+
+       slug = ''
+       if num
+         slug << "%06d" % num ## todo/check: always fill/zero-pad with six 000000's - why? why not?
+       end
+
+       if name
+         slug << "-" if num ## add separator
+         slug << slugify( name )
+       end
+       slug
+     else
+       nil ## note: return nil if no match / slug
+     end
+   end
+
+   def _do_meta_slugify( meta_slugify, meta, index )
+     if meta_slugify.is_a?( Regexp )
+       _meta_slugify_match( meta_slugify, meta, index )
+     elsif meta_slugify.is_a?( Proc )
+       meta_slugify.call( meta, index )
+     else
+       raise ArgumentError, "meta_slugify - unsupported type: #{meta_slugify.class.name}"
+     end
+   end
+
+
+   def _meta_slugify( meta, index )
+     slug = nil
+
+     if @meta_slugify.is_a?( Array )
+       @meta_slugify.each do |meta_slugify|
+         slug = _do_meta_slugify( meta_slugify, meta, index )
+         return slug if slug ## note: short-circuit on first match
+         ## use break instead of return - why? why not?
+       end
+     else ## assume object e.g. Regexp, Proc, etc.
+       slug = _do_meta_slugify( @meta_slugify, meta, index )
+     end
+
+     ## do nothing
+     if slug.nil?
+       puts "!! ERROR - cannot find id in >#{meta.name}<:"
+       pp meta
+       exit 1
+     end
+
+     slug
+   end
+
+
+
+   def each_meta( range=(0...@count),
+                  exclude: true, &blk )
+     range.each do |id| ## todo/fix: change id to index
+       meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
+
+       ####
+       # filter out/skip
+       if exclude && @exclude.include?( meta.name )
+         puts " skipping / exclude #{id} >#{meta.name}<..."
+         next
+       end
+
+       blk.call( meta, id )
+     end
+   end
+
+
+
+
+   def pixelate( range=(0...@count) )
+
+     meta_slugs = Hash.new( 0 ) ## deduplicate (auto-add counter if duplicate)
+
+     ### todo/fix: must read slugs starting at 0
+     ### to work for deduplicate!!!!!!
+
+
+     range.each do |id|
+       meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
+
+       ####
+       # filter out/skip
+       if @exclude.include?( meta.name )
+         puts " skipping / exclude #{id} >#{meta.name}<..."
+         next
+       end
+
+       puts meta.name
+
+
+       meta_slug = _meta_slugify( meta, id )
+       count = meta_slugs[ meta_slug ] += 1
+
+       meta_slug = "#{meta_slug}_(#{count})" if count > 1
+
+
+       img = Image.read( "./#{@slug}/i/#{id}.png" )
+
+       pix = _image_pixelate( img )
+
+       path = "./#{@slug}/ii/#{meta_slug}.png"
+       puts " saving to >#{path}<..."
+       pix.save( path )
+     end
+   end
+
+
+
+   ################################
+   # private (static) helpers
+   #
+
+   def self.download_images( range, collection,
+                             original: false )
+     start = Time.now
+     delay_in_s = 0.3
+
+     range.each do |offset|
+       meta = OpenSea::Meta.read( "./#{collection}/meta/#{offset}.json" )
+
+       puts "==> #{offset}.json - #{meta.name}"
+
+       image_src = if original
+                     meta.image_original_url
+                   else
+                     meta.image_url
+                   end
+
+       puts " >#{image_src}<"
+       if image_src.nil?
+         puts "!! ERROR - no image url found (use original: #{original}):"
+         pp meta
+         exit 1
+       end
+
+       ## note: use a different directory to avoid size confusion!!!
+       img_slug = if original
+                    'i_org'
+                  else
+                    'i'
+                  end
+
+       ## note: will auto-add format file extension (e.g. .png, .jpg)
+       ## depending on http content type!!!!!
+       copy_image( image_src, "./#{collection}/#{img_slug}/#{offset}" )
+
+       stop = Time.now
+       diff = stop - start
+
+       mins = diff / 60 ## todo - use floor or such?
+       secs = diff % 60
+       puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+       puts "sleeping #{delay_in_s}s..."
+       sleep( delay_in_s )
+     end
+   end
+
+
+   def self.download_meta( range, collection )
+     start = Time.now
+     delay_in_s = 0.3
+
+     range.each do |offset|
+
+       dest = "./#{collection}/meta/#{offset}.json"
+       meta = nil
+
+       puts "==> #{offset} / #{collection} (#{dest})..."
+
+       data = OpenSea.assets( collection: collection,
+                              offset: offset )
+       meta = OpenSea::Meta.new( data )
+       puts " name: >#{meta.name}<"
+       puts " image_url: >#{meta.image_url}<"
+
+
+       ## make sure path exists
+       dirname = File.dirname( dest )
+       FileUtils.mkdir_p( dirname ) unless Dir.exist?( dirname )
+
+       File.open( dest, "w:utf-8" ) do |f|
+         f.write( JSON.pretty_generate( data ) )
+       end
+
+
+       stop = Time.now
+       diff = stop - start
+
+       mins = diff / 60 ## todo - use floor or such?
+       secs = diff % 60
+       puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+       puts " sleeping #{delay_in_s}s..."
+       sleep( delay_in_s )
+     end
+   end
+
+
+ end # class Collection
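
For context, here is a minimal usage sketch of the reworked 0.2.0 Collection API shown above. It is not taken from the gem's docs: the collection slug, item count, and the meta_slugify regexp are made-up placeholders, and the require line assumes the gem's default entry point. Note that format: and source: are now required keywords.

  require 'artbase'    ## assumption - adjust the require / setup to your project

  ## hypothetical collection - slug, count, and regexp are placeholders
  collection = Collection.new( 'mycollection', 100,
                               meta_slugify: /#(?<num>\d+)/,   ## pull the token number out of meta.name
                               format: '24x24',                ## target (pixelated) dimension
                               source: '512x512' )             ## accepted source dimension(s)

  collection.download     ## download_meta + download_images via the OpenSea helpers
  collection.pixelate     ## writes pixelated copies to ./mycollection/ii/<slug>.png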