artbase 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,484 @@
+
+
+class Collection    ## todo/check - change to OpenseaCollection or such - why? why not?
+
+  attr_reader :slug, :count
+
+  # check: rename count to items or such - why? why not?
+  # default format to '24x24' - why? why not?
+  def initialize( slug, count,
+                  meta_slugify: nil,
+                  image_pixelate: nil,
+                  patch: nil,
+                  exclude: [],
+                  format:,
+                  source: )
+    @slug  = slug
+    @count = count
+
+    @meta_slugify   = meta_slugify
+    @image_pixelate = image_pixelate
+
+    @patch = patch
+
+    @exclude = exclude
+
+    @width, @height = _parse_dimension( format )
+
+
+    ## note: allow multiple source formats / dimensions
+    ###  e.g. convert 512x512 into [ [512,512] ]
+    ##
+    source   = [source]  unless source.is_a?( Array )
+    @sources = source.map { |dimension| _parse_dimension( dimension ) }
+  end
+
+  ## e.g. convert dimension (width x height) "24x24" or "24 x 24" to [24,24]
+  def _parse_dimension( str )
+    str.split( /x/i ).map { |s| s.strip.to_i }
+  end
40
+
41
+
42
+ def _image_pixelate( img )
43
+ if @image_pixelate
44
+ @image_pixelate.call( img )
45
+ else
46
+ @sources.each do |source_width, source_height|
47
+ if img.width == source_width && img.height == source_height
48
+ return img.pixelate( from: "#{source_width}x#{source_height}",
49
+ to: "#{@width}x#{@height}" )
50
+ end
51
+ end
52
+
53
+ puts "!! ERROR - unknown image dimension #{img.width}x#{img.height}; sorry"
54
+ puts " supported source dimensions include: #{@sources.inspect}"
55
+ exit 1
56
+ end
57
+ end
+
+
+
+
+  def download_meta( range=(0...@count) )
+    self.class.download_meta( range, @slug )
+  end
+
+  def download_images( range=(0...@count) )
+    self.class.download_images( range, @slug )
+  end
+
+  def download( range=(0...@count) )
+    download_meta( range )
+    download_images( range )
+  end
+
+
+
+
+
+  def make_composite
+    ### use well-known / pre-defined (default) grids (cols x rows) for now - why? why not?
+
+    composite_count = @count - @exclude.size
+    cols, rows = if composite_count == 100    then [10, 10]
+                 elsif composite_count == 500   then [25, 20]
+                 elsif composite_count == 10000 then [100, 100]
+                 else
+                   raise ArgumentError, "sorry - unknown composite count #{composite_count}/#{@count} for now"
+                 end
+
+    composite = ImageComposite.new( cols, rows,
+                                    width:  @width,
+                                    height: @height )
+
+    ## note: for now as a convention (auto-)add all .pngs
+    ##   saved in the (default) pixelate output dir
+    composite.add_glob( "./#{@slug}/ii/*.png" )
+
+    composite.save( "./#{@slug}/tmp/#{@slug}-#{@width}x#{@height}.png" )
+  end
+
+
+
+  def _meta_slugify_match( regex, meta, index )
+    if m = regex.match( meta.name )
+      captures = m.named_captures   ## get named captures in match data as hash (keys as strings)
+      # e.g.
+      #=> {"num"=>"3"}
+      #=> {"num"=>"498", "name"=>"Doge"}
+      pp captures
+
+      num  = captures['num']  ? captures['num'].to_i( 10 ) : nil   ## note: add base 10 (e.g. 015=>15)
+      name = captures['name'] ? captures['name'].strip     : nil
+
+      slug = ''
+      if num
+        slug << "%06d" % num    ## todo/check: always fill/zero-pad with six 000000's - why? why not?
+      end
+
+      if name
+        slug << "-"  if num   ## add separator
+        slug << slugify( name )
+      end
+      slug
+    else
+      nil   ## note: return nil if no match / slug
+    end
+  end
+
+  def _do_meta_slugify( meta_slugify, meta, index )
+    if meta_slugify.is_a?( Regexp )
+      _meta_slugify_match( meta_slugify, meta, index )
+    elsif meta_slugify.is_a?( Proc )
+      meta_slugify.call( meta, index )
+    else
+      raise ArgumentError, "meta_slugify - unsupported type: #{meta_slugify.class.name}"
+    end
+  end
+
+
+  def _meta_slugify( meta, index )
+    slug = nil
+
+    if @meta_slugify.is_a?( Array )
+      @meta_slugify.each do |meta_slugify|
+        slug = _do_meta_slugify( meta_slugify, meta, index )
+        return slug  if slug    ## note: short-circuit on first match
+                                ##   use break instead of return - why? why not?
+      end
+    else   ## assume object e.g. Regexp, Proc, etc.
+      slug = _do_meta_slugify( @meta_slugify, meta, index )
+    end
+
+    ## bail out if no slug found
+    if slug.nil?
+      puts "!! ERROR - cannot find id in >#{meta.name}<:"
+      pp meta
+      exit 1
+    end
+
+    slug
+  end
+
+
+
+  def each_meta( range=(0...@count),
+                 exclude: true, &blk )
+    range.each do |id|   ## todo/fix: change id to index
+      meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
+
+      ####
+      # filter out/skip
+      if exclude && @exclude.include?( meta.name )
+        puts "  skipping / exclude #{id} >#{meta.name}<..."
+        next
+      end
+
+      blk.call( meta, id )
+    end
+  end
+
+
+  def _normalize_trait_type( trait_type )
+    if @patch && @patch[:trait_types]
+      @patch[:trait_types][ trait_type ] || trait_type
+    else
+      trait_type
+    end
+  end
+
+  def _normalize_trait_value( trait_value )
+    if @patch && @patch[:trait_values]
+      @patch[:trait_values][ trait_value ] || trait_value
+    else
+      trait_value
+    end
+  end
+
+
+
+  def export_attributes
+    ## step 1: get counters
+    stats = calc_attribute_counters
+
+    total   = stats[:total]
+    counter = stats[:traits]
+
+    puts
+    puts "attribute usage / counts:"
+    pp total
+    puts
+
+    puts "#{counter.size} attribute(s):"
+    counter.each do |trait_name, trait_rec|
+      puts "  #{trait_name}  #{trait_rec[:count]} (#{trait_rec[:by_type].size} uniques)"
+    end
+
+    recs = []
+
+
+    ## step 2: get tabular data
+    each_meta do |meta, id|   ## todo/fix: change id to index
+
+      traits = meta.traits
+      # print "#{traits.size} - "
+      # pp traits
+
+      print "#{id}.."  if id % 100 == 0   ## print progress report
+
+      ## setup empty hash table (with all attributes)
+      rec = {
+        'Slug' => _meta_slugify( meta, id ),
+        'Name' => meta.name,
+      }
+      ## add all attributes
+      counter.keys.reduce( rec ) { |h,value| h[value] = []; h }
+      ## pp rec
+
+      ## note: use an array (to allow multiple values for attributes)
+      traits.each do |trait_type, trait_value|
+        trait_type  = _normalize_trait_type( trait_type )
+        trait_value = _normalize_trait_value( trait_value )
+
+        values = rec[ trait_type ]
+        values << trait_value
+      end
+      recs << rec
+    end
+    print "\n"
+
+    ## pp recs
+
+    ## flatten recs
+    data = []
+    recs.each do |rec|
+      row = rec.values.map do |value|
+        if value.is_a?( Array )
+          value.join( ' / ' )
+        else
+          value
+        end
+      end
+      data << row
+    end
+
+
+    ## sort by slug
+    data = data.sort { |l,r| l[0] <=> r[0] }
+    pp data
+
+    ### save dataset
+    ##   note: change first column Slug to ID - only used for "internal" sort etc.
+    headers = ['ID', 'Name'] + counter.keys   ## add header row
+
+    path = "./#{@slug}/tmp/#{@slug}.csv"
+    dirname = File.dirname( path )
+    FileUtils.mkdir_p( dirname )  unless Dir.exist?( dirname )
+
+    File.open( path, 'w:utf-8' ) do |f|
+      f.write( headers.join( ', ' ))
+      f.write( "\n" )
+      ## note: replace ID with our own internal running (zero-based) counter
+      data.each_with_index do |row,i|
+        f.write( ([i]+row[1..-1]).join( ', '))
+        f.write( "\n" )
+      end
+    end
+  end
+
+
+  def calc_attribute_counters    ## todo/check: use a different name _counts/_stats etc - why? why not?
+
+    attributes_by_count = { count:    0,
+                            by_count: Hash.new(0)
+                          }
+    counter = {}
+
+
+    each_meta do |meta, id|   ## todo/fix: change id to index
+      traits = meta.traits
+      # print "#{traits.size} - "
+      # pp traits
+
+      print "#{id}.."  if id % 100 == 0   ## print progress report
+
+      attributes_by_count[ :count ] += 1
+      attributes_by_count[ :by_count ][ traits.size ] += 1
+
+      traits.each do |trait_type, trait_value|
+        trait_type  = _normalize_trait_type( trait_type )
+        trait_value = _normalize_trait_value( trait_value )
+
+
+        rec = counter[ trait_type ] ||= { count:   0,
+                                          by_type: Hash.new(0)
+                                        }
+        rec[ :count ] += 1
+        rec[ :by_type ][ trait_value ] += 1
+      end
+    end
+
+    print "\n"
+    puts
+
+    ## return all-in-one hash
+    {
+      total:  attributes_by_count,
+      traits: counter,
+    }
+  end
+
+
+  def dump_attributes
+    stats = calc_attribute_counters
+
+    total   = stats[:total]
+    counter = stats[:traits]
+
+    puts
+    puts "attribute usage / counts:"
+    pp total
+    puts
+
+    puts "#{counter.size} attribute(s):"
+    counter.each do |trait_name, trait_rec|
+      puts "  #{trait_name}  #{trait_rec[:count]} (#{trait_rec[:by_type].size} uniques)"
+    end
+
+    puts
+    pp counter
+  end
+
+
+
+  def pixelate( range=(0...@count) )
+
+    meta_slugs = Hash.new( 0 )   ## deduplicate (auto-add counter if duplicate)
+
+    ### todo/fix: must read slugs starting at 0
+    ###   for the deduplication to work!!!!!!
+
+
+    range.each do |id|
+      meta = OpenSea::Meta.read( "./#{@slug}/meta/#{id}.json" )
+
+      ####
+      # filter out/skip
+      if @exclude.include?( meta.name )
+        puts "  skipping / exclude #{id} >#{meta.name}<..."
+        next
+      end
+
+      puts meta.name
+
+
+      meta_slug = _meta_slugify( meta, id )
+      count     = meta_slugs[ meta_slug ] += 1
+
+      meta_slug = "#{meta_slug}_(#{count})"   if count > 1
+
+
+      img = Image.read( "./#{@slug}/i/#{id}.png" )
+
+      pix = _image_pixelate( img )
+
+      path = "./#{@slug}/ii/#{meta_slug}.png"
+      puts "  saving to >#{path}<..."
+      pix.save( path )
+    end
+  end
+
+
+
+  ################################
+  #  private (static) helpers
+  #
+
+  def self.download_images( range, collection,
+                            original: false )
+    start = Time.now
+    delay_in_s = 0.3
+
+    range.each do |offset|
+      meta = OpenSea::Meta.read( "./#{collection}/meta/#{offset}.json" )
+
+      puts "==> #{offset}.json - #{meta.name}"
+
+      image_src = if original
+                    meta.image_original_url
+                  else
+                    meta.image_url
+                  end
+
+      puts "  >#{image_src}<"
+      if image_src.nil?
+        puts "!! ERROR - no image url found (use original: #{original}):"
+        pp meta
+        exit 1
+      end
+
+      ## note: use a different directory to avoid size confusion!!!
+      img_slug = if original
+                   'i_org'
+                 else
+                   'i'
+                 end
+
+      ## note: will auto-add format file extension (e.g. .png, .jpg)
+      ##         depending on http content type!!!!!
+      copy_image( image_src, "./#{collection}/#{img_slug}/#{offset}" )
+
+      stop = Time.now
+      diff = stop - start
+
+      mins = diff / 60   ## todo - use floor or such?
+      secs = diff % 60
+      puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+      puts "sleeping #{delay_in_s}s..."
+      sleep( delay_in_s )
+    end
+  end
+
+
+  def self.download_meta( range, collection )
+    start = Time.now
+    delay_in_s = 0.3
+
+    range.each do |offset|
+
+      dest = "./#{collection}/meta/#{offset}.json"
+      meta = nil
+
+      puts "==> #{offset} / #{collection} (#{dest})..."
+
+      data = OpenSea.assets( collection: collection,
+                             offset:     offset )
+      meta = OpenSea::Meta.new( data )
+      puts "  name: >#{meta.name}<"
+      puts "  image_url: >#{meta.image_url}<"
+
+
+      ## make sure path exists
+      dirname = File.dirname( dest )
+      FileUtils.mkdir_p( dirname )  unless Dir.exist?( dirname )
+
+      File.open( dest, "w:utf-8" ) do |f|
+        f.write( JSON.pretty_generate( data ) )
+      end
+
+
+      stop = Time.now
+      diff = stop - start
+
+      mins = diff / 60   ## todo - use floor or such?
+      secs = diff % 60
+      puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+      puts "  sleeping #{delay_in_s}s..."
+      sleep( delay_in_s )
+    end
+  end
+
+
+end   # class Collection
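
For orientation, a minimal usage sketch for the Collection class above; the slug, count, format, source and meta_slugify values are hypothetical placeholders, not taken from the package:

  punks = Collection.new( 'cool-punks', 100,
                          meta_slugify: /punk #(?<num>\d+)/i,   ## hypothetical - regex with a num capture
                          format: '24x24',
                          source: '512x512' )

  punks.download          ## download_meta + download_images (via the OpenSea helpers)
  punks.pixelate          ## writes pixelated copies to ./cool-punks/ii/
  punks.make_composite    ## 100 items => 10x10 grid composite
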
@@ -0,0 +1,72 @@
+
+class TokenCollection
+
+  attr_reader :slug, :count
+
+  def initialize( slug, count,
+                  token_base: )    # check: rename count to items or such - why? why not?
+    @slug       = slug
+    @count      = count
+    @token_base = token_base
+  end
+
+
+  def download_meta( range=(0...@count) )
+    start = Time.now
+    delay_in_s = 0.3
+
+    range.each do |offset|
+      token_src = @token_base.sub( '{id}', offset.to_s )
+
+      puts "==> #{offset} - #{@slug}..."
+
+      copy_json( token_src, "./#{@slug}/token-meta/#{offset}.json" )
+
+      stop = Time.now
+      diff = stop - start
+
+      mins = diff / 60   ## todo - use floor or such?
+      secs = diff % 60
+      puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+      puts "sleeping #{delay_in_s}s..."
+      sleep( delay_in_s )
+    end
+  end
+
+
+  def download_images( range=(0...@count) )
+    start = Time.now
+    delay_in_s = 0.3
+
+    range.each do |offset|
+      txt  = File.open( "./#{@slug}/token-meta/#{offset}.json", 'r:utf-8') { |f| f.read }
+      data = JSON.parse( txt )
+
+      meta_name  = data['name']
+      meta_image = data['image']
+
+      puts "==> #{offset} - #{@slug}..."
+      puts "   name: #{meta_name}"
+      puts "   image: #{meta_image}"
+
+      ## note: will auto-add format file extension (e.g. .png, .jpg)
+      ##         depending on http content type!!!!!
+      start_copy = Time.now
+      copy_image( meta_image, "./#{@slug}/token-i/#{offset}" )
+
+      stop = Time.now
+
+      diff = stop - start_copy
+      puts "  downloaded image in #{diff} sec(s)"
+
+      diff = stop - start
+      mins = diff / 60   ## todo - use floor or such?
+      secs = diff % 60
+      puts "up #{mins} mins #{secs} secs (total #{diff} secs)"
+
+      puts "sleeping #{delay_in_s}s..."
+      sleep( delay_in_s )
+    end
+  end
+end   # class TokenCollection
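
A similar hedged sketch for TokenCollection; the slug, count and token_base URL template (with '{id}' replaced by the token offset) are placeholders:

  punks = TokenCollection.new( 'cool-punks', 100,
                               token_base: 'https://example.com/token/{id}' )   ## hypothetical URL

  punks.download_meta       ## saves ./cool-punks/token-meta/0.json, 1.json, ...
  punks.download_images     ## saves ./cool-punks/token-i/0.png, 1.png, ... (extension from content type)
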
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+require_relative 'collection/token'
+require_relative 'collection/image'
+require_relative 'collection/opensea'
+
@@ -0,0 +1,149 @@
+
+
+
+def slugify( name )
+  name.downcase.gsub( /[^a-z0-9 ()$_-]/ ) do |_|
+    puts "  !! WARN: asciify - found (and removing) non-ascii char >#{Regexp.last_match}<"
+    ''  ## remove - use empty string
+  end.gsub( ' ', '_')
+end
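
For example (a hypothetical input), slugify lowercases, drops characters outside a-z, 0-9, space, (), $, _ and - (printing a WARN for each dropped char), and replaces spaces with underscores:

  slugify( 'Doge (Wow!)' )   #=> "doge_(wow)"
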
+
+
+
+
+def convert_images( collection, from: 'jpg',
+                                to:   'png' )
+  files = Dir.glob( "./#{collection}/i/*.#{from}" )
+  puts "==> converting #{files.size} image(s) from #{from} to #{to}"
+
+  files.each_with_index do |file,i|
+    dirname  = File.dirname( file )
+    extname  = File.extname( file )
+    basename = File.basename( file, extname )
+
+    cmd = "magick convert #{dirname}/#{basename}.#{from} #{dirname}/#{basename}.#{to}"
+
+    puts "  [#{i+1}/#{files.size}] - #{cmd}"
+    system( cmd )
+
+    if from == 'gif'
+      ## assume multi-images for gif
+      ##   save image-0.png to image.png
+      path0 = "#{dirname}/#{basename}-0.#{to}"
+      path  = "#{dirname}/#{basename}.#{to}"
+
+      puts "  saving #{path0} to #{path}..."
+
+      blob = File.open( path0, 'rb' ) { |f| f.read }
+      File.open( path, 'wb' ) { |f| f.write( blob ) }
+    end
+  end
+end
+
+
+
+
+
+
+
+
+def copy_json( src, dest )
+  uri = URI.parse( src )
+
+  http = Net::HTTP.new( uri.host, uri.port )
+
+  puts "[debug] GET #{uri.request_uri} uri=#{uri}"
+
+  headers = { 'User-Agent' => "ruby v#{RUBY_VERSION}" }
+
+
+  request = Net::HTTP::Get.new( uri.request_uri, headers )
+  if uri.instance_of? URI::HTTPS
+    http.use_ssl = true
+    http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+  end
+
+  response = http.request( request )
+
+  if response.code == '200'
+    puts "#{response.code} #{response.message}"
+    puts "  content_type: #{response.content_type}, content_length: #{response.content_length}"
+
+    text = response.body.to_s
+    text = text.force_encoding( Encoding::UTF_8 )
+
+    data = JSON.parse( text )
+
+    File.open( dest, "w:utf-8" ) do |f|
+      f.write( JSON.pretty_generate( data ) )
+    end
+  else
+    puts "!! error:"
+    puts "#{response.code} #{response.message}"
+    exit 1
+  end
+end
+
+
+def copy_image( src, dest,
+                dump_headers: false )
+  uri = URI.parse( src )
+
+  http = Net::HTTP.new( uri.host, uri.port )
+
+  puts "[debug] GET #{uri.request_uri} uri=#{uri}"
+
+  headers = { 'User-Agent' => "ruby v#{RUBY_VERSION}" }
+
+  request = Net::HTTP::Get.new( uri.request_uri, headers )
+  if uri.instance_of? URI::HTTPS
+    http.use_ssl = true
+    http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+  end
+
+  response = http.request( request )
+
+  if response.code == '200'
+    puts "#{response.code} #{response.message}"
+
+    content_type   = response.content_type
+    content_length = response.content_length
+    puts "  content_type: #{content_type}, content_length: #{content_length}"
+
+    if dump_headers   ## for debugging dump headers
+      headers = response.each_header.to_h
+      puts "http response headers:"
+      pp headers
+    end
+
+
+    format = if content_type =~ %r{image/jpeg}i
+               'jpg'
+             elsif content_type =~ %r{image/png}i
+               'png'
+             elsif content_type =~ %r{image/gif}i
+               'gif'
+             else
+               puts "!! error:"
+               puts "   unknown image format content type: >#{content_type}<"
+               exit 1
+             end
+
+    ## make sure path exists - autocreate dirs
+    dirname = File.dirname( "#{dest}.#{format}" )
+    FileUtils.mkdir_p( dirname )  unless Dir.exist?( dirname )
+
+    File.open( "#{dest}.#{format}", 'wb' ) do |f|
+      f.write( response.body )
+    end
+  else
+    puts "!! error:"
+    puts "#{response.code} #{response.message}"
+    exit 1
+  end
+end
+
+
+
+
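
And a hedged usage sketch for the copy_json / copy_image helpers above (URLs and paths are placeholders); copy_image picks the file extension (.png, .jpg or .gif) from the response content type, while copy_json pretty-prints the parsed JSON to dest:

  copy_json(  'https://example.com/token/0',    './cool-punks/token-meta/0.json' )   ## hypothetical URL
  copy_image( 'https://example.com/punk-0.png', './cool-punks/i/0' )                 ## saves ./cool-punks/i/0.png
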