nbtfile 0.0.8 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Rakefile CHANGED
@@ -41,6 +41,7 @@ require 'rake/rdoctask'
  Rake::RDocTask.new do |rdoc|
  version = File.exist?('VERSION') ? File.read('VERSION') : ""

+ rdoc.main = 'README.rdoc'
  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = "nbtfile #{version}"
  rdoc.rdoc_files.include('README*')
data/VERSION CHANGED
@@ -1 +1 @@
- 0.0.8
+ 0.1.0
data/lib/nbtfile.rb CHANGED
@@ -23,8 +23,10 @@

  require 'zlib'
  require 'stringio'
+ require 'yaml'
+ require 'enumerator'

- class String
+ class String #:nodoc: all
  begin
  alias_method :_nbtfile_getbyte, :getbyte
  rescue NameError
@@ -67,28 +69,57 @@ end

  module NBTFile

+ # Raised when an invalid string encoding is encountered
  class EncodingError < RuntimeError
  end

+ module Private #:nodoc: all
  TOKEN_CLASSES_BY_INDEX = []
  TOKEN_INDICES_BY_CLASS = {}

  BaseToken = Struct.new :name, :value
+ end

+ # Classes representing NBT tokens. Each has a constructor with
+ # two arguments, name and value, and corresponding accessors.
  module Tokens
  tag_names = %w(End Byte Short Int Long Float Double
  Byte_Array String List Compound)
  tag_names.each_with_index do |tag_name, index|
  tag_name = "TAG_#{tag_name}"
- token_class = Class.new(BaseToken)
+ token_class = Class.new(Private::BaseToken)

  const_set tag_name, token_class

- TOKEN_CLASSES_BY_INDEX[index] = token_class
- TOKEN_INDICES_BY_CLASS[token_class] = index
+ Private::TOKEN_CLASSES_BY_INDEX[index] = token_class
+ Private::TOKEN_INDICES_BY_CLASS[token_class] = index
+ end
+ class TAG_End
+ end
+ class TAG_Byte
+ end
+ class TAG_Short
+ end
+ class TAG_Int
+ end
+ class TAG_Long
+ end
+ class TAG_Float
+ end
+ class TAG_Double
+ end
+ class TAG_String
+ end
+ class TAG_Byte_Array
+ end
+ class TAG_List
+ end
+ class TAG_Compound
  end
  end

+
+ module Private #:nodoc: all
  module CommonMethods
  def sign_bit(n_bytes)
  1 << ((n_bytes << 3) - 1)
@@ -192,17 +223,17 @@ module ReadMethods
  value = read_string(io)
  when type == TAG_List
  list_type, list_length = read_list_header(io)
- next_state = ListReaderState.new(state, list_type, list_length)
+ next_state = ListTokenizerState.new(state, list_type, list_length)
  value = list_type
  when type == TAG_Compound
- next_state = CompoundReaderState.new(state)
+ next_state = CompoundTokenizerState.new(state)
  end

  [next_state, type[name, value]]
  end
  end

- class TopReaderState
+ class TopTokenizerState
  include ReadMethods
  include Tokens

@@ -210,13 +241,13 @@ class TopReaderState
  type = read_type(io)
  raise RuntimeError, "expected TAG_Compound" unless type == TAG_Compound
  name = read_string(io)
- end_state = EndReaderState.new()
- next_state = CompoundReaderState.new(end_state)
+ end_state = EndTokenizerState.new()
+ next_state = CompoundTokenizerState.new(end_state)
  [next_state, type[name, nil]]
  end
  end

- class CompoundReaderState
+ class CompoundTokenizerState
  include ReadMethods
  include Tokens

@@ -237,7 +268,7 @@ class CompoundReaderState
  end
  end

- class ListReaderState
+ class ListTokenizerState
  include ReadMethods
  include Tokens

@@ -262,16 +293,16 @@ class ListReaderState
  end
  end

- class EndReaderState
+ class EndTokenizerState
  def get_token(io)
  [self, nil]
  end
  end

- class Reader
+ class Tokenizer
  def initialize(io)
  @gz = Zlib::GzipReader.new(io)
- @state = TopReaderState.new()
+ @state = TopTokenizerState.new()
  end

  def each_token
@@ -286,7 +317,7 @@ class Reader
  end
  end

- module WriteMethods
+ module EmitMethods
  include Tokens
  include CommonMethods

@@ -372,9 +403,9 @@ module WriteMethods
  when type == TAG_Double
  emit_double(io, value)
  when type == TAG_List
- next_state = ListWriterState.new(state, value, capturing)
+ next_state = ListEmitterState.new(state, value, capturing)
  when type == TAG_Compound
- next_state = CompoundWriterState.new(state, capturing)
+ next_state = CompoundEmitterState.new(state, capturing)
  when type == TAG_End
  next_state = cont
  else
@@ -385,8 +416,8 @@ module WriteMethods
  end
  end

- class TopWriterState
- include WriteMethods
+ class TopEmitterState
+ include EmitMethods
  include Tokens

  def emit_token(io, token)
@@ -394,15 +425,15 @@ class TopWriterState
  when TAG_Compound
  emit_type(io, token.class)
  emit_string(io, token.name)
- end_state = EndWriterState.new()
- next_state = CompoundWriterState.new(end_state, nil)
+ end_state = EndEmitterState.new()
+ next_state = CompoundEmitterState.new(end_state, nil)
  next_state
  end
  end
  end

- class CompoundWriterState
- include WriteMethods
+ class CompoundEmitterState
+ include EmitMethods
  include Tokens

  def initialize(cont, capturing)
@@ -426,8 +457,8 @@ class CompoundWriterState
  end
  end

- class ListWriterState
- include WriteMethods
+ class ListEmitterState
+ include EmitMethods
  include Tokens

  def initialize(cont, type, capturing)
@@ -462,7 +493,7 @@ class ListWriterState
  end
  end

- class EndWriterState
+ class EndEmitterState
  def emit_token(io, token)
  raise RuntimeError, "unexpected token #{token.class} after end"
  end
@@ -472,19 +503,26 @@ class EndWriterState
  end
  end

- class Writer
+ end
+ include Private
+
+ class Emitter
+ include Private
  include Tokens

- def initialize(stream)
+ def initialize(stream) #:nodoc:
  @gz = Zlib::GzipWriter.new(stream)
- @state = TopWriterState.new()
+ @state = TopEmitterState.new()
  end

+ # Emit a token. See the Tokens module for a list of token types.
  def emit_token(token)
  @state = @state.emit_token(@gz, token)
  end

- def emit_compound(name)
+ # Emit a TAG_Compound token, call the block, and then emit a matching
+ # TAG_End token.
+ def emit_compound(name) #:yields:
  emit_token(TAG_Compound[name, nil])
  begin
  yield
@@ -493,7 +531,9 @@ class Writer
  end
  end

- def emit_list(name, type)
+ # Emit a TAG_List token, call the block, and then emit a matching TAG_End
+ # token.
+ def emit_list(name, type) #:yields:
  emit_token(TAG_List[name, type])
  begin
  yield
@@ -502,21 +542,24 @@ class Writer
  end
  end

+ # Emits a list item, given a value (the token type is assumed based on
+ # the element type of the enclosing list).
  def emit_item(value)
  @state = @state.emit_item(@gz, value)
  end

- def finish
+ def finish #:nodoc:
  @gz.close
  end
  end

+ # Produce a sequence of NBT tokens from a stream
  def self.tokenize(io)
  case io
  when String
  io = StringIO.new(io, "rb")
  end
- reader = Reader.new(io)
+ reader = Tokenizer.new(io)

  if block_given?
  reader.each_token { |token| yield token }
@@ -527,15 +570,18 @@ def self.tokenize(io)
  end
  end

- def self.emit(io)
- writer = Writer.new(io)
+ # Emit a NBT tokens to a stream
+ def self.emit(io) #:yields: emitter
+ emitter = Emitter.new(io)
  begin
- yield writer
+ yield emitter
  ensure
- writer.finish
+ emitter.finish
  end
  end

+ # Load an NBT file as a Ruby data structure; returns a pair containing
+ # the name of the top-level compound tag and its value
  def self.load(io)
  root = {}
  stack = [root]
@@ -564,4 +610,416 @@ def self.load(io)
  root.first
  end

+ # Utility helper which transcodes a stream directly to YAML
+ def self.transcode_to_yaml(input, output)
+ YAML.dump(load(input), output)
+ end
+
+ # Reads an NBT stream as a data structure and returns a pair containing the
+ # name of the top-level compound tag and its value.
+ def self.read(io)
+ root = {}
+ stack = [root]
+
+ self.tokenize(io) do |token|
+ case token
+ when Tokens::TAG_Byte
+ value = Types::Byte.new(token.value)
+ when Tokens::TAG_Short
+ value = Types::Short.new(token.value)
+ when Tokens::TAG_Int
+ value = Types::Int.new(token.value)
+ when Tokens::TAG_Long
+ value = Types::Long.new(token.value)
+ when Tokens::TAG_Float
+ value = Types::Float.new(token.value)
+ when Tokens::TAG_Double
+ value = Types::Double.new(token.value)
+ when Tokens::TAG_String
+ value = Types::String.new(token.value)
+ when Tokens::TAG_Byte_Array
+ value = Types::ByteArray.new(token.value)
+ when Tokens::TAG_List
+ tag = token.value
+ case
+ when tag == Tokens::TAG_Byte
+ type = Types::Byte
+ when tag == Tokens::TAG_Short
+ type = Types::Short
+ when tag == Tokens::TAG_Int
+ type = Types::Int
+ when tag == Tokens::TAG_Long
+ type = Types::Long
+ when tag == Tokens::TAG_Float
+ type = Types::Float
+ when tag == Tokens::TAG_Double
+ type = Types::Double
+ when tag == Tokens::TAG_String
+ type = Types::String
+ when tag == Tokens::TAG_Byte_Array
+ type = Types::ByteArray
+ when tag == Tokens::TAG_List
+ type = Types::List
+ when tag == Tokens::TAG_Compound
+ type = Types::Compound
+ else
+ raise TypeError, "Unexpected list type #{token.value}"
+ end
+ value = Types::List.new(type)
+ when Tokens::TAG_Compound
+ value = Types::Compound.new
+ when Tokens::TAG_End
+ stack.pop
+ next
+ else
+ raise TypeError, "Unexpected token type #{token.class}"
+ end
+
+ current = stack.last
+ case current
+ when Types::List
+ current << value
+ else
+ current[token.name] = value
+ end
+
+ case token
+ when Tokens::TAG_Compound, Tokens::TAG_List
+ stack.push value
+ end
+ end
+
+ root.first
+ end
+
+ module Private #:nodoc: all
+ class Writer
+ def initialize(emitter)
+ @emitter = emitter
+ end
+
+ def type_to_token(type)
+ case
+ when type == Types::Byte
+ token = Tokens::TAG_Byte
+ when type == Types::Short
+ token = Tokens::TAG_Short
+ when type == Types::Int
+ token = Tokens::TAG_Int
+ when type == Types::Long
+ token = Tokens::TAG_Long
+ when type == Types::Float
+ token = Tokens::TAG_Float
+ when type == Types::Double
+ token = Tokens::TAG_Double
+ when type == Types::String
+ token = Tokens::TAG_String
+ when type == Types::ByteArray
+ token = Tokens::TAG_Byte_Array
+ when type == Types::List
+ token = Tokens::TAG_List
+ when type == Types::Compound
+ token = Tokens::TAG_Compound
+ else
+ raise TypeError, "Unexpected list type #{type}"
+ end
+ return token
+ end
+
+ def write_pair(name, value)
+ case value
+ when Types::Byte
+ @emitter.emit_token(Tokens::TAG_Byte[name, value.value])
+ when Types::Short
+ @emitter.emit_token(Tokens::TAG_Short[name, value.value])
+ when Types::Int
+ @emitter.emit_token(Tokens::TAG_Int[name, value.value])
+ when Types::Long
+ @emitter.emit_token(Tokens::TAG_Long[name, value.value])
+ when Types::Float
+ @emitter.emit_token(Tokens::TAG_Float[name, value.value])
+ when Types::Double
+ @emitter.emit_token(Tokens::TAG_Double[name, value.value])
+ when Types::String
+ @emitter.emit_token(Tokens::TAG_String[name, value.value])
+ when Types::ByteArray
+ @emitter.emit_token(Tokens::TAG_Byte_Array[name, value.value])
+ when Types::List
+ token = type_to_token(value.type)
+ @emitter.emit_token(Tokens::TAG_List[name, token])
+ for item in value
+ write_pair(nil, item)
+ end
+ @emitter.emit_token(Tokens::TAG_End[nil, nil])
+ when Types::Compound
+ @emitter.emit_token(Tokens::TAG_Compound[name, nil])
+ for k, v in value
+ write_pair(k, v)
+ end
+ @emitter.emit_token(Tokens::TAG_End[nil, nil])
+ end
+ end
+ end
+ end
+
+ def self.write(io, name, body)
+ emit(io) do |emitter|
+ writer = Writer.new(emitter)
+ writer.write_pair(name, body)
+ end
+ end
+
+ module Types
+ module Private #:nodoc: all
+ module Base
+ end
+
+ class BaseScalar
+ include Private::Base
+ include Comparable
+
+ attr_reader :value
+
+ def <=>(other)
+ if other.kind_of? BaseScalar
+ @value <=> other.value
+ else
+ @value <=> other
+ end
+ end
+ end
+
+ class BaseInteger < BaseScalar
+ def self.make_subclass(n_bits)
+ subclass = Class.new(self)
+ limit = 1 << (n_bits - 1)
+ subclass.const_set(:RANGE, -limit..(limit-1))
+ subclass
+ end
+
+ def initialize(value)
+ unless self.class::RANGE.include? value
+ raise RangeError, "Value out of range"
+ end
+ int_value = value.to_int
+ if int_value != value
+ raise TypeError, "Not an integer"
+ end
+ @value = value
+ end
+
+ def ==(other)
+ if other.respond_to? :to_int
+ self.to_int == other.to_int
+ else
+ false
+ end
+ end
+
+ def eql?(other)
+ other.class == self.class and other.value == @value
+ end
+
+ def hash
+ [self.class, @value].hash
+ end
+
+ alias_method :to_int, :value
+ alias_method :to_i, :value
+ end
+
+ class BaseFloat < BaseScalar
+ def initialize(value)
+ unless Numeric === value
+ raise TypeError
+ end
+ float_value = value.to_f
+ @value = float_value
+ end
+
+ def ==(other)
+ if Numeric === other or BaseFloat === other
+ @value == other.to_f
+ else
+ false
+ end
+ end
+
+ def eql?(other)
+ other.class == self.class and other.value == @value
+ end
+
+ def hash
+ [self.class, @value].hash
+ end
+
+ alias_method :to_f, :value
+ end
+ end
+ include Private
+
+ Byte = BaseInteger.make_subclass(8)
+ class Byte
+ end
+ Short = BaseInteger.make_subclass(16)
+ class Short
+ end
+ Int = BaseInteger.make_subclass(32)
+ class Int
+ end
+ Long = BaseInteger.make_subclass(64)
+ class Long
+ end
+
+ class Float < BaseFloat
+ end
+
+ class Double < BaseFloat
+ end
+
+ class String < BaseScalar
+ def initialize(value)
+ unless value.respond_to? :to_str
+ raise TypeError, "String or string-like expected"
+ end
+ @value = value.to_str
+ end
+
+ def to_s ; @value.dup ; end
+ alias_method :to_str, :to_s
+ end
+
+ class ByteArray
+ include Private::Base
+
+ attr_reader :value
+
+ def initialize(value)
+ unless value.respond_to? :to_str
+ raise TypeError, "String or string-like expected"
+ end
+ @value = value.to_str
+ end
+
+ def ==(other)
+ self.class == other.class && @value == other.value
+ end
+
+ def to_s ; @value.dup ; end
+ alias_method :to_str, :to_s
+ end
+
+ class List
+ include Private::Base
+ include Enumerable
+
+ attr_reader :type
+
+ def initialize(type, items=[])
+ @type = type
+ @items = []
+ for item in items
+ self << item
+ end
+ end
+
+ def <<(item)
+ unless item.instance_of? @type
+ raise TypeError, "Items should be instances of #{@type}"
+ end
+ @items << item
+ self
+ end
+
+ def each
+ if block_given?
+ @items.each { |item| yield item }
+ self
+ else
+ @items.each
+ end
+ end
+
+ def to_a
+ @items.dup
+ end
+
+ def length
+ @items.length
+ end
+ alias_method :size, :length
+
+ def ==(other)
+ self.class == other.class && @items == other.to_a
+ end
+ end
+
+ class Compound
+ include Private::Base
+ include Enumerable
+
+ def initialize(contents={})
+ @hash = {}
+ @key_order = []
+ for key, value in contents
+ self[key] = value
+ end
+ end
+
+ def has_key?(key)
+ @hash.has_key? key
+ end
+ alias_method :include?, :has_key?
+
+ def []=(key, value)
+ unless key.instance_of? ::String
+ raise TypeError, "Key must be a string"
+ end
+ unless value.kind_of? Private::Base
+ raise TypeError, "#{value.class} is not an NBT type"
+ end
+ @key_order << key unless @hash.has_key? key
+ @hash[key] = value
+ value
+ end
+
+ def [](key)
+ @hash[key]
+ end
+
+ def delete(key)
+ if @hash.has_key? key
+ @key_order.delete key
+ @hash.delete key
+ end
+ self
+ end
+
+ def keys
+ @key_order.dup
+ end
+
+ def values
+ @key_order.map { |k| @hash[k] }
+ end
+
+ def each
+ if block_given?
+ @key_order.each { |k| yield k, @hash[k] }
+ self
+ else
+ Enumerable::Enumerator.new(self, :each)
+ end
+ end
+
+ def to_hash
+ @hash.dup
+ end
+
+ def ==(other)
+ self.class == other.class && @hash == other.to_hash
+ end
+ end
+ end
+
  end
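For orientation, a minimal usage sketch of the streaming API as renamed above (Reader becomes Tokenizer, Writer becomes Emitter, driven through NBTFile.emit and NBTFile.tokenize). The stream and tag names here are illustrative only and are not part of the package:

    require 'stringio'
    require 'nbtfile'

    # Write a gzipped NBT stream token by token through the renamed Emitter.
    out = StringIO.new
    NBTFile.emit(out) do |emitter|
      emitter.emit_compound("hello") do
        emitter.emit_token(NBTFile::Tokens::TAG_Byte["answer", 42])
      end
    end

    # Read it back as a token stream through the renamed Tokenizer.
    NBTFile.tokenize(StringIO.new(out.string)) do |token|
      # yields a TAG_Compound token, then TAG_Byte, then TAG_End
    end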
data/spec/nbtfile_spec.rb CHANGED
@@ -6,11 +6,12 @@ require 'zlib'

  shared_examples_for "readers and writers" do
  Tokens = NBTFile::Tokens unless defined? Tokens
+ Types = NBTFile::Types unless defined? Types

- def self.a_reader_or_writer(desc, serialized, tokens)
+ def self.a_reader_or_writer(desc, serialized, tokens, tree)
  it desc do
  serialized._nbtfile_force_encoding("BINARY")
- check_reader_or_writer(serialized, tokens)
+ check_reader_or_writer(serialized, tokens, tree)
  end
  end

@@ -18,7 +19,8 @@ shared_examples_for "readers and writers" do
  "\x0a\x00\x03foo" \
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo", Types::Compound.new()]

  a_reader_or_writer "should treat integers as signed",
  "\x0a\x00\x03foo" \
@@ -26,7 +28,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Int["bar", -2],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Int.new(-2)})]

  a_reader_or_writer "should handle integer fields",
  "\x0a\x00\x03foo" \
@@ -34,7 +39,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Int["bar", 0x01020304],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Int.new(0x01020304)})]

  a_reader_or_writer "should handle short fields",
  "\x0a\x00\x03foo" \
@@ -42,7 +50,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Short["bar", 0x4e5a],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Short.new(0x4e5a)})]

  a_reader_or_writer "should handle byte fields",
  "\x0a\x00\x03foo" \
@@ -50,7 +61,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Byte["bar", 0x4e],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Byte.new(0x4e)})]

  a_reader_or_writer "should handle string fields",
  "\x0a\x00\x03foo" \
@@ -58,7 +72,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_String["bar", "hoge"],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::String.new("hoge")})]

  a_reader_or_writer "should handle byte array fields",
  "\x0a\x00\x03foo" \
@@ -66,7 +83,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Byte_Array["bar", "\x01\x02\x03\x04\x05"],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::ByteArray.new("\x01\x02\x03\x04\x05")})]

  a_reader_or_writer "should handle long fields",
  "\x0a\x00\x03foo" \
@@ -74,7 +94,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Long["bar", 0x0102030405060708],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Long.new(0x0102030405060708)})]

  a_reader_or_writer "should handle float fields",
  "\x0a\x00\x03foo" \
@@ -82,7 +105,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Float["bar", "\x3f\xa0\x00\x00".unpack("g").first],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Float.new("\x3f\xa0\x00\x00".unpack("g").first)})]

  a_reader_or_writer "should handle double fields",
  "\x0a\x00\x03foo" \
@@ -90,7 +116,10 @@ shared_examples_for "readers and writers" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_Double["bar", "\x3f\xf4\x00\x00\x00\x00\x00\x00".unpack("G").first],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Double.new("\x3f\xf4\x00\x00\x00\x00\x00\x00".unpack("G").first)})]

  a_reader_or_writer "should handle nested compound fields",
  "\x0a\x00\x03foo" \
@@ -102,30 +131,39 @@ shared_examples_for "readers and writers" do
  Tokens::TAG_Compound["bar", nil],
  Tokens::TAG_Byte["hoge", 0x4e],
  Tokens::TAG_End["", nil],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::Compound.new({
+ "hoge" => Types::Byte.new(0x4e)})})]

  simple_list_types = [
- ["bytes", Tokens::TAG_Byte, 0x01, lambda { |ns| ns.pack("C*") }],
- ["shorts", Tokens::TAG_Short, 0x02, lambda { |ns| ns.pack("n*") }],
- ["ints", Tokens::TAG_Int, 0x03, lambda { |ns| ns.pack("N*") }],
- ["longs", Tokens::TAG_Long, 0x04, lambda { |ns| ns.map { |n| [n].pack("x4N") }.join("") }],
- ["floats", Tokens::TAG_Float, 0x05, lambda { |ns| ns.pack("g*") }],
- ["doubles", Tokens::TAG_Double, 0x06, lambda { |ns| ns.pack("G*") }]
+ ["bytes", Types::Byte, Tokens::TAG_Byte, 0x01, lambda { |ns| ns.pack("C*") }],
+ ["shorts", Types::Short, Tokens::TAG_Short, 0x02, lambda { |ns| ns.pack("n*") }],
+ ["ints", Types::Int, Tokens::TAG_Int, 0x03, lambda { |ns| ns.pack("N*") }],
+ ["longs", Types::Long, Tokens::TAG_Long, 0x04, lambda { |ns| ns.map { |n| [n].pack("x4N") }.join("") }],
+ ["floats", Types::Float, Tokens::TAG_Float, 0x05, lambda { |ns| ns.pack("g*") }],
+ ["doubles", Types::Double, Tokens::TAG_Double, 0x06, lambda { |ns| ns.pack("G*") }]
  ]

- for label, type, token, pack in simple_list_types
+ for label, type, token, repr, pack in simple_list_types
  values = [9, 5]
  a_reader_or_writer "should handle lists of #{label}",
  "\x0a\x00\x03foo" \
- "\x09\x00\x03bar#{[token].pack("C")}\x00\x00\x00\x02" \
+ "\x09\x00\x03bar#{[repr].pack("C")}\x00\x00\x00\x02" \
  "#{pack.call(values)}" \
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
- Tokens::TAG_List["bar", type],
- type[0, values[0]],
- type[1, values[1]],
+ Tokens::TAG_List["bar", token],
+ token[0, values[0]],
+ token[1, values[1]],
  Tokens::TAG_End[2, nil],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" =>
+ Types::List.new(type,
+ values.map { |v| type.new(v) })})]
  end

  a_reader_or_writer "should handle nested lists",
@@ -140,7 +178,12 @@ shared_examples_for "readers and writers" do
  Tokens::TAG_Byte[0, 0x4a],
  Tokens::TAG_End[1, nil],
  Tokens::TAG_End[1, nil],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ ["foo",
+ Types::Compound.new({
+ "bar" => Types::List.new(Types::List, [
+ Types::List.new(Types::Byte,
+ [Types::Byte.new(0x4a)])])})]
  end

  describe "NBTFile::tokenize" do
@@ -148,7 +191,7 @@ describe "NBTFile::tokenize" do

  it_should_behave_like "readers and writers"

- def check_reader_or_writer(input, tokens)
+ def check_reader_or_writer(input, tokens, tree)
  io = make_zipped_stream(input)
  actual_tokens = []
  NBTFile.tokenize(io) do |token|
@@ -163,7 +206,7 @@ describe "NBTFile::tokenize without a block" do

  it_should_behave_like "readers and writers"

- def check_reader_or_writer(input, tokens)
+ def check_reader_or_writer(input, tokens, tree)
  io = make_zipped_stream(input)
  actual_tokens = NBTFile.tokenize(io)
  actual_tokens.should be_a_kind_of(Enumerable)
@@ -176,7 +219,7 @@ describe "NBTFile::emit" do

  it_should_behave_like "readers and writers"

- def check_reader_or_writer(output, tokens)
+ def check_reader_or_writer(output, tokens, tree)
  io = StringIO.new()
  NBTFile.emit(io) do |writer|
  for token in tokens
@@ -202,7 +245,8 @@ describe "NBTFile::emit" do
  "\x00",
  [Tokens::TAG_Compound["foo", nil],
  Tokens::TAG_String["bar", "hoge"._nbtfile_encode("UTF-16LE")],
- Tokens::TAG_End["", nil]]
+ Tokens::TAG_End["", nil]],
+ nil
  end

  it "should reject malformed UTF-8 strings" do
@@ -247,6 +291,30 @@ describe "NBTFile::emit" do
  end
  end

+ describe "NBTFile::read" do
+ include ZlibHelpers
+ it_should_behave_like "readers and writers"
+
+ def check_reader_or_writer(input, tokens, tree)
+ io = make_zipped_stream(input)
+ actual_tree = NBTFile.read(io)
+ actual_tree.should == tree
+ end
+ end
+
+ describe "NBTFile::write" do
+ include ZlibHelpers
+ it_should_behave_like "readers and writers"
+
+ def check_reader_or_writer(output, tokens, tree)
+ io = StringIO.new()
+ name, body = tree
+ NBTFile.write(io, name, body)
+ actual_output = unzip_string(io.string)
+ actual_output.should == output
+ end
+ end
+
  describe "NBTFile::load" do
  include ZlibHelpers

@@ -288,3 +356,27 @@ describe "NBTFile::load" do
  Tokens::TAG_End["", nil]],
  ["foo", {"bar" => [32, 45]}]
  end
+
+ describe "NBTFile::transcode_to_yaml" do
+ def self.nbtfile_transcode(description, tokens, result)
+ it description do
+ io = StringIO.new()
+ NBTFile.emit(io) do |writer|
+ for token in tokens
+ writer.emit_token(token)
+ end
+ end
+ out = StringIO.new()
+ NBTFile.transcode_to_yaml(StringIO.new(io.string), out)
+ actual_result = YAML.load(out.string)
+ actual_result.should == result
+ end
+ end
+
+ nbtfile_transcode "should transcode to YAML",
+ [Tokens::TAG_Compound["foo", nil],
+ Tokens::TAG_Byte["a", 19],
+ Tokens::TAG_Byte["b", 23],
+ Tokens::TAG_End[nil, nil]],
+ ["foo", {"a" => 19, "b" => 23}]
+ end
data/spec/roundtrip_spec.rb CHANGED
@@ -9,28 +9,41 @@ describe NBTFile do

  sample_pattern = File.join(File.dirname(__FILE__), '..', 'samples', '*.nbt')

+ def perform_and_check_roundtrip(file)
+ input = StringIO.new(File.read(file))
+ output = StringIO.new()
+ yield input, output
+ input_bytes = unzip_string(input.string)
+ output_bytes = unzip_string(output.string)
+
+ input_digest = Digest::SHA1.hexdigest(input_bytes)
+ output_digest = Digest::SHA1.hexdigest(output_bytes)
+
+ output_digest.should == input_digest
+ end
+
  def self.check_file(file)
- it "should roundtrip #{File.basename(file)}" do
- input = StringIO.new(File.read(file))
- output = StringIO.new()
-
- reader = NBTFile::Reader.new(input)
- writer = NBTFile::Writer.new(output)
- begin
- reader.each_token do |token|
- writer.emit_token(token)
+ basename = File.basename(file)
+
+ it "should roundtrip #{basename} at the token level" do
+ perform_and_check_roundtrip(file) do |input, output|
+ tokenizer = NBTFile::Tokenizer.new(input)
+ emitter = NBTFile::Emitter.new(output)
+ begin
+ tokenizer.each_token do |token|
+ emitter.emit_token(token)
+ end
+ ensure
+ emitter.finish
  end
- ensure
- writer.finish
  end
+ end

- input_bytes = unzip_string(input.string)
- output_bytes = unzip_string(output.string)
-
- input_digest = Digest::SHA1.hexdigest(input_bytes)
- output_digest = Digest::SHA1.hexdigest(output_bytes)
-
- output_digest.should == input_digest
+ it "should roundtrip #{basename} at the data model level" do
+ perform_and_check_roundtrip(file) do |input, output|
+ name, body = NBTFile.read(input)
+ NBTFile.write(output, name, body)
+ end
  end
  end

data/spec/types_spec.rb ADDED
@@ -0,0 +1,197 @@
+ shared_examples_for "high-level types" do
+ it "should include NBTFile::Types::Base" do
+ @type.should < NBTFile::Types::Base
+ end
+ end
+
+ INTEGER_TYPE_CASES = {
+ NBTFile::Types::Byte => 8,
+ NBTFile::Types::Short => 16,
+ NBTFile::Types::Int => 32,
+ NBTFile::Types::Long => 64
+ }
+
+ INTEGER_TYPE_CASES.each do |type, bits|
+ range = (-2**(bits-1))..(2**(bits-1)-1)
+ describe "#{type}" do
+ it_should_behave_like "high-level types"
+
+ before :all do
+ @type = type
+ end
+
+ it "should reject values larger than #{range.end}" do
+ lambda { type.new(range.end+1) }.should raise_error(RangeError)
+ end
+
+ it "should reject values smaller than #{range.begin}" do
+ lambda { type.new(range.begin - 1) }.should raise_error(RangeError)
+ end
+
+ it "should accept integers" do
+ type.new(1)
+ end
+
+ it "should have a value attribute" do
+ type.new(42).value.should == 42
+ end
+
+ it "should reject non-integers" do
+ lambda { type.new(0.5) }.should raise_error(TypeError)
+ end
+
+ it "should support #to_int" do
+ type.new(3).to_int.should == 3
+ end
+
+ it "should support #to_i" do
+ type.new(3).to_i.should == 3
+ end
+
+ it "should support equality by value" do
+ type.new(3).should == 3
+ type.new(3).should_not == 4
+ type.new(3).should == type.new(3)
+ type.new(3).should_not == type.new(4)
+ end
+ end
+ end
+
+ shared_examples_for "floating-point high-level types" do
+ it "should accept Numerics" do
+ @type.new(3.3)
+ @type.new(3)
+ @type.new(2**68)
+ end
+
+ it "should not accept non-numerics" do
+ lambda { @type.new("3.3") }.should raise_error(TypeError)
+ end
+
+ it "should have a value attribute" do
+ @type.new(3.3).value.should == 3.3
+ end
+
+ it "should support #to_f" do
+ @type.new(3.3).to_f.should == 3.3
+ end
+
+ it "should support equality by value" do
+ @type.new(3.3).should == 3.3
+ @type.new(3.3).should_not == 4
+ @type.new(3.3).should == @type.new(3.3)
+ @type.new(3.3).should_not == @type.new(4)
+ end
+ end
+
+ describe NBTFile::Types::Float do
+ it_should_behave_like "high-level types"
+ it_should_behave_like "floating-point high-level types"
+
+ before :all do
+ @type = NBTFile::Types::Float
+ end
+ end
+
+ describe NBTFile::Types::Double do
+ it_should_behave_like "high-level types"
+ it_should_behave_like "floating-point high-level types"
+
+ before :all do
+ @type = NBTFile::Types::Double
+ end
+ end
+
+ describe NBTFile::Types::String do
+ it_should_behave_like "high-level types"
+
+ before :all do
+ @type = NBTFile::Types::String
+ end
+
+ it "should have a #value accessor" do
+ NBTFile::Types::String.new("foo").value.should == "foo"
+ end
+
+ it "should support #to_s" do
+ NBTFile::Types::String.new("foo").to_s.should == "foo"
+ end
+ end
+
+ describe NBTFile::Types::ByteArray do
+ it_should_behave_like "high-level types"
+
+ before :all do
+ @type = NBTFile::Types::ByteArray
+ end
+
+ it "should have a #value accessor" do
+ NBTFile::Types::ByteArray.new("foo").value.should == "foo"
+ end
+ end
+
+ describe NBTFile::Types::List do
+ it_should_behave_like "high-level types"
+
+ before :all do
+ @type = NBTFile::Types::List
+ end
+
+ before :each do
+ @instance = NBTFile::Types::List.new(NBTFile::Types::Int)
+ end
+
+ it "should accept instances of the given type" do
+ @instance << NBTFile::Types::Int.new(3)
+ @instance.length.should == 1
+ end
+
+ it "should reject instances of other types" do
+ lambda {
+ @instance << NBTFile::Types::Byte.new(3)
+ }.should raise_error(TypeError)
+ lambda {
+ @instance << 3
+ }.should raise_error(TypeError)
+ lambda {
+ @instance << nil
+ }.should raise_error(TypeError)
+ @instance.length.should == 0
+ end
+
+ it "should implement Enumerable" do
+ NBTFile::Types::List.should < Enumerable
+ end
+ end
+
+ describe NBTFile::Types::Compound do
+ it_should_behave_like "high-level types"
+
+ before :all do
+ @type = NBTFile::Types::Compound
+ end
+
+ before :each do
+ @instance = NBTFile::Types::Compound.new
+ end
+
+ it "should allow setting and retrieving a field" do
+ @instance["foo"] = NBTFile::Types::Int.new(3)
+ @instance["foo"].should == NBTFile::Types::Int.new(3)
+ end
+
+ it "should allow removing a field" do
+ @instance["foo"] = NBTFile::Types::Int.new(3)
+ @instance.delete "foo"
+ @instance.delete "foo"
+ @instance["foo"].should be_nil
+ end
+
+ it "should accept values deriving from NBTFile::Types::Base" do
+ @instance["foo"] = NBTFile::Types::Int.new(3)
+ end
+
+ it "should reject values not deriving from NBTFile::Types::Base" do
+ lambda { @instance["foo"] = 3 }.should raise_error(TypeError)
+ end
+ end
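The spec file above exercises the new NBTFile::Types data model that 0.1.0 introduces; a small round-trip sketch in the same spirit (the names "example", "answer", and "bytes" are illustrative):

    require 'stringio'
    require 'nbtfile'

    # Build a tree out of the new high-level types.
    body = NBTFile::Types::Compound.new
    body["answer"] = NBTFile::Types::Int.new(42)
    body["bytes"] = NBTFile::Types::List.new(NBTFile::Types::Byte,
                                             [NBTFile::Types::Byte.new(1)])

    # Serialize it with NBTFile.write and read it back with NBTFile.read.
    io = StringIO.new
    NBTFile.write(io, "example", body)
    name, tree = NBTFile.read(StringIO.new(io.string))
    # name == "example"; tree["answer"] == NBTFile::Types::Int.new(42)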
metadata CHANGED
@@ -4,9 +4,9 @@ version: !ruby/object:Gem::Version
  prerelease: false
  segments:
  - 0
+ - 1
  - 0
- - 8
- version: 0.0.8
+ version: 0.1.0
  platform: ruby
  authors:
  - MenTaLguY
@@ -14,7 +14,7 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2010-11-05 00:00:00 -07:00
+ date: 2011-02-09 00:00:00 -08:00
  default_executable:
  dependencies:
  - !ruby/object:Gem::Dependency
@@ -42,7 +42,6 @@ extra_rdoc_files:
  - README.rdoc
  files:
  - .document
- - .gitignore
  - LICENSE
  - README.rdoc
  - Rakefile
@@ -54,13 +53,14 @@ files:
  - spec/nbtfile_spec.rb
  - spec/roundtrip_spec.rb
  - spec/spec_helper.rb
+ - spec/types_spec.rb
  has_rdoc: true
  homepage: http://github.com/mental/nbtfile
  licenses: []

  post_install_message:
- rdoc_options:
- - --charset=UTF-8
+ rdoc_options: []
+
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
@@ -90,3 +90,4 @@ test_files:
  - spec/nbtfile_spec.rb
  - spec/roundtrip_spec.rb
  - spec/spec_helper.rb
+ - spec/types_spec.rb
data/.gitignore DELETED
@@ -1,23 +0,0 @@
- ## MAC OS
- .DS_Store
-
- ## TEXTMATE
- *.tmproj
- tmtags
-
- ## EMACS
- *~
- \#*
- .\#*
-
- ## VIM
- *.swp
-
- ## PROJECT::GENERAL
- coverage
- rdoc
- pkg
- *.rbc
-
- ## PROJECT::SPECIFIC
- nbtfile.gemspec