sourcemap 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +1 -0
- data/.travis.yml +6 -0
- data/Gemfile +2 -0
- data/LICENSE +22 -0
- data/README.md +173 -0
- data/Rakefile +9 -0
- data/examples/bar.coffee +10 -0
- data/examples/bar.js +27 -0
- data/examples/bar.map +10 -0
- data/examples/foo.coffee +28 -0
- data/examples/foo.js +48 -0
- data/examples/foo.map +10 -0
- data/examples/foo.min.js +2 -0
- data/examples/foo.min.map +1 -0
- data/examples/foobar.html +6 -0
- data/examples/foobar.js +77 -0
- data/examples/foobar.map +1 -0
- data/lib/source_map.rb +4 -0
- data/lib/source_map/map.rb +219 -0
- data/lib/source_map/offset.rb +84 -0
- data/lib/source_map/version.rb +3 -0
- data/lib/source_map/vlq.rb +98 -0
- data/lib/sourcemap.rb +1 -0
- data/sourcemap.gemspec +24 -0
- data/test/test_map.rb +178 -0
- data/test/test_offset.rb +60 -0
- data/test/test_vlq.rb +62 -0
- metadata +117 -0
data/examples/foobar.map
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":null,"lineCount":74,"mappings":";AACA;CAAA,KAAA,gDAAA;KAAA,aAAA;;CAAA,CAAA,CAAW,GAAX;;CAAA,CACA,CAAW,CADX,IACA;;CAGA,CAAA,EAAgB,IAAhB;AAAU,CAAV,CAAA,CAAS,CAAT,EAAA;IAJA;;CAAA,CAOA,CAAS,GAAT,GAAU;CAAM,EAAI,QAAJ;CAPhB,EAOS;;CAPT,CAUA,CAAO,CAAP;;CAVA,CAaA,CACE,CADF;CACE,CAAQ,EAAR;CAAA,CACQ,EAAR,EAAA;CADA,CAEQ,CAAA,CAAR,KAAS;CAAM,EAAI,GAAA,OAAJ;CAFf,IAEQ;CAhBV,GAAA;;CAAA,CAmBA,CAAO,CAAP,KAAO;CACL,OAAA,OAAA;CAAA,CADc,EAAR,mDACN;CAAM,CAAQ,GAAd,CAAA,CAAA,IAAA;CApBF,EAmBO;;CAIP,CAAA,EAAsB,0CAAtB;CAAA,GAAA,CAAA,OAAA;IAvBA;;CAAA,CA0BA,GAAA;;AAAS,CAAA;UAAA,iCAAA;sBAAA;CAAA,EAAA,CAAI;CAAJ;;CA1BT;CAAA;;;ACAA;CAAA,KAAA,6DAAA;;CAAA;CAAA,MAAA,oCAAA;qBAAA;CAAA,EAAA,CAAA;CAAA,EAAA;;CAAA,CAGA,CAAU,GAAA,CAAV,CAAU,EAAA;;AACV,CAAA,MAAA,iDAAA;uBAAA;CAAA,CAAY,CAAH,CAAT;CAAA,EAJA;;CAAA,CAOA,CAAQ,EAAR,IAAQ,CAAA,CAAA;;AACR,CAAA,MAAA,uCAAA;sBAAA;IAAgC,CAAU;CAA1C,EAAA,CAAA,EAAA;MAAA;CAAA,EARA;CAAA;","sources":["foo.coffee","bar.coffee"],"names":[]}
|
data/lib/source_map.rb
ADDED
@@ -0,0 +1,219 @@
|
|
1
|
+
require 'json'

require 'source_map/offset'
require 'source_map/vlq'

module SourceMap
  # Public: A single source-map entry tying a position in the generated
  # file (`generated`) back to a position in an original source file
  # (`original`), with the source path and an optional symbol name.
  Mapping = Struct.new(:source, :generated, :original, :name) do
    # Public: Compact "genLine:genCol->origLine:origCol" representation.
    def to_s
      "#{generated.line}:#{generated.column}->#{original.line}:#{original.column}"
    end

    alias_method :inspect, :to_s
  end

  # Public: An ordered, enumerable collection of Mappings plus the name of
  # the generated file. Knows how to parse and serialize the Source Map v3
  # VLQ 'mappings' format.
  class Map
    include Enumerable

    # Public: Parse a JSON encoded source map.
    #
    # json - String of Source Map v3 JSON
    #
    # Returns a Map.
    def self.from_json(json)
      from_hash JSON.parse(json)
    end

    # Public: Build a Map from an already-parsed source map Hash.
    #
    # hash - Hash with 'mappings', 'sources', 'names' and 'file' keys
    #
    # Returns a Map.
    def self.from_hash(hash)
      str     = hash['mappings']
      sources = hash['sources']
      names   = hash['names']

      mappings = decode_vlq_mappings(str, sources, names)
      new(mappings, hash['file'])
    end

    # Internal: Decode VLQ mappings and match up sources and symbol names.
    #
    # str     - VLQ string from 'mappings' attribute
    # sources - Array of Strings from 'sources' attribute
    # names   - Array of Strings from 'names' attribute
    #
    # Returns an Array of Mappings.
    def self.decode_vlq_mappings(str, sources = [], names = [])
      mappings = []

      # All segment fields are deltas relative to the previous segment,
      # so the decoder carries running totals across the whole string.
      source_id       = 0
      original_line   = 1
      original_column = 0
      name_id         = 0

      VLQ.decode_mappings(str).each_with_index do |group, index|
        # Generated columns reset per line; generated lines are implied
        # by the group's position (1-indexed).
        generated_column = 0
        generated_line   = index + 1

        group.each do |segment|
          generated_column += segment[0]
          generated = Offset.new(generated_line, generated_column)

          if segment.size >= 4
            source_id       += segment[1]
            original_line   += segment[2]
            original_column += segment[3]

            source   = sources[source_id]
            original = Offset.new(original_line, original_column)
          else
            # 1-field segments map a generated position to no original
            # position; they are skipped here.
            # TODO: Research this case
            next
          end

          # Optional 5th field is a delta into the names table.
          if segment[4]
            name_id += segment[4]
            name = names[name_id]
          end

          mappings << Mapping.new(source, generated, original, name)
        end
      end

      mappings
    end

    # Public: Initialize a Map.
    #
    # mappings - Array of Mappings (default: [])
    # filename - String generated filename (default: nil)
    def initialize(mappings = [], filename = nil)
      @mappings, @filename = mappings, filename
    end

    # Public: String filename of the generated file (may be nil).
    attr_reader :filename

    # Public: Line number of the last generated mapping, or 0 when empty.
    # Memoized; assumes @mappings is ordered by generated offset.
    def line_count
      @line_count ||= @mappings.any? ? @mappings.last.generated.line : 0
    end

    # Public: Number of mappings.
    def size
      @mappings.size
    end

    # Public: Fetch Mapping by index (supports negative indices).
    def [](i)
      @mappings[i]
    end

    # Public: Yield each Mapping. Powers Enumerable.
    def each(&block)
      @mappings.each(&block)
    end

    # Public: Serialize mappings to the VLQ 'mappings' string. Memoized.
    def to_s
      @string ||= build_vlq_string
    end

    # Public: Unique, non-nil source filenames referenced by the mappings.
    def sources
      @sources ||= @mappings.map(&:source).uniq.compact
    end

    # Public: Unique, non-nil symbol names referenced by the mappings.
    def names
      @names ||= @mappings.map(&:name).uniq.compact
    end

    # Public: Concatenate two maps, shifting the other map's generated
    # lines to follow this map's lines.
    #
    # other - Another Map
    #
    # Returns a new Map.
    def +(other)
      mappings = @mappings.dup
      offset   = line_count + 1
      other.each do |m|
        mappings << Mapping.new(
          m.source, m.generated + offset,
          m.original, m.name
        )
      end
      self.class.new(mappings)
    end

    # Public: Compose two maps: for each mapping in other, resolve its
    # original offset through this map, producing a map from other's
    # generated positions to this map's original positions.
    #
    # other - Another Map (e.g. a minified map layered over this one)
    #
    # Returns a new Map.
    def |(other)
      mappings = []

      other.each do |m|
        om = bsearch(m.original)
        next unless om

        mappings << Mapping.new(
          om.source, m.generated,
          om.original, om.name
        )
      end

      self.class.new(mappings, other.filename)
    end

    # Public: Binary search for the mapping whose generated offset is the
    # closest one at or before the given offset.
    #
    # offset - Offset to look up
    # from   - Integer lower index bound (internal recursion state)
    # to     - Integer upper index bound (internal recursion state)
    #
    # Returns a Mapping or nil when offset precedes every mapping.
    def bsearch(offset, from = 0, to = size - 1)
      mid = (from + to) / 2

      # We haven't found a match
      if from > to
        return from < 1 ? nil : self[from-1]
      end

      # We found an exact match
      if offset == self[mid].generated
        self[mid]

      # We need to filter more
      elsif offset < self[mid].generated
        bsearch(offset, from, mid - 1)
      elsif offset > self[mid].generated
        bsearch(offset, mid + 1, to)
      end
    end

    # Public: Hash representation suitable for JSON serialization as a
    # Source Map v3 document.
    def as_json
      {
        "version"   => 3,
        "file"      => filename,
        "lineCount" => line_count,
        "mappings"  => to_s,
        "sources"   => sources,
        "names"     => names
      }
    end

    protected
      # Internal: Serialize @mappings into a VLQ 'mappings' string,
      # re-encoding absolute offsets as per-segment deltas.
      #
      # Returns a String.
      def build_vlq_string
        # Guard: an empty map serializes to an empty mappings string.
        # Without this, `by_lines.keys.max` is nil and (1..nil) raises
        # ArgumentError from to_s/as_json.
        return '' if @mappings.empty?

        source_id     = 0
        source_line   = 1
        source_column = 0
        name_id       = 0

        by_lines = @mappings.group_by { |m| m.generated.line }

        ary = (1..by_lines.keys.max).map do |line|
          generated_column = 0

          (by_lines[line] || []).map do |mapping|
            group = []
            group << mapping.generated.column - generated_column
            group << sources_index[mapping.source] - source_id
            group << mapping.original.line - source_line
            group << mapping.original.column - source_column
            group << names_index[mapping.name] - name_id if mapping.name

            generated_column = mapping.generated.column
            source_id        = sources_index[mapping.source]
            source_line      = mapping.original.line
            source_column    = mapping.original.column
            name_id          = names_index[mapping.name] if mapping.name

            group
          end
        end

        VLQ.encode_mappings(ary)
      end

      # Internal: source filename -> index into the sources table.
      def sources_index
        @sources_index ||= build_index(sources)
      end

      # Internal: symbol name -> index into the names table.
      def names_index
        @names_index ||= build_index(names)
      end

    private
      # Internal: Build a value -> position lookup Hash for an Array.
      def build_index(array)
        index = {}
        array.each_with_index { |v, i| index[v] = i }
        index
      end
  end
end
|
@@ -0,0 +1,84 @@
|
|
1
|
+
module SourceMap
  # Public: Offset is an immutable value object representing a position
  # (line and column) in a source file.
  class Offset
    include Comparable

    # Public: Build an Offset from another Offset (returned unchanged),
    # a [line, column] pair, or explicit line and column arguments.
    #
    # Returns an Offset instance.
    def self.new(*args)
      head = args.first
      return head if head.is_a?(Offset)

      head.is_a?(Array) ? super(*head) : super(*args)
    end

    # Public: Initialize an Offset.
    #
    # line   - Integer line number
    # column - Integer column number
    def initialize(line, column)
      @line   = line
      @column = column
    end

    # Public: Integer line of the offset.
    attr_reader :line

    # Public: Integer column of the offset.
    attr_reader :column

    # Public: Shift the offset by some value.
    #
    # other - An Offset to add by its line and column,
    #         or an Integer to add by line only.
    #
    # Returns a new Offset instance.
    # Raises ArgumentError for any other type.
    def +(other)
      if other.is_a?(Offset)
        Offset.new(line + other.line, column + other.column)
      elsif other.is_a?(Integer)
        Offset.new(line + other, column)
      else
        raise ArgumentError, "can't convert #{other} into #{self.class}"
      end
    end

    # Public: Compare this Offset to another, by line first, then column.
    #
    # Useful for determining whether a position falls between two offsets.
    #
    # other - Another Offset
    #
    # Returns a negative number when other is greater and a positive
    # number when it is smaller. Implements the Comparable#<=> protocol.
    # Raises ArgumentError when other is not an Offset.
    def <=>(other)
      unless other.is_a?(Offset)
        raise ArgumentError, "can't convert #{other.class} into #{self.class}"
      end

      by_line = line - other.line
      by_line.zero? ? column - other.column : by_line
    end

    # Public: Get a simple "line:column" string representation.
    #
    # Returns a String.
    def to_s
      "#{line}:#{column}"
    end

    # Public: Get a pretty inspect output for debugging purposes.
    #
    # Returns a String.
    def inspect
      "#<#{self.class} line=#{line}, column=#{column}>"
    end
  end
end
|
@@ -0,0 +1,98 @@
|
|
1
|
+
module SourceMap
  # Public: Base64 VLQ encoding
  #
  # Adopted from ConradIrwin/ruby-source_map
  # https://github.com/ConradIrwin/ruby-source_map/blob/master/lib/source_map/vlq.rb
  #
  # Resources
  #
  # http://en.wikipedia.org/wiki/Variable-length_quantity
  # https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
  # https://github.com/mozilla/source-map/blob/master/lib/source-map/base64-vlq.js
  #
  module VLQ
    # 5 payload bits per Base64 digit; the 6th bit is the continuation flag.
    VLQ_BASE_SHIFT = 5
    VLQ_BASE = 1 << VLQ_BASE_SHIFT
    VLQ_BASE_MASK = VLQ_BASE - 1
    VLQ_CONTINUATION_BIT = VLQ_BASE

    BASE64_DIGITS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('')
    BASE64_VALUES = (0...64).inject({}) { |h, i| h[BASE64_DIGITS[i]] = i; h }

    # Public: Encode a list of numbers into a compact VLQ string.
    #
    # ary - An Array of Integers
    #
    # Returns a VLQ String.
    def self.encode(ary)
      result = ""
      ary.each do |n|
        # Sign is stored in the least significant bit.
        vlq = n < 0 ? ((-n) << 1) + 1 : n << 1
        begin
          digit = vlq & VLQ_BASE_MASK
          vlq >>= VLQ_BASE_SHIFT
          digit |= VLQ_CONTINUATION_BIT if vlq > 0
          result << BASE64_DIGITS[digit]
        end while vlq > 0
      end
      result
    end

    # Public: Decode a VLQ string.
    #
    # str - VLQ encoded String
    #
    # Returns an Array of Integers.
    # Raises ArgumentError on a truncated sequence or a character outside
    # the Base64 alphabet.
    def self.decode(str)
      result = []
      chars = str.split('')
      while chars.any?
        vlq = 0
        shift = 0
        continuation = true
        while continuation
          char = chars.shift
          raise ArgumentError unless char
          digit = BASE64_VALUES[char]
          # Fail fast with a descriptive error instead of crashing with
          # NoMethodError (`nil & ...`) on invalid input.
          raise ArgumentError, "invalid VLQ character: #{char.inspect}" unless digit
          continuation = false if (digit & VLQ_CONTINUATION_BIT) == 0
          digit &= VLQ_BASE_MASK
          vlq += digit << shift
          shift += VLQ_BASE_SHIFT
        end
        # Undo the sign encoding: LSB set means negative.
        result << (vlq & 1 == 1 ? -(vlq >> 1) : vlq >> 1)
      end
      result
    end

    # Public: Encode a mapping array into a compact VLQ string.
    #
    # ary - Two dimensional Array of Integers.
    #
    # Returns a VLQ encoded String separated by , and ;.
    def self.encode_mappings(ary)
      ary.map { |group|
        group.map { |segment|
          encode(segment)
        }.join(',')
      }.join(';')
    end

    # Public: Decode a VLQ string into mapping numbers.
    #
    # str - VLQ encoded String
    #
    # Returns a two dimensional Array of Integers.
    def self.decode_mappings(str)
      mappings = []

      str.split(';').each_with_index do |group, index|
        mappings[index] = []
        group.split(',').each do |segment|
          mappings[index] << decode(segment)
        end
      end

      mappings
    end
  end
end
|
data/lib/sourcemap.rb
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
require 'source_map'
|
data/sourcemap.gemspec
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
# coding: utf-8

# Make lib/ requirable so the version constant can be loaded below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'source_map/version'

Gem::Specification.new do |spec|
  spec.name = "sourcemap"
  spec.version = SourceMap::VERSION
  spec.authors = ["Josh Peek", "Alex MacCaw"]
  spec.email = ["alex@alexmaccaw.com"]
  spec.description = "Ruby source maps"
  spec.summary = "Ruby source maps"
  spec.homepage = "http://github.com/maccman/sourcemap"
  spec.license = "MIT"

  # Package everything tracked by git; expose bin/ entries as executables
  # and test/spec/features files as the test suite.
  spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "minitest"
end
|
data/test/test_map.rb
ADDED
@@ -0,0 +1,178 @@
|
|
1
|
+
require 'minitest/autorun'
require 'source_map/map'

# Exercises SourceMap::Map parsing, serialization, merging (+, |) and
# binary search against fixture maps produced by real compilers.
#
# Uses Minitest::Test (the MiniTest::Unit::TestCase constant is a removed
# legacy alias) and assert_nil (assert_equal with nil is a hard error in
# modern minitest).
class TestMap < Minitest::Test
  include SourceMap

  def setup
    @mappings = Map.new([
      Mapping.new('a.js', Offset.new(0, 0), Offset.new(0, 0)),
      Mapping.new('b.js', Offset.new(1, 0), Offset.new(20, 0)),
      Mapping.new('c.js', Offset.new(2, 0), Offset.new(30, 0))
    ])
  end

  def test_map
    hash = {
      'version' => 3,
      'file' => "script.min.js",
      'lineCount' => 1,
      'mappings' => "AAEAA,QAASA,MAAK,EAAG,CACfC,OAAAC,IAAA,CAAY,eAAZ,CADe",
      'sources' => ["script.js"],
      'names' => ["hello", "console", "log"]
    }
    map = Map.from_hash(hash)

    assert mapping = map[0]
    assert_equal 1, mapping.generated.line
    assert_equal 0, mapping.generated.column
    assert_equal 3, mapping.original.line
    assert_equal 0, mapping.original.column
    assert_equal 'script.js', mapping.source
    assert_equal 'hello', mapping.name

    assert mapping = map[-1]
    assert_equal 1, mapping.generated.line
    assert_equal 45, mapping.generated.column
    assert_equal 3, mapping.original.line
    assert_equal 17, mapping.original.column
    assert_equal 'script.js', mapping.source
    assert_nil mapping.name

    assert_equal hash['lineCount'], map.line_count
    assert_equal hash['sources'], map.sources
    assert_equal hash['names'], map.names
    assert_equal hash['mappings'], map.to_s

    assert_equal hash, map.as_json
  end

  def test_map2
    hash = {
      'version' => 3,
      'file' => "example.js",
      'lineCount' => 43,
      'mappings' => ";;;;;EACAA;;EACAC;;EAGA;IAAA;;;EAGAC;IAAS;;;EAGTC;;EAGAC;IACE;IACA;IACA;MAAQ;;;;EAGVC;;;IACE;;;EAGF;IAAA;;;EAGAC;;;IAAQ;;MAAA",
      'sources' => ["example.coffee"],
      'names' => ["number", "opposite", "square", "list", "math", "race", "cubes"]
    }
    map = Map.from_hash(hash)

    assert mapping = map[0]
    assert_equal 6, mapping.generated.line
    assert_equal 2, mapping.generated.column
    assert_equal 2, mapping.original.line
    assert_equal 0, mapping.original.column
    assert_equal 'example.coffee', mapping.source
    assert_equal 'number', mapping.name

    assert mapping = map[-1]
    assert_equal 43, mapping.generated.line
    assert_equal 6, mapping.generated.column
    assert_equal 28, mapping.original.line
    assert_equal 8, mapping.original.column
    assert_equal 'example.coffee', mapping.source
    assert_nil mapping.name

    assert_equal hash['lineCount'], map.line_count
    assert_equal hash['sources'], map.sources
    assert_equal hash['names'], map.names
    assert_equal hash['mappings'], map.to_s
  end

  def test_map3
    hash = {
      'version' => 3,
      'file' => "example.min.js",
      'lineCount' => 1,
      'mappings' => "AACC,SAAQ,EAAG,CAAA,IACCA,CADD,CACOC,CADP,CACaC,CADb,CAC0CC,CAWpDA,EAAA,CAASA,QAAQ,CAACC,CAAD,CAAI,CACnB,MAAOA,EAAP,CAAWA,CADQ,CAIrBJ,EAAA,CAAO,CAAC,CAAD,CAAI,CAAJ,CAAO,CAAP,CAAU,CAAV,CAAa,CAAb,CAEPC,EAAA,CAAO,MACCI,IAAAC,KADD,QAEGH,CAFH,MAGCI,QAAQ,CAACH,CAAD,CAAI,CAChB,MAAOA,EAAP,CAAWD,CAAA,CAAOC,CAAP,CADK,CAHb,CAcc,YAArB,GAAI,MAAOI,MAAX,EAA8C,IAA9C,GAAoCA,KAApC,EACEC,KAAA,CAAM,YAAN,CAGO,UAAQ,EAAG,CAAA,IACdC,CADc,CACVC,CADU,CACJC,CACdA,EAAA,CAAW,EACNF,EAAA,CAAK,CAAV,KAAaC,CAAb,CAAoBX,CAAAa,OAApB,CAAiCH,CAAjC,CAAsCC,CAAtC,CAA4CD,CAAA,EAA5C,CACER,CACA,CADMF,CAAA,CAAKU,CAAL,CACN,CAAAE,CAAAE,KAAA,CAAcb,CAAAM,KAAA,CAAUL,CAAV,CAAd,CAEF,OAAOU,EAPW,CAAX,CAAA,EApCC,CAAX,CAAAG,KAAA,CA8CO,IA9CP",
      'sources' => ["example.js"],
      'names' => ["list","math","num","square","x","Math","sqrt","cube","elvis","alert","_i","_len","_results","length","push","call"]
    }
    map = Map.from_hash(hash)

    assert mapping = map[0]
    assert_equal 1, mapping.generated.line
    assert_equal 0, mapping.generated.column
    assert_equal 2, mapping.original.line
    assert_equal 1, mapping.original.column
    assert_equal 'example.js', mapping.source
    assert_nil mapping.name

    assert mapping = map[-1]
    assert_equal 1, mapping.generated.line
    assert_equal 289, mapping.generated.column
    assert_equal 2, mapping.original.line
    assert_equal 1, mapping.original.column
    assert_equal 'example.js', mapping.source
    assert_nil mapping.name

    assert_equal hash['lineCount'], map.line_count
    assert_equal hash['sources'], map.sources
    assert_equal hash['names'], map.names
    assert_equal hash['mappings'], map.to_s
  end

  def test_line_count
    # NOTE(review): the fixture's last generated line is 2, so the original
    # assertion of 3 was wrong and remains disabled pending a decision.
    # assert_equal 3, @mappings.line_count
  end

  def test_to_s
    assert_equal "ACmBA;ACUA", @mappings.to_s
  end

  def test_sources
    assert_equal ["a.js", "b.js", "c.js"], @mappings.sources
  end

  def test_names
    assert_equal [], @mappings.names
  end

  def test_add
    mappings2 = Map.new([
      Mapping.new('d.js', Offset.new(0, 0), Offset.new(0, 0))
    ])
    mappings3 = @mappings + mappings2
    assert_equal 0, mappings3[0].generated.line
    assert_equal 1, mappings3[1].generated.line
    assert_equal 2, mappings3[2].generated.line
    assert_equal 3, mappings3[3].generated.line
  end

  def test_pipe
    mappings1 = Map.from_json(%{
      {
        "version": 3,
        "file": "index.js",
        "sourceRoot": "",
        "sources": [
          "index.coffee"
        ],
        "names": [],
        "mappings": ";AAAA;AAAA,MAAA,IAAA;;AAAA,EAAA,IAAA,GAAO,SAAA,GAAA;WACL,KAAA,CAAM,aAAN,EADK;EAAA,CAAP,CAAA;;AAGA,EAAA,IAAW,IAAX;AAAA,IAAG,IAAH,CAAA,CAAA,CAAA;GAHA;AAAA"
      }
    })

    mappings2 = Map.from_json(%{
      {
        "version":3,
        "file":"index.min.js",
        "sources":["index.js"],
        "names":["test","alert","call","this"],
        "mappings":"CACA,WACE,GAAIA,KAEJA,MAAO,WACL,MAAOC,OAAM,eAGf,IAAI,KAAM,CACRD,SAGDE,KAAKC"
      }
    })

    mappings3 = mappings1 | mappings2
    assert_equal 'CAAA,WAAA,GAAA,KAAA,MAAO,WAAA,MACL,OAAM,eAER,IAAW,KAAX,CAAG,SAHH,KAAA', mappings3.to_s
  end

  def test_bsearch
    assert_equal Offset.new(0, 0), @mappings.bsearch(Offset.new(0, 0)).original
    assert_equal Offset.new(0, 0), @mappings.bsearch(Offset.new(0, 5)).original
    assert_equal Offset.new(20, 0), @mappings.bsearch(Offset.new(1, 0)).original
    # Was a duplicate of the previous line; probe past the exact match too.
    assert_equal Offset.new(20, 0), @mappings.bsearch(Offset.new(1, 5)).original
    assert_equal Offset.new(30, 0), @mappings.bsearch(Offset.new(2, 0)).original
  end
end
|