crossfilter-rails 1.1.0
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/LICENSE +22 -0
- data/README.md +27 -0
- data/Rakefile +1 -0
- data/crossfilter-rails.gemspec +21 -0
- data/lib/crossfilter-rails.rb +8 -0
- data/lib/crossfilter-rails/version.rb +5 -0
- data/vendor/assets/javascripts/crossfilter.js +1199 -0
- data/vendor/assets/javascripts/crossfilter.min.js +1 -0
- metadata +77 -0
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,22 @@
Copyright (c) 2013 Vlad Gorodetsky

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,27 @@
# crossfilter-rails

**Crossfilter** is a JavaScript library for exploring large multivariate datasets in the browser. Crossfilter supports extremely fast (<30ms) interaction with coordinated views, even with datasets containing a million or more records; we built it to power analytics for Square Register, allowing merchants to slice and dice their payment history fluidly.

Since most interactions only involve a single dimension, and then only small adjustments are made to the filter values, incremental filtering and reducing is significantly faster than starting from scratch. Crossfilter uses sorted indexes (and a few bit-twiddling hacks) to make this possible, dramatically increasing the performance of live histograms and top-K lists. Crossfilter is available under the [Apache License](/square/crossfilter/blob/master/LICENSE).

## Installation

Add the following to your Gemfile:

    gem 'crossfilter-rails'

And then execute:

    $ bundle

Add the following directive to your JavaScript manifest file (application.js):

    //= require crossfilter

## Contributing

1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
data/Rakefile
ADDED
@@ -0,0 +1 @@
# Rakefile — loads Bundler's standard gem packaging tasks
# (rake build / rake install / rake release).
require "bundler/gem_tasks"
@@ -0,0 +1,21 @@
# -*- encoding: utf-8 -*-

# Gem specification for crossfilter-rails: packages the vendored
# crossfilter.js asset for use with the Rails asset pipeline.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'crossfilter-rails/version'

Gem::Specification.new do |gem|
  gem.name          = "crossfilter-rails"
  gem.version       = Crossfilter::Rails::VERSION
  gem.authors       = ["Vlad Gorodetsky"]
  gem.email         = ["v@gor.io"]
  gem.description   = %q{Fast n-dimensional filtering and grouping of records.}
  gem.summary       = %q{Gemified crossfilter.js asset for Rails}
  gem.homepage      = "http://github.com/bai/crossfilter-rails"

  # Package everything tracked by git; expose bin/ scripts as executables.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  # Works with Rails 3.x and 4.x asset pipelines.
  gem.add_dependency "railties", ">= 3.0", "< 5.0"
end
@@ -0,0 +1,1199 @@
|
|
1
|
+
(function(exports){
|
2
|
+
crossfilter.version = "1.1.0";
|
3
|
+
// Identity accessor: the default key/value function used throughout
// crossfilter when no accessor is supplied.
function crossfilter_identity(d) {
  return d;
}
|
6
|
+
crossfilter.permute = permute;
|
7
|
+
|
8
|
+
// Returns a new array whose i-th element is array[index[i]], i.e. the
// input reordered by the given index permutation. The input is untouched.
function permute(array, index) {
  var n = index.length;
  var copy = new Array(n);
  for (var i = 0; i < n; ++i) copy[i] = array[index[i]];
  return copy;
}
|
14
|
+
var bisect = crossfilter.bisect = bisect_by(crossfilter_identity);
|
15
|
+
|
16
|
+
bisect.by = bisect_by;
|
17
|
+
|
18
|
+
// Builds a pair of binary-search (bisection) functions that compare array
// elements through the accessor f. The returned function is bisectRight,
// with .left and .right attached for explicit choice.
function bisect_by(f) {

  // Leftmost insertion point for x in the sorted slice a[lo:hi]: after the
  // call, every value in a[lo:i] maps below x and every value in a[i:hi]
  // maps to a value >= x. If x is already present, the point falls before
  // (to the left of) any existing entries. Suitable as the first argument
  // to array.splice when a is sorted.
  function bisectLeft(a, x, lo, hi) {
    while (lo < hi) {
      var mid = lo + hi >> 1;
      if (f(a[mid]) < x) lo = mid + 1;
      else hi = mid;
    }
    return lo;
  }

  // Like bisectLeft, but the insertion point falls after (to the right of)
  // any entries equal to x: a[lo:i] maps to values <= x, a[i:hi] to
  // values > x.
  function bisectRight(a, x, lo, hi) {
    while (lo < hi) {
      var mid = lo + hi >> 1;
      if (x < f(a[mid])) hi = mid;
      else lo = mid + 1;
    }
    return lo;
  }

  bisectRight.right = bisectRight;
  bisectRight.left = bisectLeft;
  return bisectRight;
}
|
58
|
+
var heap = crossfilter.heap = heap_by(crossfilter_identity);
|
59
|
+
|
60
|
+
heap.by = heap_by;
|
61
|
+
|
62
|
+
// Builds min-heap operations keyed on the accessor f. The returned function
// heapifies a slice in place; heap.sort sorts a heapified slice descending.
function heap_by(f) {

  // Establishes the binary-heap invariant on a[lo:hi]: each parent a[lo+i]
  // compares <= its two children a[lo+2*i+1] and a[lo+2*i+2] under f.
  function heap(a, lo, hi) {
    var n = hi - lo,
        i = (n >>> 1) + 1;
    while (--i > 0) sift(a, i, n, lo);
    return a;
  }

  // Sorts a[lo:hi] in descending order, assuming the slice already
  // satisfies the heap invariant: repeatedly swap the minimum (root) to
  // the end of the shrinking slice and re-sift the new root.
  function sort(a, lo, hi) {
    var n = hi - lo;
    var swap;
    while (--n > 0) {
      swap = a[lo];
      a[lo] = a[lo + n];
      a[lo + n] = swap;
      sift(a, 1, n, lo);
    }
    return a;
  }

  // Sifts the element a[lo+i-1] down within the heap slice a[lo:lo+n].
  // Can also be used to repair the heap incrementally (e.g. after replacing
  // the root) without rebuilding it from scratch.
  function sift(a, i, n, lo) {
    var d = a[--lo + i],
        x = f(d),
        child;
    while ((child = i << 1) <= n) {
      if (child < n && f(a[lo + child]) > f(a[lo + child + 1])) child++;
      if (x <= f(a[lo + child])) break;
      a[lo + i] = a[lo + child];
      i = child;
    }
    a[lo + i] = d;
  }

  heap.sort = sort;
  return heap;
}
|
102
|
+
var heapselect = crossfilter.heapselect = heapselect_by(crossfilter_identity);
|
103
|
+
|
104
|
+
heapselect.by = heapselect_by;
|
105
|
+
|
106
|
+
// Builds a heap-select function that extracts the top-k elements under the
// accessor f, using a size-k min-heap of candidates.
function heapselect_by(f) {
  var heap = heap_by(f);

  // Returns a new array containing the k largest elements of a[lo:hi]
  // (fewer if the slice holds fewer than k elements). The returned array
  // satisfies the heap property but is not sorted; the order of elements
  // in a is unchanged by this operation.
  function heapselect(a, lo, hi, k) {
    var queue = new Array(k = Math.min(hi - lo, k)),
        min,
        i,
        d;

    // Seed the candidate heap with the first k elements.
    for (i = 0; i < k; ++i) queue[i] = a[lo++];
    heap(queue, 0, k);

    // Scan the rest: replace the heap minimum whenever a larger value
    // appears, then restore the heap to find the new minimum.
    if (lo < hi) {
      min = f(queue[0]);
      do {
        // Fix: the original wrote `if (x = f(d = a[lo]) > min)`, which by
        // operator precedence stored the *comparison result* (a boolean)
        // in x instead of the accessed value. The branch taken happened to
        // be correct, but the code was misleading; written explicitly here.
        d = a[lo];
        if (f(d) > min) {
          queue[0] = d;
          min = f(heap(queue, 0, k)[0]);
        }
      } while (++lo < hi);
    }

    return queue;
  }

  return heapselect;
}
|
138
|
+
var insertionsort = crossfilter.insertionsort = insertionsort_by(crossfilter_identity);
|
139
|
+
|
140
|
+
insertionsort.by = insertionsort_by;
|
141
|
+
|
142
|
+
// Builds an in-place insertion sort keyed on the accessor f. Stable and
// efficient for the small slices that quicksort delegates here.
function insertionsort_by(f) {

  // Sorts a[lo:hi] ascending (by f) and returns a.
  function insertionsort(a, lo, hi) {
    for (var i = lo + 1; i < hi; ++i) {
      var t = a[i];
      var x = f(t);
      var j = i;
      while (j > lo && f(a[j - 1]) > x) {
        a[j] = a[j - 1];
        --j;
      }
      a[j] = t;
    }
    return a;
  }

  return insertionsort;
}
|
156
|
+
// Algorithm designed by Vladimir Yaroslavskiy.
|
157
|
+
// Implementation based on the Dart project; see lib/dart/LICENSE for details.
|
158
|
+
|
159
|
+
var quicksort = crossfilter.quicksort = quicksort_by(crossfilter_identity);
|
160
|
+
|
161
|
+
quicksort.by = quicksort_by;
|
162
|
+
|
163
|
+
// Dual-pivot quicksort keyed on the accessor f.
// Algorithm designed by Vladimir Yaroslavskiy; implementation based on the
// Dart project (see lib/dart/LICENSE for details). Small slices fall back
// to insertion sort.
function quicksort_by(f) {
  var insertionsort = insertionsort_by(f);

  // Entry point: dispatches to insertion sort below the size threshold.
  function sort(a, lo, hi) {
    return (hi - lo < quicksort_sizeThreshold
        ? insertionsort
        : quicksort)(a, lo, hi);
  }

  function quicksort(a, lo, hi) {

    // Compute the two pivots by looking at 5 evenly spaced elements.
    var sixth = (hi - lo) / 6 | 0,
        i1 = lo + sixth,
        i5 = hi - 1 - sixth,
        i3 = lo + hi - 1 >> 1,  // The midpoint.
        i2 = i3 - sixth,
        i4 = i3 + sixth;

    var e1 = a[i1], x1 = f(e1),
        e2 = a[i2], x2 = f(e2),
        e3 = a[i3], x3 = f(e3),
        e4 = a[i4], x4 = f(e4),
        e5 = a[i5], x5 = f(e5);

    var t;

    // Sort the selected 5 elements using a sorting network.
    if (x1 > x2) t = e1, e1 = e2, e2 = t, t = x1, x1 = x2, x2 = t;
    if (x4 > x5) t = e4, e4 = e5, e5 = t, t = x4, x4 = x5, x5 = t;
    if (x1 > x3) t = e1, e1 = e3, e3 = t, t = x1, x1 = x3, x3 = t;
    if (x2 > x3) t = e2, e2 = e3, e3 = t, t = x2, x2 = x3, x3 = t;
    if (x1 > x4) t = e1, e1 = e4, e4 = t, t = x1, x1 = x4, x4 = t;
    if (x3 > x4) t = e3, e3 = e4, e4 = t, t = x3, x3 = x4, x4 = t;
    if (x2 > x5) t = e2, e2 = e5, e5 = t, t = x2, x2 = x5, x5 = t;
    if (x2 > x3) t = e2, e2 = e3, e3 = t, t = x2, x2 = x3, x3 = t;
    if (x4 > x5) t = e4, e4 = e5, e5 = t, t = x4, x4 = x5, x5 = t;

    var pivot1 = e2, pivotValue1 = x2,
        pivot2 = e4, pivotValue2 = x4;

    // e2 and e4 have been saved in the pivot variables. They will be
    // written back once the partitioning is finished.
    a[i1] = e1;
    a[i2] = a[lo];
    a[i3] = e3;
    a[i4] = a[hi - 1];
    a[i5] = e5;

    var less = lo + 1,   // First element in the middle partition.
        great = hi - 2;  // Last element in the middle partition.

    // Note that for value comparison, <, <=, >= and > coerce to a primitive
    // via Object.prototype.valueOf; == and === do not, so to stay consistent
    // with natural order (such as for Date objects) we do two compares.
    var pivotsEqual = pivotValue1 <= pivotValue2 && pivotValue1 >= pivotValue2;
    if (pivotsEqual) {

      // Degenerate case: the partitioning becomes a dutch national flag
      // problem.
      //
      // [ | < pivot | == pivot | unpartitioned | > pivot | ]
      //  ^          ^          ^               ^         ^
      // left       less        k             great     right
      //
      // a[left] and a[right] are undefined and are filled after the
      // partitioning. Invariants:
      //   1) for x in ]left, less[ : x < pivot.
      //   2) for x in [less, k[    : x == pivot.
      //   3) for x in ]great, right[ : x > pivot.
      for (var k = less; k <= great; ++k) {
        var ek = a[k], xk = f(ek);
        if (xk < pivotValue1) {
          if (k !== less) {
            a[k] = a[less];
            a[less] = ek;
          }
          ++less;
        } else if (xk > pivotValue1) {

          // Find the first element <= pivot in the range [k - 1, great] and
          // put ek there. Such an element must exist: when k == less, e3
          // (equal to pivot) lies in the interval; otherwise a[k - 1] ==
          // pivot and the search stops at k - 1. Invariant 2 may be briefly
          // violated; it is restored when the pivots reach their final
          // positions.
          while (true) {
            var greatValue = f(a[great]);
            if (greatValue > pivotValue1) {
              great--;
              // Only location in the loop where a new iteration starts.
              continue;
            } else if (greatValue < pivotValue1) {
              // Triple exchange.
              a[k] = a[less];
              a[less++] = a[great];
              a[great--] = ek;
              break;
            } else {
              a[k] = a[great];
              a[great--] = ek;
              // If great < k we exit the outer loop and fix invariant 2.
              break;
            }
          }
        }
      }
    } else {

      // Partition into three parts:
      //   1. < pivot1   2. >= pivot1 && <= pivot2   3. > pivot2
      //
      // [ | < pivot1 | >= pivot1 && <= pivot2 | unpartitioned | > pivot2 | ]
      //  ^           ^                        ^               ^          ^
      // left        less                      k             great      right
      //
      // a[left] and a[right] are undefined and are filled after the
      // partitioning. Invariants:
      //   1. for x in ]left, less[ : x < pivot1
      //   2. for x in [less, k[    : pivot1 <= x && x <= pivot2
      //   3. for x in ]great, right[ : x > pivot2
      for (var k = less; k <= great; k++) {
        var ek = a[k], xk = f(ek);
        if (xk < pivotValue1) {
          if (k !== less) {
            a[k] = a[less];
            a[less] = ek;
          }
          ++less;
        } else {
          if (xk > pivotValue2) {
            while (true) {
              var greatValue = f(a[great]);
              if (greatValue > pivotValue2) {
                great--;
                if (great < k) break;
                // Only location inside the loop where a new iteration starts.
                continue;
              } else {
                // a[great] <= pivot2.
                if (greatValue < pivotValue1) {
                  // Triple exchange.
                  a[k] = a[less];
                  a[less++] = a[great];
                  a[great--] = ek;
                } else {
                  // a[great] >= pivot1.
                  a[k] = a[great];
                  a[great--] = ek;
                }
                break;
              }
            }
          }
        }
      }
    }

    // Move pivots into their final positions. The list shrank from both
    // sides (a[left] and a[right] hold meaningless values); move elements
    // from the first and third partitions into those slots to store pivots.
    a[lo] = a[less - 1];
    a[less - 1] = pivot1;
    a[hi - 1] = a[great + 1];
    a[great + 1] = pivot2;

    // The list is now partitioned into three partitions:
    // [ < pivot1 | >= pivot1 && <= pivot2 | > pivot2 ]
    //  ^          ^                        ^          ^
    // left       less                    great      right

    // Recursive descent; don't include the pivot values.
    sort(a, lo, less - 1);
    sort(a, great + 2, hi);

    if (pivotsEqual) {
      // All elements in the second partition equal the pivot; nothing left
      // to sort.
      return a;
    }

    // In theory sorting the second partition recursively is enough. Like
    // the Android source, we first strip pivot elements from the recursive
    // call when the second partition is too large (more than 2/3 of the
    // list).
    if (less < i1 && great > i5) {
      var lessValue, greatValue;
      while ((lessValue = f(a[less])) <= pivotValue1 && lessValue >= pivotValue1) ++less;
      while ((greatValue = f(a[great])) <= pivotValue2 && greatValue >= pivotValue2) --great;

      // Copy-paste of the previous 3-way partitioning with adaptations.
      // Partition into:
      //   1. == pivot1   2. > pivot1 && < pivot2   3. == pivot2
      //
      // [ == pivot1 | > pivot1 && < pivot2 | unpartitioned | == pivot2 ]
      //              ^                      ^               ^
      //             less                    k             great
      //
      // Invariants:
      //   1. for x in [ *, less[ : x == pivot1
      //   2. for x in [less, k[  : pivot1 < x && x < pivot2
      //   3. for x in ]great, *] : x == pivot2
      for (var k = less; k <= great; k++) {
        var ek = a[k], xk = f(ek);
        if (xk <= pivotValue1 && xk >= pivotValue1) {
          if (k !== less) {
            a[k] = a[less];
            a[less] = ek;
          }
          less++;
        } else {
          if (xk <= pivotValue2 && xk >= pivotValue2) {
            while (true) {
              var greatValue = f(a[great]);
              if (greatValue <= pivotValue2 && greatValue >= pivotValue2) {
                great--;
                if (great < k) break;
                // Only location inside the loop where a new iteration starts.
                continue;
              } else {
                // a[great] < pivot2.
                if (greatValue < pivotValue1) {
                  // Triple exchange.
                  a[k] = a[less];
                  a[less++] = a[great];
                  a[great--] = ek;
                } else {
                  // a[great] == pivot1.
                  a[k] = a[great];
                  a[great--] = ek;
                }
                break;
              }
            }
          }
        }
      }
    }

    // The second partition now holds only values strictly between the two
    // pivots:
    // [ * | > pivot1 && < pivot2 | * ]
    //      ^                      ^
    //     less                  great
    // Sort it by recursive descent.
    return sort(a, less, great + 1);
  }

  return sort;
}

var quicksort_sizeThreshold = 32;
|
440
|
+
// Array constructors and resizers. Default to plain (untyped) arrays and
// identity no-ops; upgraded below to typed arrays when available.
var crossfilter_array8 = crossfilter_arrayUntyped,
    crossfilter_array16 = crossfilter_arrayUntyped,
    crossfilter_array32 = crossfilter_arrayUntyped,
    crossfilter_arrayLengthen = crossfilter_identity,
    crossfilter_arrayWiden = crossfilter_identity;

if (typeof Uint8Array !== "undefined") {
  crossfilter_array8 = function(n) { return new Uint8Array(n); };
  crossfilter_array16 = function(n) { return new Uint16Array(n); };
  crossfilter_array32 = function(n) { return new Uint32Array(n); };

  // Returns a copy of array grown to the given length (same element width).
  crossfilter_arrayLengthen = function(array, length) {
    var copy = new array.constructor(length);
    copy.set(array);
    return copy;
  };

  // Returns a copy of array widened to the given bit width (16 or 32),
  // preserving length and contents.
  crossfilter_arrayWiden = function(array, width) {
    var copy;
    switch (width) {
      case 16: copy = crossfilter_array16(array.length); break;
      case 32: copy = crossfilter_array32(array.length); break;
      default: throw new Error("invalid array width!");
    }
    copy.set(array);
    return copy;
  };
}

// Untyped fallback: a plain Array of the requested length.
function crossfilter_arrayUntyped(n) {
  return new Array(n);
}
|
472
|
+
// Returns a filter that, given a sorted array, yields the half-open index
// range [left, right) of entries exactly equal to value.
function crossfilter_filterExact(bisect, value) {
  return function(values) {
    var n = values.length;
    return [bisect.left(values, value, 0, n), bisect.right(values, value, 0, n)];
  };
}

// Returns a filter that, given a sorted array, yields the half-open index
// range of entries in [range[0], range[1]) — lower bound inclusive, upper
// bound exclusive (hence bisect.left for both ends).
function crossfilter_filterRange(bisect, range) {
  var min = range[0],
      max = range[1];
  return function(values) {
    var n = values.length;
    return [bisect.left(values, min, 0, n), bisect.left(values, max, 0, n)];
  };
}

// Filter that selects everything: the full index range of the array.
function crossfilter_filterAll(values) {
  return [0, values.length];
}
|
491
|
+
// Constant-producing helpers used as default reduce/update callbacks.
function crossfilter_null() {
  return null;
}

function crossfilter_zero() {
  return 0;
}

// Count reducers: bump the accumulator by one per added/removed record.
function crossfilter_reduceIncrement(p) {
  return p + 1;
}

function crossfilter_reduceDecrement(p) {
  return p - 1;
}

// Sum reducer over the accessor f; the unary + coerces f(v) to a number.
function crossfilter_reduceAdd(f) {
  return function(p, v) {
    return p + +f(v);
  };
}

// Inverse of crossfilter_reduceAdd (the - operator already coerces).
function crossfilter_reduceSubtract(f) {
  return function(p, v) {
    return p - f(v);
  };
}
|
516
|
+
exports.crossfilter = crossfilter;
|
517
|
+
|
518
|
+
function crossfilter() {
|
519
|
+
var crossfilter = {
|
520
|
+
add: add,
|
521
|
+
dimension: dimension,
|
522
|
+
groupAll: groupAll,
|
523
|
+
size: size
|
524
|
+
};
|
525
|
+
|
526
|
+
var data = [], // the records
|
527
|
+
n = 0, // the number of records; data.length
|
528
|
+
m = 0, // number of dimensions in use
|
529
|
+
M = 8, // number of dimensions that can fit in `filters`
|
530
|
+
filters = crossfilter_array8(0), // M bits per record; 1 is filtered out
|
531
|
+
filterListeners = [], // when the filters change
|
532
|
+
dataListeners = []; // when data is added
|
533
|
+
|
534
|
+
// Adds the specified new records to this crossfilter.
|
535
|
+
function add(newData) {
|
536
|
+
var n0 = n,
|
537
|
+
n1 = newData.length;
|
538
|
+
|
539
|
+
// If there's actually new data to add…
|
540
|
+
// Merge the new data into the existing data.
|
541
|
+
// Lengthen the filter bitset to handle the new records.
|
542
|
+
// Notify listeners (dimensions and groups) that new data is available.
|
543
|
+
if (n1) {
|
544
|
+
data = data.concat(newData);
|
545
|
+
filters = crossfilter_arrayLengthen(filters, n += n1);
|
546
|
+
dataListeners.forEach(function(l) { l(newData, n0, n1); });
|
547
|
+
}
|
548
|
+
|
549
|
+
return crossfilter;
|
550
|
+
}
|
551
|
+
|
552
|
+
// Adds a new dimension with the specified value accessor function.
|
553
|
+
function dimension(value) {
|
554
|
+
var dimension = {
|
555
|
+
filter: filter,
|
556
|
+
filterExact: filterExact,
|
557
|
+
filterRange: filterRange,
|
558
|
+
filterAll: filterAll,
|
559
|
+
top: top,
|
560
|
+
bottom: bottom,
|
561
|
+
group: group,
|
562
|
+
groupAll: groupAll
|
563
|
+
};
|
564
|
+
|
565
|
+
var one = 1 << m++, // bit mask, e.g., 00001000
|
566
|
+
zero = ~one, // inverted one, e.g., 11110111
|
567
|
+
values, // sorted, cached array
|
568
|
+
index, // value rank ↦ object id
|
569
|
+
newValues, // temporary array storing newly-added values
|
570
|
+
newIndex, // temporary array storing newly-added index
|
571
|
+
sort = quicksort_by(function(i) { return newValues[i]; }),
|
572
|
+
refilter = crossfilter_filterAll, // for recomputing filter
|
573
|
+
indexListeners = [], // when data is added
|
574
|
+
lo0 = 0,
|
575
|
+
hi0 = 0;
|
576
|
+
|
577
|
+
// Updating a dimension is a two-stage process. First, we must update the
|
578
|
+
// associated filters for the newly-added records. Once all dimensions have
|
579
|
+
// updated their filters, the groups are notified to update.
|
580
|
+
dataListeners.unshift(preAdd);
|
581
|
+
dataListeners.push(postAdd);
|
582
|
+
|
583
|
+
// Incorporate any existing data into this dimension, and make sure that the
|
584
|
+
// filter bitset is wide enough to handle the new dimension.
|
585
|
+
if (m > M) filters = crossfilter_arrayWiden(filters, M <<= 1);
|
586
|
+
preAdd(data, 0, n);
|
587
|
+
postAdd(data, 0, n);
|
588
|
+
|
589
|
+
// Incorporates the specified new records into this dimension.
|
590
|
+
// This function is responsible for updating filters, values, and index.
|
591
|
+
function preAdd(newData, n0, n1) {
|
592
|
+
|
593
|
+
// Permute new values into natural order using a sorted index.
|
594
|
+
newValues = newData.map(value);
|
595
|
+
newIndex = sort(crossfilter_range(n1), 0, n1);
|
596
|
+
newValues = permute(newValues, newIndex);
|
597
|
+
|
598
|
+
// Bisect newValues to determine which new records are selected.
|
599
|
+
var bounds = refilter(newValues), lo1 = bounds[0], hi1 = bounds[1], i;
|
600
|
+
for (i = 0; i < lo1; ++i) filters[newIndex[i] + n0] |= one;
|
601
|
+
for (i = hi1; i < n1; ++i) filters[newIndex[i] + n0] |= one;
|
602
|
+
|
603
|
+
// If this dimension previously had no data, then we don't need to do the
|
604
|
+
// more expensive merge operation; use the new values and index as-is.
|
605
|
+
if (!n0) {
|
606
|
+
values = newValues;
|
607
|
+
index = newIndex;
|
608
|
+
lo0 = lo1;
|
609
|
+
hi0 = hi1;
|
610
|
+
return;
|
611
|
+
}
|
612
|
+
|
613
|
+
var oldValues = values,
|
614
|
+
oldIndex = index,
|
615
|
+
i0 = 0,
|
616
|
+
i1 = 0;
|
617
|
+
|
618
|
+
// Otherwise, create new arrays into which to merge new and old.
|
619
|
+
values = new Array(n);
|
620
|
+
index = crossfilter_index(n, n);
|
621
|
+
|
622
|
+
// Merge the old and new sorted values, and old and new index.
|
623
|
+
for (i = 0; i0 < n0 && i1 < n1; ++i) {
|
624
|
+
if (oldValues[i0] < newValues[i1]) {
|
625
|
+
values[i] = oldValues[i0];
|
626
|
+
index[i] = oldIndex[i0++];
|
627
|
+
} else {
|
628
|
+
values[i] = newValues[i1];
|
629
|
+
index[i] = newIndex[i1++] + n0;
|
630
|
+
}
|
631
|
+
}
|
632
|
+
|
633
|
+
// Add any remaining old values.
|
634
|
+
for (; i0 < n0; ++i0, ++i) {
|
635
|
+
values[i] = oldValues[i0];
|
636
|
+
index[i] = oldIndex[i0];
|
637
|
+
}
|
638
|
+
|
639
|
+
// Add any remaining new values.
|
640
|
+
for (; i1 < n1; ++i1, ++i) {
|
641
|
+
values[i] = newValues[i1];
|
642
|
+
index[i] = newIndex[i1] + n0;
|
643
|
+
}
|
644
|
+
|
645
|
+
// Bisect again to recompute lo0 and hi0.
|
646
|
+
bounds = refilter(values), lo0 = bounds[0], hi0 = bounds[1];
|
647
|
+
}
|
648
|
+
|
649
|
+
// When all filters have updated, notify index listeners of the new values.
|
650
|
+
function postAdd(newData, n0, n1) {
|
651
|
+
indexListeners.forEach(function(l) { l(newValues, newIndex, n0, n1); });
|
652
|
+
newValues = newIndex = null;
|
653
|
+
}
|
654
|
+
|
655
|
+
// Updates the selected values based on the specified bounds [lo, hi].
|
656
|
+
// This implementation is used by all the public filter methods.
|
657
|
+
function filterIndex(bounds) {
|
658
|
+
var i,
|
659
|
+
j,
|
660
|
+
k,
|
661
|
+
lo1 = bounds[0],
|
662
|
+
hi1 = bounds[1],
|
663
|
+
added = [],
|
664
|
+
removed = [];
|
665
|
+
|
666
|
+
// Fast incremental update based on previous lo index.
|
667
|
+
if (lo1 < lo0) {
|
668
|
+
for (i = lo1, j = Math.min(lo0, hi1); i < j; ++i) {
|
669
|
+
filters[k = index[i]] ^= one;
|
670
|
+
added.push(k);
|
671
|
+
}
|
672
|
+
} else if (lo1 > lo0) {
|
673
|
+
for (i = lo0, j = Math.min(lo1, hi0); i < j; ++i) {
|
674
|
+
filters[k = index[i]] ^= one;
|
675
|
+
removed.push(k);
|
676
|
+
}
|
677
|
+
}
|
678
|
+
|
679
|
+
// Fast incremental update based on previous hi index.
|
680
|
+
if (hi1 > hi0) {
|
681
|
+
for (i = Math.max(lo1, hi0), j = hi1; i < j; ++i) {
|
682
|
+
filters[k = index[i]] ^= one;
|
683
|
+
added.push(k);
|
684
|
+
}
|
685
|
+
} else if (hi1 < hi0) {
|
686
|
+
for (i = Math.max(lo0, hi1), j = hi0; i < j; ++i) {
|
687
|
+
filters[k = index[i]] ^= one;
|
688
|
+
removed.push(k);
|
689
|
+
}
|
690
|
+
}
|
691
|
+
|
692
|
+
lo0 = lo1;
|
693
|
+
hi0 = hi1;
|
694
|
+
filterListeners.forEach(function(l) { l(one, added, removed); });
|
695
|
+
return dimension;
|
696
|
+
}
|
697
|
+
|
698
|
+
// Filters this dimension using the specified range, value, or null.
|
699
|
+
// If the range is null, this is equivalent to filterAll.
|
700
|
+
// If the range is an array, this is equivalent to filterRange.
|
701
|
+
// Otherwise, this is equivalent to filterExact.
|
702
|
+
function filter(range) {
|
703
|
+
return range == null
|
704
|
+
? filterAll() : Array.isArray(range)
|
705
|
+
? filterRange(range)
|
706
|
+
: filterExact(range);
|
707
|
+
}
|
708
|
+
|
709
|
+
// Filters this dimension to select the exact value.
|
710
|
+
function filterExact(value) {
|
711
|
+
return filterIndex((refilter = crossfilter_filterExact(bisect, value))(values));
|
712
|
+
}
|
713
|
+
|
714
|
+
// Filters this dimension to select the specified range [lo, hi].
|
715
|
+
// The lower bound is inclusive, and the upper bound is exclusive.
|
716
|
+
function filterRange(range) {
|
717
|
+
return filterIndex((refilter = crossfilter_filterRange(bisect, range))(values));
|
718
|
+
}
|
719
|
+
|
720
|
+
// Clears any filters on this dimension.
|
721
|
+
function filterAll() {
|
722
|
+
return filterIndex((refilter = crossfilter_filterAll)(values));
|
723
|
+
}
|
724
|
+
|
725
|
+
// Returns the top K selected records based on this dimension's order.
|
726
|
+
// Note: observes this dimension's filter, unlike group and groupAll.
|
727
|
+
function top(k) {
|
728
|
+
var array = [],
|
729
|
+
i = hi0,
|
730
|
+
j;
|
731
|
+
|
732
|
+
while (--i >= lo0 && k > 0) {
|
733
|
+
if (!filters[j = index[i]]) {
|
734
|
+
array.push(data[j]);
|
735
|
+
--k;
|
736
|
+
}
|
737
|
+
}
|
738
|
+
|
739
|
+
return array;
|
740
|
+
}
|
741
|
+
|
742
|
+
// Returns the bottom K selected records based on this dimension's order.
|
743
|
+
// Note: observes this dimension's filter, unlike group and groupAll.
|
744
|
+
function bottom(k) {
|
745
|
+
var array = [],
|
746
|
+
i = lo0,
|
747
|
+
j;
|
748
|
+
|
749
|
+
while (i < hi0 && k > 0) {
|
750
|
+
if (!filters[j = index[i]]) {
|
751
|
+
array.push(data[j]);
|
752
|
+
--k;
|
753
|
+
}
|
754
|
+
i++;
|
755
|
+
}
|
756
|
+
|
757
|
+
return array;
|
758
|
+
}
|
759
|
+
|
760
|
+
// Adds a new group to this dimension, using the specified key function.
// Returns a group object exposing top/all/reduce*/order*/size. Groups are
// kept sorted by key and their reduce values are maintained incrementally
// as filters change and as data is added.
function group(key) {
  var group = {
    top: top,
    all: all,
    reduce: reduce,
    reduceCount: reduceCount,
    reduceSum: reduceSum,
    order: order,
    orderNatural: orderNatural,
    size: size
  };

  var groups, // array of {key, value}
      groupIndex, // object id ↦ group id
      groupWidth = 8, // bit width of the group index entries
      groupCapacity = crossfilter_capacity(groupWidth),
      k = 0, // cardinality
      select, // heapselect for top(), set by order()
      heap, // heap for top(), set by order()
      reduceAdd,
      reduceRemove,
      reduceInitial,
      update = crossfilter_null,
      reset = crossfilter_null,
      resetNeeded = true; // lazily recompute reduce values on first all()/top()

  // With no key function, group by the dimension's value itself.
  if (arguments.length < 1) key = crossfilter_identity;

  // The group listens to the crossfilter for when any dimension changes, so
  // that it can update the associated reduce values. It must also listen to
  // the parent dimension for when data is added, and compute new keys.
  filterListeners.push(update);
  indexListeners.push(add);

  // Incorporate any existing data into the grouping.
  add(values, index, 0, n);

  // Incorporates the specified new values into this group.
  // This function is responsible for updating groups and groupIndex.
  // It performs a merge of the existing (sorted) groups with the keys of the
  // new (sorted) values, rebuilding the groups array and remapping old group
  // ids via reIndex.
  function add(newValues, newIndex, n0, n1) {
    var oldGroups = groups,
        reIndex = crossfilter_index(k, groupCapacity), // old group id ↦ new group id
        add = reduceAdd,
        initial = reduceInitial,
        k0 = k, // old cardinality
        i0 = 0, // index of old group
        i1 = 0, // index of new record
        j, // object id
        g0, // old group
        x0, // old key
        x1, // new key
        g, // group to add
        x; // key of group to add

    // If a reset is needed, we don't need to update the reduce values.
    if (resetNeeded) add = initial = crossfilter_null;

    // Reset the new groups (k is a lower bound).
    // Also, make sure that groupIndex exists and is long enough.
    groups = new Array(k), k = 0;
    groupIndex = k0 > 1 ? crossfilter_arrayLengthen(groupIndex, n) : crossfilter_index(n, groupCapacity);

    // Get the first old key (x0 of g0), if it exists.
    if (k0) x0 = (g0 = oldGroups[0]).key;

    // Find the first new key (x1), skipping NaN keys.
    // (x1 >= x1 is false only for NaN, so this advances past NaN keys.)
    while (i1 < n1 && !((x1 = key(newValues[i1])) >= x1)) ++i1;

    // While new keys remain…
    while (i1 < n1) {

      // Determine the lesser of the two current keys; new and old.
      // If there are no old keys remaining, then always add the new key.
      if (g0 && x0 <= x1) {
        g = g0, x = x0;

        // Record the new index of the old group.
        reIndex[i0] = k;

        // Retrieve the next old key.
        if (g0 = oldGroups[++i0]) x0 = g0.key;
      } else {
        g = {key: x1, value: initial()}, x = x1;
      }

      // Add the lesser group.
      groups[k] = g;

      // Add any selected records belonging to the added group, while
      // advancing the new key and populating the associated group index.
      // NOTE(review): `zero` appears to mask out this dimension's own filter
      // bit so records excluded only by this dimension still count — defined
      // outside this chunk; confirm against the enclosing dimension.
      while (!(x1 > x)) {
        groupIndex[j = newIndex[i1] + n0] = k;
        if (!(filters[j] & zero)) g.value = add(g.value, data[j]);
        if (++i1 >= n1) break;
        x1 = key(newValues[i1]);
      }

      groupIncrement();
    }

    // Add any remaining old groups that were greater than all new keys.
    // No incremental reduce is needed; these groups have no new records.
    // Also record the new index of the old group.
    while (i0 < k0) {
      groups[reIndex[i0] = k] = oldGroups[i0++];
      groupIncrement();
    }

    // If we added any new groups before any old groups,
    // update the group index of all the old records.
    if (k > i0) for (i0 = 0; i0 < n0; ++i0) {
      groupIndex[i0] = reIndex[groupIndex[i0]];
    }

    // Modify the update and reset behavior based on the cardinality.
    // If the cardinality is less than or equal to one, then the groupIndex
    // is not needed. If the cardinality is zero, then there are no records
    // and therefore no groups to update or reset. Note that we also must
    // change the registered listener to point to the new method.
    j = filterListeners.indexOf(update);
    if (k > 1) {
      update = updateMany;
      reset = resetMany;
    } else {
      if (k === 1) {
        update = updateOne;
        reset = resetOne;
      } else {
        update = crossfilter_null;
        reset = crossfilter_null;
      }
      groupIndex = null;
    }
    filterListeners[j] = update;

    // Count the number of added groups,
    // and widen the group index as needed.
    function groupIncrement() {
      if (++k === groupCapacity) {
        reIndex = crossfilter_arrayWiden(reIndex, groupWidth <<= 1);
        groupIndex = crossfilter_arrayWiden(groupIndex, groupWidth);
        groupCapacity = crossfilter_capacity(groupWidth);
      }
    }
  }

  // Reduces the specified selected or deselected records.
  // This function is only used when the cardinality is greater than 1.
  // NOTE(review): `one` is presumably this dimension's own filter bit;
  // updates triggered by this dimension's filter are ignored — confirm.
  function updateMany(filterOne, added, removed) {
    if (filterOne === one || resetNeeded) return;

    var i,
        k,
        n,
        g;

    // Add the added values.
    for (i = 0, n = added.length; i < n; ++i) {
      if (!(filters[k = added[i]] & zero)) {
        g = groups[groupIndex[k]];
        g.value = reduceAdd(g.value, data[k]);
      }
    }

    // Remove the removed values.
    for (i = 0, n = removed.length; i < n; ++i) {
      if ((filters[k = removed[i]] & zero) === filterOne) {
        g = groups[groupIndex[k]];
        g.value = reduceRemove(g.value, data[k]);
      }
    }
  }

  // Reduces the specified selected or deselected records.
  // This function is only used when the cardinality is 1, so all records
  // belong to the single group and no groupIndex lookup is needed.
  function updateOne(filterOne, added, removed) {
    if (filterOne === one || resetNeeded) return;

    var i,
        k,
        n,
        g = groups[0];

    // Add the added values.
    for (i = 0, n = added.length; i < n; ++i) {
      if (!(filters[k = added[i]] & zero)) {
        g.value = reduceAdd(g.value, data[k]);
      }
    }

    // Remove the removed values.
    for (i = 0, n = removed.length; i < n; ++i) {
      if ((filters[k = removed[i]] & zero) === filterOne) {
        g.value = reduceRemove(g.value, data[k]);
      }
    }
  }

  // Recomputes the group reduce values from scratch.
  // This function is only used when the cardinality is greater than 1.
  function resetMany() {
    var i,
        g;

    // Reset all group values.
    for (i = 0; i < k; ++i) {
      groups[i].value = reduceInitial();
    }

    // Add any selected records.
    for (i = 0; i < n; ++i) {
      if (!(filters[i] & zero)) {
        g = groups[groupIndex[i]];
        g.value = reduceAdd(g.value, data[i]);
      }
    }
  }

  // Recomputes the group reduce values from scratch.
  // This function is only used when the cardinality is 1.
  function resetOne() {
    var i,
        g = groups[0];

    // Reset the singleton group values.
    g.value = reduceInitial();

    // Add any selected records.
    for (i = 0; i < n; ++i) {
      if (!(filters[i] & zero)) {
        g.value = reduceAdd(g.value, data[i]);
      }
    }
  }

  // Returns the array of group values, in the dimension's natural order.
  // Triggers the lazy reset if the reduce values are stale.
  function all() {
    if (resetNeeded) reset(), resetNeeded = false;
    return groups;
  }

  // Returns a new array containing the top K group values, in reduce order.
  function top(k) {
    var top = select(all(), 0, groups.length, k);
    return heap.sort(top, 0, top.length);
  }

  // Sets the reduce behavior for this group to use the specified functions.
  // This method lazily recomputes the reduce values, waiting until needed.
  function reduce(add, remove, initial) {
    reduceAdd = add;
    reduceRemove = remove;
    reduceInitial = initial;
    resetNeeded = true;
    return group;
  }

  // A convenience method for reducing by count.
  function reduceCount() {
    return reduce(crossfilter_reduceIncrement, crossfilter_reduceDecrement, crossfilter_zero);
  }

  // A convenience method for reducing by sum(value).
  function reduceSum(value) {
    return reduce(crossfilter_reduceAdd(value), crossfilter_reduceSubtract(value), crossfilter_zero);
  }

  // Sets the reduce order, using the specified accessor.
  // Rebuilds the heapselect and heap used by top().
  function order(value) {
    select = heapselect_by(valueOf);
    heap = heap_by(valueOf);
    function valueOf(d) { return value(d.value); }
    return group;
  }

  // A convenience method for natural ordering by reduce value.
  function orderNatural() {
    return order(crossfilter_identity);
  }

  // Returns the cardinality of this group, irrespective of any filters.
  function size() {
    return k;
  }

  // Default behavior: reduce by count, ordered naturally by reduce value.
  return reduceCount().orderNatural();
}
|
1048
|
+
|
1049
|
+
// A convenience function for generating a singleton group.
function groupAll() {
  // Group on a constant (null) key, so every record falls into one bin.
  var g = group(crossfilter_null);
  var all = g.all;

  // Hide the methods that make no sense for a single group; the singleton
  // exposes only the reduce methods plus value().
  var hidden = ["all", "top", "order", "orderNatural", "size"];
  for (var i = 0; i < hidden.length; i++) delete g[hidden[i]];

  // value() reports the reduce value of the one and only group.
  g.value = function() { return all()[0].value; };
  return g;
}
|
1060
|
+
|
1061
|
+
return dimension;
|
1062
|
+
}
|
1063
|
+
|
1064
|
+
// A convenience method for groupAll on a dummy dimension.
// This implementation can be optimized since it is always cardinality 1.
function groupAll() {
  // Public interface: a singleton group exposing only reduce* and value.
  var group = {
    reduce: reduce,
    reduceCount: reduceCount,
    reduceSum: reduceSum,
    value: value
  };

  var reduceValue,        // the current reduce value
      reduceAdd,          // incremental reduce: fold a record in
      reduceRemove,       // incremental reduce: fold a record out
      reduceInitial,      // produces a fresh initial reduce value
      resetNeeded = true; // recompute from scratch on next value()

  // The group listens to the crossfilter for when any dimension changes, so
  // that it can update the reduce value. It must also listen to the parent
  // dimension for when data is added.
  filterListeners.push(update);
  dataListeners.push(add);

  // For consistency; actually a no-op since resetNeeded is true.
  add(data, 0, n);

  // Incorporates the specified new values into this group.
  function add(newData, n0, n1) {
    if (resetNeeded) return;

    // Fold in every newly-added record that is currently selected.
    for (var i = n0; i < n; ++i) {
      if (!filters[i]) reduceValue = reduceAdd(reduceValue, data[i]);
    }
  }

  // Reduces the specified selected or deselected records.
  function update(filterOne, added, removed) {
    if (resetNeeded) return;

    var i, j;

    // Fold in records that just became selected.
    for (i = 0; i < added.length; ++i) {
      j = added[i];
      if (!filters[j]) reduceValue = reduceAdd(reduceValue, data[j]);
    }

    // Fold out records that were deselected by this filter alone.
    for (i = 0; i < removed.length; ++i) {
      j = removed[i];
      if (filters[j] === filterOne) reduceValue = reduceRemove(reduceValue, data[j]);
    }
  }

  // Recomputes the group reduce value from scratch.
  function reset() {
    reduceValue = reduceInitial();
    for (var i = 0; i < n; ++i) {
      if (!filters[i]) reduceValue = reduceAdd(reduceValue, data[i]);
    }
  }

  // Sets the reduce behavior for this group to use the specified functions.
  // This method lazily recomputes the reduce value, waiting until needed.
  function reduce(add, remove, initial) {
    reduceAdd = add;
    reduceRemove = remove;
    reduceInitial = initial;
    resetNeeded = true;
    return group;
  }

  // A convenience method for reducing by count.
  function reduceCount() {
    return reduce(crossfilter_reduceIncrement, crossfilter_reduceDecrement, crossfilter_zero);
  }

  // A convenience method for reducing by sum(value).
  function reduceSum(value) {
    return reduce(crossfilter_reduceAdd(value), crossfilter_reduceSubtract(value), crossfilter_zero);
  }

  // Returns the computed reduce value, recomputing lazily if stale.
  function value() {
    if (resetNeeded) {
      reset();
      resetNeeded = false;
    }
    return reduceValue;
  }

  return reduceCount();
}
|
1167
|
+
|
1168
|
+
// Returns the number of records in this crossfilter, irrespective of any filters.
// (n is the record count maintained by the enclosing crossfilter closure.)
function size() {
  return n;
}
|
1172
|
+
|
1173
|
+
return arguments.length
|
1174
|
+
? add(arguments[0])
|
1175
|
+
: crossfilter;
|
1176
|
+
}
|
1177
|
+
|
1178
|
+
// Returns an array of size n, big enough to store ids up to m.
// Chooses the narrowest backing array that can represent the ids.
function crossfilter_index(n, m) {
  if (m < 0x101) return crossfilter_array8(n);
  if (m < 0x10001) return crossfilter_array16(n);
  return crossfilter_array32(n);
}
|
1185
|
+
|
1186
|
+
// Constructs a new array of size n, with sequential values from 0 to n - 1.
function crossfilter_range(n) {
  var range = crossfilter_index(n, n);
  for (var i = 0; i < n; ++i) {
    range[i] = i;
  }
  return range;
}
|
1192
|
+
|
1193
|
+
// Returns the number of distinct values representable in w bits
// (8 → 256, 16 → 65536, anything else → 2^32).
function crossfilter_capacity(w) {
  switch (w) {
    case 8: return 0x100;
    case 16: return 0x10000;
    default: return 0x100000000;
  }
}
|
1199
|
+
})(this);
|