embulk-executor-mapreduce 0.2.2 → 0.2.3
- checksums.yaml +4 -4
- data/classpath/{embulk-executor-mapreduce-0.2.2.jar → embulk-executor-mapreduce-0.2.3.jar} +0 -0
- data/src/main/java/org/embulk/executor/mapreduce/MapReduceExecutor.java +48 -24
- data/src/main/java/org/embulk/executor/mapreduce/TimestampPartitioning.java +11 -6
- data/src/test/java/org/embulk/executor/mapreduce/MapReduceExecutorTestRuntime.java +130 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestAttemptState.java +58 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestEmbulkInputFormat.java +54 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestEmbulkInputSplit.java +46 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestEmbulkRecordReader.java +25 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestMapReduceExecutor.java +251 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestPageBufferWritable.java +84 -0
- data/src/test/java/org/embulk/executor/mapreduce/TestTimestampPartitioning.java +222 -0
- data/src/test/resources/config/core-site.xml +8 -0
- data/src/test/resources/config/embulk_mapred_config.yml +38 -0
- data/src/test/resources/config/embulk_mapred_invalid_config_files_config.yml +38 -0
- data/src/test/resources/config/embulk_mapred_invalid_libjars_config.yml +40 -0
- data/src/test/resources/config/embulk_mapred_invalid_partitioning_config.yml +40 -0
- data/src/test/resources/config/embulk_mapred_invalid_reducers_config.yml +44 -0
- data/src/test/resources/config/embulk_mapred_partitioning_config.yml +43 -0
- data/src/test/resources/config/embulk_mapred_stop_on_invalid_record_config.yml +39 -0
- data/src/test/resources/config/hdfs-site.xml +18 -0
- data/src/test/resources/config/mapred-site.xml +8 -0
- data/src/test/resources/fixtures/csv/sample1.csv +3 -0
- data/src/test/resources/fixtures/csv/sample2.csv +4 -0
- data/src/test/resources/fixtures/invalid_csv/sample1.csv +4 -0
- data/src/test/resources/fixtures/invalid_csv/sample2.csv +3 -0
- metadata +25 -3
data/src/test/java/org/embulk/executor/mapreduce/TestEmbulkRecordReader.java
@@ -0,0 +1,25 @@
+package org.embulk.executor.mapreduce;
+
+import org.apache.hadoop.io.NullWritable;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestEmbulkRecordReader
+{
+    @Test
+    public void simpleTest()
+    {
+        int[] taskIndexes = new int[] {0, 1, 4, 6, 7};
+        try (EmbulkRecordReader r = new EmbulkRecordReader(new EmbulkInputSplit(taskIndexes))) {
+            int i = 0;
+            while (r.nextKeyValue()) {
+                assertEquals(taskIndexes[i], r.getCurrentKey().get());
+                assertTrue(r.getCurrentValue() instanceof NullWritable);
+                i++;
+            }
+            assertEquals(taskIndexes.length, i);
+        }
+    }
+}
data/src/test/java/org/embulk/executor/mapreduce/TestMapReduceExecutor.java
@@ -0,0 +1,251 @@
+package org.embulk.executor.mapreduce;
+
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.util.Modules;
+import org.embulk.EmbulkEmbed;
+import org.embulk.RandomManager;
+import org.embulk.config.ConfigException;
+import org.embulk.config.ConfigLoader;
+import org.embulk.config.ConfigSource;
+import org.embulk.config.UserDataExceptions;
+import org.embulk.exec.PartialExecutionException;
+import org.embulk.spi.ExecutorPlugin;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.ILoggerFactory;
+import org.slf4j.impl.Log4jLoggerFactory;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.List;
+import java.util.Random;
+
+import static org.embulk.plugin.InjectedPluginSource.registerPluginTo;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+// this tests use Hadoop's standalone mode
+public class TestMapReduceExecutor
+{
+    private EmbulkEmbed embulk;
+    private Random random = new RandomManager(System.currentTimeMillis()).getRandom();
+
+    @Before
+    public void createResources()
+    {
+        EmbulkEmbed.Bootstrap bootstrap = new EmbulkEmbed.Bootstrap();
+
+        ConfigSource systemConfig = bootstrap.getSystemConfigLoader().newConfigSource();
+
+        if (random.nextBoolean()) {
+            systemConfig.set("embulk_factory_class", MapReduceEmbulkFactory.class.getName());
+        } else {
+            systemConfig.set("embulk_factory_class", MapReduceEmbulkFactory2.class.getName());
+        }
+
+        bootstrap.setSystemConfig(systemConfig);
+        bootstrap.overrideModules(getModuleOverrides(systemConfig));
+        embulk = bootstrap.initialize();
+    }
+
+    @Test
+    public void testEmbulkMapper()
+            throws Exception
+    {
+        ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_config.yml");
+        embulk.run(config);
+        // TODO compare input and output
+    }
+
+    @Test
+    public void testEmbulkPartitioningMapperReducer()
+            throws Exception
+    {
+        ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_partitioning_config.yml");
+        embulk.run(config);
+        // TODO compare input and output
+    }
+
+    @Test
+    public void testInvalidConfigFiles()
+            throws Exception
+    {
+        try {
+            ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_invalid_config_files_config.yml");
+            embulk.run(config);
+            fail();
+        }
+        catch (Throwable t) {
+            assertTrue(t instanceof ConfigException);
+        }
+    }
+
+    @Test
+    public void testInvalidPartitioning()
+            throws Exception
+    {
+        try {
+            ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_invalid_partitioning_config.yml");
+            embulk.run(config);
+            fail();
+        }
+        catch (Throwable t) {
+            assertTrue(t instanceof ConfigException);
+        }
+    }
+
+    @Test
+    public void testInvalidReducers()
+            throws Exception
+    {
+        try {
+            ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_invalid_reducers_config.yml");
+            embulk.run(config);
+            fail();
+        }
+        catch (Throwable t) {
+            assertTrue(t instanceof ConfigException);
+        }
+    }
+
+    @Test
+    public void testInvalidLibjars()
+            throws Exception
+    {
+        try {
+            ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_invalid_libjars_config.yml");
+            embulk.run(config);
+            fail();
+        }
+        catch (Throwable t) {
+            assertTrue(t.getCause() instanceof FileNotFoundException);
+        }
+    }
+
+    @Test
+    public void testStopOnInvalidRecord()
+            throws Exception
+    {
+        try {
+            ConfigSource config = loadConfigSource(embulk.newConfigLoader(), "config/embulk_mapred_stop_on_invalid_record_config.yml");
+            embulk.run(config);
+            fail();
+        }
+        catch (Throwable t) {
+            t.printStackTrace();
+            assertTrue(t instanceof PartialExecutionException);
+            assertTrue(UserDataExceptions.isUserDataException(t.getCause()));
+        }
+    }
+
+    private static ConfigSource loadConfigSource(ConfigLoader configLoader, String yamlFile)
+            throws IOException
+    {
+        return configLoader.fromYaml(TestMapReduceExecutor.class.getClassLoader().getResourceAsStream(yamlFile));
+    }
+
+    private static Function<List<Module>, List<Module>> getModuleOverrides(final ConfigSource systemConfig)
+    {
+        return new Function<List<Module>, List<Module>>()
+        {
+            public List<Module> apply(List<Module> modules)
+            {
+                return overrideModules(modules, systemConfig);
+            }
+        };
+    }
+
+    private static List<Module> overrideModules(List<Module> modules, ConfigSource systemConfig)
+    {
+        return ImmutableList.of(Modules.override(Iterables.concat(modules, getAdditionalModules(systemConfig)))
+                .with(getOverrideModules(systemConfig)));
+    }
+
+    private static List<Module> getAdditionalModules(ConfigSource systemConfig)
+    {
+        return ImmutableList.<Module>of(new ExecutorPluginApplyModule());
+    }
+
+    private static List<Module> getOverrideModules(ConfigSource systemConfig)
+    {
+        return ImmutableList.<Module>of(new LoggerOverrideModule());
+    }
+
+    static class ExecutorPluginApplyModule
+            implements Module
+    {
+        @Override
+        public void configure(Binder binder)
+        {
+            registerPluginTo(binder, ExecutorPlugin.class, "mapreduce", MapReduceExecutor.class);
+        }
+    }
+
+    static class LoggerOverrideModule
+            implements Module
+    {
+        @Override
+        public void configure(Binder binder)
+        {
+            binder.bind(ILoggerFactory.class).toProvider(new Provider<ILoggerFactory>()
+            {
+                @Override
+                public ILoggerFactory get()
+                {
+                    return new Log4jLoggerFactory(); // YARN has used log4j.
+                }
+            });
+        }
+    }
+
+    public static class MapReduceEmbulkFactory
+    {
+        public EmbulkEmbed.Bootstrap bootstrap(final ConfigSource systemConfig)
+        {
+            EmbulkEmbed.Bootstrap bootstrap = new EmbulkEmbed.Bootstrap();
+            bootstrap.setSystemConfig(systemConfig);
+
+            // add modules
+            //bootstrap.addModules(ImmutableList.<Module>of());
+
+            // override modules
+            bootstrap.overrideModules(new Function<List<Module>, List<Module>>()
+            {
+                public List<Module> apply(List<Module> modules)
+                {
+                    return ImmutableList.of(Modules.override(modules).with(new LoggerOverrideModule()));
+                }
+            });
+
+            return bootstrap;
+        }
+    }
+
+    public static class MapReduceEmbulkFactory2
+    {
+        public EmbulkEmbed.Bootstrap bootstrap(final ConfigSource systemConfig, final ConfigSource executorParams)
+        {
+            EmbulkEmbed.Bootstrap bootstrap = new EmbulkEmbed.Bootstrap();
+            bootstrap.setSystemConfig(systemConfig);
+
+            // add modules
+            //bootstrap.addModules(ImmutableList.<Module>of());
+
+            // override modules
+            bootstrap.overrideModules(new Function<List<Module>, List<Module>>()
+            {
+                public List<Module> apply(List<Module> modules)
+                {
+                    return ImmutableList.of(Modules.override(modules).with(new LoggerOverrideModule()));
+                }
+            });
+
+            return bootstrap;
+        }
+    }
+}
data/src/test/java/org/embulk/executor/mapreduce/TestPageBufferWritable.java
@@ -0,0 +1,84 @@
+package org.embulk.executor.mapreduce;
+
+import org.embulk.spi.Buffer;
+import org.embulk.spi.Page;
+import org.embulk.spi.PageTestUtils;
+import org.embulk.spi.Schema;
+import org.embulk.spi.time.Timestamp;
+import org.embulk.spi.type.Types;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestPageBufferWritable
+{
+    @Rule
+    public MapReduceExecutorTestRuntime runtime = new MapReduceExecutorTestRuntime();
+
+    @Test
+    public void writeAndRead() throws IOException
+    {
+        Schema schema = Schema.builder()
+                .add("c0", Types.BOOLEAN)
+                .add("c1", Types.LONG)
+                .add("c2", Types.DOUBLE)
+                .add("c3", Types.STRING)
+                .add("c4", Types.TIMESTAMP)
+                .build();
+
+        for (Page page : PageTestUtils.buildPage(runtime.getBufferAllocator(), schema,
+                true, 2L, 3.0D, "45", Timestamp.ofEpochMilli(678L),
+                true, 2L, 3.0D, "45", Timestamp.ofEpochMilli(678L))) {
+
+            try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+                try (DataOutputStream dout = new DataOutputStream(out)) {
+                    PageWritable pw1 = new PageWritable();
+                    pw1.set(page);
+                    pw1.write(dout);
+
+                    BufferWritable bw1 = new BufferWritable();
+                    bw1.set(page.buffer());
+                    bw1.write(dout);
+                    dout.flush();
+
+                    try (DataInputStream din = new DataInputStream(new ByteArrayInputStream(out.toByteArray()))) {
+                        PageWritable pw2 = new PageWritable();
+                        pw2.readFields(din);
+
+                        BufferWritable bw2 = new BufferWritable();
+                        bw2.readFields(din);
+
+                        assertPageWritableEquals(pw1, pw2);
+                        assertBufferWritableEquals(bw1, bw2);
+                    }
+                }
+            }
+        }
+    }
+
+    static void assertPageWritableEquals(PageWritable pw1, PageWritable pw2)
+    {
+        Page p1 = pw1.get();
+        Page p2 = pw2.get();
+
+        assertEquals(p1.getStringReferences(), p2.getStringReferences());
+        assertBufferEquals(p1.buffer(), p2.buffer());
+    }
+
+    static void assertBufferWritableEquals(BufferWritable bw1, BufferWritable bw2)
+    {
+        assertBufferEquals(bw1.get(), bw2.get());
+    }
+
+    static void assertBufferEquals(Buffer b1, Buffer b2)
+    {
+        assertEquals(b1, b2);
+    }
+}
data/src/test/java/org/embulk/executor/mapreduce/TestTimestampPartitioning.java
@@ -0,0 +1,222 @@
+package org.embulk.executor.mapreduce;
+
+import org.embulk.config.ConfigException;
+import org.embulk.config.ConfigSource;
+import org.embulk.spi.Column;
+import org.embulk.spi.Page;
+import org.embulk.spi.PageReader;
+import org.embulk.spi.PageTestUtils;
+import org.embulk.spi.Schema;
+import org.embulk.spi.time.Timestamp;
+import org.embulk.spi.type.Types;
+import org.embulk.executor.mapreduce.TimestampPartitioning.LongUnixTimestampPartitioner;
+import org.embulk.executor.mapreduce.TimestampPartitioning.TimestampPartitioner;
+import org.embulk.executor.mapreduce.TimestampPartitioning.Unit;
+import org.embulk.executor.mapreduce.TimestampPartitioning.UnixTimestampUnit;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.*;
+
+public class TestTimestampPartitioning
+{
+    @Rule
+    public MapReduceExecutorTestRuntime runtime = new MapReduceExecutorTestRuntime();
+
+    private TimestampPartitioning tp;
+
+    @Before
+    public void createTimestampPartitioning()
+    {
+        tp = new TimestampPartitioning();
+    }
+
+    @Test
+    public void validateConfigSource()
+            throws IOException
+    {
+        { // specified column is not included in schema
+            ConfigSource config = runtime.getExec().newConfigSource()
+                    .set("column", "_c0").set("unit", "hour").set("timezone", "UTC");
+            Schema schema = Schema.builder().add("not_included", Types.TIMESTAMP).build();
+
+            try {
+                tp.configure(config, schema, 0);
+                fail();
+            } catch (Throwable t) {
+                assertTrue(t instanceof ConfigException);
+            }
+        }
+
+        { // only UTC is supported now
+            ConfigSource config = runtime.getExec().newConfigSource()
+                    .set("column", "_c0").set("unit", "hour").set("timezone", "PDT");
+            Schema schema = Schema.builder().add("_c0", Types.TIMESTAMP).build();
+
+            try {
+                tp.configure(config, schema, 0);
+                fail();
+            } catch (Throwable t) {
+                assertTrue(t instanceof ConfigException);
+            }
+        }
+
+        { // the unit is only 'hour' or 'day'
+            ConfigSource config = runtime.getExec().newConfigSource()
+                    .set("column", "_c0").set("unit", "invalid").set("timezone", "UTC");
+            Schema schema = Schema.builder().add("_c0", Types.TIMESTAMP).build();
+
+            try {
+                tp.configure(config, schema, 0);
+                fail();
+            } catch (Throwable t) {
+                assertTrue(t instanceof ConfigException);
+            }
+        }
+
+        { // the column type is only timestamp or long
+            ConfigSource config = runtime.getExec().newConfigSource()
+                    .set("column", "_c0").set("unit", "hour").set("timezone", "UTC");
+            Schema schema = Schema.builder().add("_c0", Types.STRING).build();
+
+            try {
+                tp.configure(config, schema, 0);
+                fail();
+            } catch (Throwable t) {
+                assertTrue(t instanceof ConfigException);
+            }
+        }
+
+        { // if the column type is long, unix_timestamp_unit is required
+            ConfigSource config = runtime.getExec().newConfigSource()
+                    .set("column", "_c0").set("unit", "hour").set("timezone", "UTC").set("unix_timestamp_unit", "invalid");
+            Schema schema = Schema.builder().add("_c0", Types.LONG).build();
+
+            try {
+                tp.configure(config, schema, 0);
+                fail();
+            } catch (Throwable t) {
+                assertTrue(t instanceof ConfigException);
+            }
+        }
+    }
+
+    @Test
+    public void comparePartitionKeys()
+            throws Exception
+    {
+        List<PartitionKey> pks = new ArrayList<>();
+
+        Column c0 = new Column(0, "c0", Types.LONG);
+        Column c1 = new Column(1, "c1", Types.TIMESTAMP);
+        Schema schema = new Schema(Arrays.asList(c0, c1));
+
+        LongUnixTimestampPartitioner lp = new LongUnixTimestampPartitioner(c0, Unit.HOUR, UnixTimestampUnit.SEC);
+        TimestampPartitioner tp = new TimestampPartitioner(c1, Unit.HOUR);
+
+        long timeWindow = System.currentTimeMillis()/1000/3600*3600;
+        PageReader r = new PageReader(schema);
+        for (Page page : PageTestUtils.buildPage(runtime.getBufferAllocator(), schema,
+                timeWindow, Timestamp.ofEpochSecond(timeWindow),
+                timeWindow+1, Timestamp.ofEpochSecond(timeWindow+1),
+                timeWindow+3600, Timestamp.ofEpochSecond(timeWindow+3600),
+                timeWindow+3600+1, Timestamp.ofEpochSecond(timeWindow+3600+1),
+                timeWindow+2*3600, Timestamp.ofEpochSecond(timeWindow+2*3600),
+                timeWindow+2*3600+1, Timestamp.ofEpochSecond(timeWindow+2*3600+1)
+                )) {
+            r.setPage(page);
+            while (r.nextRecord()) {
+                pks.add(lp.updateKey(r).clone());
+                pks.add(tp.updateKey(r).clone());
+            }
+        }
+
+        for (int i = 0; i < pks.size(); i += 2) {
+            assertTrue(pks.get(i).equals(pks.get(i+1))); // long(tw) == timestamp(tw)
+        }
+        for (int i = 0; i < pks.size() - 4; i += 4) {
+            assertTrue(pks.get(i).equals(pks.get(i+2))); // long(tw) == long (tw+1)
+        }
+        for (int i = 0; i < pks.size() - 4; i += 4) {
+            assertFalse(pks.get(i).equals(pks.get(i+4))); // long(tw) != long (tw+3600)
+        }
+    }
+
+    @Test
+    public void checkUnit()
+    {
+        long hourlyTimeWindow = System.currentTimeMillis() / 1000 / 3600 * 3600;
+        long dailyTimeWindow = System.currentTimeMillis() / 1000 / 86400 * 86600;
+
+        // hour
+        {
+            assertEquals(Unit.HOUR, Unit.of("hour"));
+            assertTrue(Unit.HOUR.utcPartition(hourlyTimeWindow) == Unit.HOUR.utcPartition(hourlyTimeWindow + 1));
+            assertTrue(Unit.HOUR.utcPartition(hourlyTimeWindow) != Unit.HOUR.utcPartition(hourlyTimeWindow + 3600));
+        }
+
+        // day
+        {
+            assertEquals(Unit.DAY, Unit.of("day"));
+            assertTrue(Unit.DAY.utcPartition(dailyTimeWindow) == Unit.DAY.utcPartition(dailyTimeWindow + 1));
+            assertTrue(Unit.DAY.utcPartition(dailyTimeWindow) != Unit.DAY.utcPartition(dailyTimeWindow + 86400));
+        }
+
+        // invalid_unit
+        {
+            try {
+                Unit.of("invalid_unit");
+                fail();
+            } catch (Exception e) {
+                assertTrue(e instanceof ConfigException);
+            }
+        }
+    }
+
+    @Test
+    public void checkUnixTimestampUnit()
+    {
+        long currentNano = System.nanoTime();
+        long currentSec = currentNano / 1000000000;
+
+        // sec
+        {
+            assertEquals(UnixTimestampUnit.SEC, UnixTimestampUnit.of("sec"));
+            assertEquals(currentSec, UnixTimestampUnit.SEC.toSeconds(currentNano / 1000000000));
+        }
+
+        // milli
+        {
+            assertEquals(UnixTimestampUnit.MILLI, UnixTimestampUnit.of("milli"));
+            assertEquals(currentSec, UnixTimestampUnit.MILLI.toSeconds(currentNano / 1000000));
+        }
+
+        // micro
+        {
+            assertEquals(UnixTimestampUnit.MICRO, UnixTimestampUnit.of("micro"));
+            assertEquals(currentSec, UnixTimestampUnit.MICRO.toSeconds(currentNano / 1000));
+        }
+
+        // nano
+        {
+            assertEquals(UnixTimestampUnit.NANO, UnixTimestampUnit.of("nano"));
+            assertEquals(currentSec, UnixTimestampUnit.NANO.toSeconds(currentNano));
+        }
+
+        // invalid_unit
+        {
+            try {
+                UnixTimestampUnit.of("invalid_unit");
+                fail();
+            } catch (Exception e) {
+                assertTrue(e instanceof ConfigException);
+            }
+        }
+    }
+}