embulk-parser-firebase_avro 0.1.1 → 0.1.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 9335540798899c6d98b90f79d9afeddf525cf50d
- data.tar.gz: 700ea6f57bbc2699431aefe46627bb90b7edfb23
+ metadata.gz: b4ac8ed35dad7dbbde18928b68dca357b9fc5784
+ data.tar.gz: 898d4040eaf4d63ed5b2a5dbd5de8d361a33c962
  SHA512:
- metadata.gz: a2fb05f739e76c481b62bd8b909fd1d4617d48642b1ae85516abe1a12aa9a5cca547ee3be44ce2087740fe5eebd5ade48058933fc533ec080230b56adc4e09d4
- data.tar.gz: 8f5ffc78f5ebd333a787a14b77d6a3d7f833fa782b10397a7fe9913ee80a38b5f3b4341b41434ec5a88f78937ef705dad09bcf428664da093a4d5d9771d98f96
+ metadata.gz: a07439fba8c76371a24d7d981b637b5fb9aed5b45885f10280923c44bf968acb29d5f2ea8923a82848257a3621d68425514f17c81082a0b060e65479bba94be5
+ data.tar.gz: 2b2582964a00485b7067296a6bb12d72b90173e06c6cadc7a4b6bdf5e5ce08009500f0258e774f69c2ebab87c7a98f11e2f422faa4f96f376aa3f3c302120c80
data/build.gradle CHANGED
@@ -13,7 +13,7 @@ configurations {
  provided
  }

- version = "0.1.1"
+ version = "0.1.2"

  sourceCompatibility = 1.7
  targetCompatibility = 1.7
data/build.sbt CHANGED
@@ -1,29 +1,32 @@
- lazy val root = (project in file(".")).
- settings(
- inThisBuild(List(
+ enablePlugins(ScalafmtPlugin)
+
+ lazy val root = (project in file(".")).settings(
+ inThisBuild(
+ List(
  organization := "com.example",
  scalaVersion := "2.11.11",
- version := "0.1.0-SNAPSHOT"
+ version := "0.1.0-SNAPSHOT"
  )),
- name := "embulk-parser-firebase_avro"
- )
+ name := "embulk-parser-firebase_avro",
+ scalafmtOnCompile in ThisBuild := true,
+ scalafmtTestOnCompile in ThisBuild := true
+ )

  enablePlugins(ScalafmtPlugin)

  resolvers += Resolver.jcenterRepo
  resolvers += Resolver.sonatypeRepo("releases")

-
  lazy val circeVersion = "0.8.0"
  libraryDependencies ++= Seq(
- "com.sksamuel.avro4s" %% "avro4s-core" % "1.6.4",
- "org.jruby" % "jruby-complete" % "1.6.5",
- "org.embulk" % "embulk-core" % "0.8.22",
- "com.chuusai" %% "shapeless" % "2.3.2",
- "io.circe" %% "circe-core" % circeVersion,
- "io.circe" %% "circe-generic" % circeVersion,
- "org.scalacheck" %% "scalacheck" % "1.13.4" % Test,
- "org.scalatest" %% "scalatest" % "3.0.1" % Test,
- "org.scalamock" %% "scalamock-scalatest-support" % "3.6.0" % Test,
- "com.github.alexarchambault" %% "scalacheck-shapeless_1.13" % "1.1.5" % Test
- )
+ "com.sksamuel.avro4s" %% "avro4s-core" % "1.6.4",
+ "org.jruby" % "jruby-complete" % "1.6.5",
+ "org.embulk" % "embulk-core" % "0.8.22",
+ "com.chuusai" %% "shapeless" % "2.3.2",
+ "io.circe" %% "circe-core" % circeVersion,
+ "io.circe" %% "circe-generic" % circeVersion,
+ "org.scalacheck" %% "scalacheck" % "1.13.4" % Test,
+ "org.scalatest" %% "scalatest" % "3.0.1" % Test,
+ "org.scalamock" %% "scalamock-scalatest-support" % "3.6.0" % Test,
+ "com.github.alexarchambault" %% "scalacheck-shapeless_1.13" % "1.1.5" % Test
+ )
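
With this change the sbt build formats sources automatically: ScalafmtPlugin is enabled and format-on-compile is turned on for main and test sources, which explains the whitespace-only changes in the test sources further down. A minimal build.sbt sketch with the same effect, assuming the com.lucidchart sbt-scalafmt 1.x plugin declared in project/plugins.sbt (the keys come from that plugin, not from sbt itself):

    // Sketch only: format-on-compile wiring as used in this release.
    enablePlugins(ScalafmtPlugin)
    scalafmtOnCompile in ThisBuild := true      // reformat src/main sources before compile
    scalafmtTestOnCompile in ThisBuild := true  // reformat src/test sources before test:compile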
data/project/plugins.sbt CHANGED
@@ -1,3 +1,3 @@
  addSbtPlugin("com.julianpeeters" % "sbt-avrohugger" % "0.16.0")
- addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "0.4")
+ addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.7")
  addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.0-RC6")
data/settings.gradle ADDED
@@ -0,0 +1 @@
+ rootProject.name = 'embulk-parser-firebase_avro'
data/src/main/scala/org/embulk/parser/firebase_avro/FirebaseAvroParserPlugin.scala CHANGED
@@ -1,11 +1,10 @@
  package org.embulk.parser.firebase_avro

- import java.io.InputStream
+ import com.google.common.io.ByteStreams

  import scala.collection.JavaConverters._
  import com.sksamuel.avro4s.AvroInputStream
  import io.circe.Json
- import org.apache.commons.compress.utils.IOUtils
  import org.embulk.config.ConfigSource
  import org.embulk.config.TaskSource
  import org.embulk.parser.firebase_avro.column.Columns
@@ -22,45 +21,46 @@ object FirebaseAvroParserPlugin {
  }

  class FirebaseAvroParserPlugin extends ParserPlugin {
+ val logger = Exec.getLogger(classOf[FirebaseAvroParserPlugin])
  override def transaction(config: ConfigSource, control: ParserPlugin.Control): Unit = {
  val task = config.loadConfig(classOf[PluginTask])
  control.run(task.dump, FirebaseAvroParserPlugin.buildColumn())
  }

- override def run(taskSource: TaskSource, schema: Schema, input: FileInput, output: PageOutput): Unit =
- LoanPattern(new FileInputInputStream(input)) { efis =>
- LoanPattern(new PageBuilder(Exec.getBufferAllocator, schema, output)) { pb =>
- while (efis.nextFile()) {
- addRecords(efis, pb)
+ override def run(taskSource: TaskSource, schema: Schema, input: FileInput, output: PageOutput): Unit = {
+ LoanPattern(new PageBuilder(Exec.getBufferAllocator, schema, output)) { pb =>
+ while (input.nextFile()) {
+ val bytes = ByteStreams.toByteArray(new FileInputInputStream(input))
+ AvroInputStream.data[Root](bytes).iterator().toList.foreach { record =>
+ addRecords(pb, record)
  }
- pb.finish()
  }
+ pb.finish()
  }
+ }

- def addRecords(is: InputStream, pb: PageBuilder): Unit =
- AvroInputStream.data[Root](IOUtils.toByteArray(is)).iterator().foreach { record =>
- Parser(record).foreach { rows =>
- rows.foreach {
- case ValueHolder(c, Some(x: Int)) =>
- pb.setLong(c, x)
- case ValueHolder(c, Some(x: Long)) =>
- pb.setLong(c, x)
- case ValueHolder(c, Some(x: Double)) =>
- pb.setDouble(c, x)
- case ValueHolder(c, Some(x: Float)) =>
- pb.setDouble(c, x)
- case ValueHolder(c, Some(x: Boolean)) =>
- pb.setBoolean(c, x)
- case ValueHolder(c, Some(x: String)) =>
- pb.setString(c, x)
- case ValueHolder(c, Some(x: Json)) =>
- pb.setJson(c, new JsonParser().parse(x.noSpaces))
- case ValueHolder(c, Some(x: Timestamp)) =>
- pb.setTimestamp(c, x)
- case ValueHolder(c, None) =>
- pb.setNull(c)
- }
- pb.addRecord()
+ def addRecords(pb: PageBuilder, record: Root): Unit =
+ Parser(record).foreach { rows =>
+ rows.foreach {
+ case ValueHolder(c, Some(x: Int)) =>
+ pb.setLong(c, x)
+ case ValueHolder(c, Some(x: Long)) =>
+ pb.setLong(c, x)
+ case ValueHolder(c, Some(x: Double)) =>
+ pb.setDouble(c, x)
+ case ValueHolder(c, Some(x: Float)) =>
+ pb.setDouble(c, x)
+ case ValueHolder(c, Some(x: Boolean)) =>
+ pb.setBoolean(c, x)
+ case ValueHolder(c, Some(x: String)) =>
+ pb.setString(c, x)
+ case ValueHolder(c, Some(x: Json)) =>
+ pb.setJson(c, new JsonParser().parse(x.noSpaces))
+ case ValueHolder(c, Some(x: Timestamp)) =>
+ pb.setTimestamp(c, x)
+ case ValueHolder(c, None) =>
+ pb.setNull(c)
  }
+ pb.addRecord()
  }
  }
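
The rewritten run loop drains each input file into memory with Guava's ByteStreams.toByteArray and lets avro4s decode every Root record from the buffered bytes, instead of handing the raw stream to addRecords. A rough standalone sketch of the same read pattern, assuming a local Avro export and the Root case class generated for the Firebase schema (the main object and file argument are hypothetical, not part of the plugin):

    import java.io.FileInputStream
    import com.google.common.io.ByteStreams
    import com.sksamuel.avro4s.AvroInputStream

    object ReadFirebaseAvro {
      def main(args: Array[String]): Unit = {
        val in = new FileInputStream(args(0))  // e.g. an exported Firebase Avro file
        try {
          // Buffer the whole file, then decode every Root record from the bytes,
          // mirroring the new run() loop above.
          val bytes   = ByteStreams.toByteArray(in)
          val records = AvroInputStream.data[Root](bytes).iterator().toList
          records.foreach(record => println(record.user_dim))
        } finally in.close()
      }
    }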
data/src/main/scala/org/embulk/parser/firebase_avro/Parser.scala CHANGED
@@ -12,9 +12,14 @@ object Parser {

  def apply(record: Root): Seq[Seq[ValueHolder[_]]] = {
  val userFields = userDims(record.user_dim.getOrElse(sys.error("could not get user")))
- if (record.event_dim.isEmpty) sys.error("empty event")
- record.event_dim.map {
- userFields ++ eventDims(_)
+ if (record.event_dim.isEmpty) {
+ record.copy(event_dim = List(Event_Dim.empty)).event_dim.map {
+ userFields ++ eventDims(_)
+ }
+ } else {
+ record.event_dim.map {
+ userFields ++ eventDims(_)
+ }
  }
  }

@@ -6,3 +6,7 @@ case class Event_Dim(date: Option[String],
  timestamp_micros: Option[Long],
  previous_timestamp_micros: Option[Long],
  value_in_usd: Option[Double])
+
+ object Event_Dim {
+ val empty = Event_Dim(None, None, Nil, None, None, None)
+ }
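
Together, the Parser and Event_Dim changes replace the hard failure on records without events: a record whose event_dim list is empty is now mapped against a single placeholder Event_Dim.empty, so its user-level columns still produce one output row. A condensed, hypothetical restatement of the new branching (rowsFor is an illustrative name; userDims, eventDims, and the field names are taken from the diffs above):

    // Sketch: pick the real events, or one empty placeholder, then build rows.
    def rowsFor(record: Root): Seq[Seq[ValueHolder[_]]] = {
      val userFields = userDims(record.user_dim.getOrElse(sys.error("could not get user")))
      val events     = if (record.event_dim.isEmpty) List(Event_Dim.empty) else record.event_dim
      events.map(event => userFields ++ eventDims(event))
    }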
@@ -8,11 +8,11 @@ class ColumnsTest extends FlatSpec with MustMatchers {
  "columuns" should "be indexing" in {
  val columns = Columns()
  val summery = columns.indices.sum
- columns.map(_.embulkColumn).map(_.getIndex).sum should be (summery)
+ columns.map(_.embulkColumn).map(_.getIndex).sum should be(summery)
  }

  "columns" should "be finding" in {
- Columns.find("user_dim" , "first_open_timestamp_micros")
+ Columns.find("user_dim", "first_open_timestamp_micros")
  }

  }
@@ -8,12 +8,12 @@ import org.scalatest._
  class EventParmsJsonSerializerTest extends FlatSpec with MustMatchers {

  "parameter" should "be encoding" in {
- val string = Params(Option("key_string") , Option(Value(string_value = Some("abc"))))
- val int = Params(Option("key_int") , Option(Value(int_value = Some(1))))
- val double = Params(Option("key_double") , Option(Value(double_value = Some(10D))))
- val float = Params(Option("key_float") , Option(Value(float_value = Some(10F))))
- val given = List(string , int , double , float)
- val that = EventParmsJsonSerializer(given).map(_.noSpaces)
+ val string = Params(Option("key_string"), Option(Value(string_value = Some("abc"))))
+ val int = Params(Option("key_int"), Option(Value(int_value = Some(1))))
+ val double = Params(Option("key_double"), Option(Value(double_value = Some(10D))))
+ val float = Params(Option("key_float"), Option(Value(float_value = Some(10F))))
+ val given = List(string, int, double, float)
+ val that = EventParmsJsonSerializer(given).map(_.noSpaces)
  that mustBe Some("{\"key_float\":10.0,\"key_int\":1,\"key_string\":\"abc\",\"key_double\":10.0}")
  }
  }
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: embulk-parser-firebase_avro
  version: !ruby/object:Gem::Version
- version: 0.1.1
+ version: 0.1.2
  platform: ruby
  authors:
  - smdmts
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-07-12 00:00:00.000000000 Z
+ date: 2017-07-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -59,6 +59,7 @@ files:
  - lib/embulk/parser/firebase_avro.rb
  - project/build.properties
  - project/plugins.sbt
+ - settings.gradle
  - src/main/scala/org/embulk/parser/firebase_avro/FirebaseAvroParserPlugin.scala
  - src/main/scala/org/embulk/parser/firebase_avro/LoanPattern.scala
  - src/main/scala/org/embulk/parser/firebase_avro/Parser.scala
@@ -99,7 +100,7 @@ files:
  - classpath/circe-generic_2.11-0.8.0.jar
  - classpath/circe-numbers_2.11-0.8.0.jar
  - classpath/commons-compress-1.8.1.jar
- - classpath/embulk-parser-firebase_avro-0.1.1.jar
+ - classpath/embulk-parser-firebase_avro-0.1.2.jar
  - classpath/jackson-core-asl-1.9.13.jar
  - classpath/jackson-mapper-asl-1.9.13.jar
  - classpath/machinist_2.11-0.6.1.jar