mortar 0.15.32 → 0.15.33

checksums.yaml ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     M2EzNDc4OGVmZTc1MGE2MWI5MWQ2MjBmZTBmZjBiNzIwMWRiNGQ4Nw==
+   data.tar.gz: !binary |-
+     MWY2Yzk3MTkxNzk2MmM3MTg2NjZhODA4YTZmNWQ1Y2QwMTRkNWQyMQ==
+ SHA512:
+   metadata.gz: !binary |-
+     Njg5ZTc0NTQxOTg0NTg2ZWJiZTJjNTg3Njg3OGI2NzU1ZDNjYzdiNTJjNjBi
+     NzU2MTlhOTJmZWUyN2IyMDE3MzI2YjY5ZWJlNzdiZTU5ZDM4NmExNWNhYmVm
+     MGZlODNhYzY4ZTNhYjFiOGRmOTUwMTAzM2NmOGYzNzdkMjQwMzU=
+   data.tar.gz: !binary |-
+     OTU4YmJiYWUxODhkYjliNDMwNzllNGU3Y2ExNGZjMTRjMzRkNDQ5MjAzNjkw
+     ZWM2NzdiNjU4ZWRiZWE5OTJkMDRmYjM5MjkxNjczYjQwZWIwMWQ4NWU4NDdl
+     ZTdhMGUwYTIzZDk5YzExNWU0MzExNmMzYTNlOGRiOTYzMmJmNzc=
data/lib/mortar/command/base.rb CHANGED
@@ -103,6 +103,10 @@ class Mortar::Command::Base
      end
      param_list
    end
+
+   def spark_script_arguments
+     invalid_arguments.join(" ")
+   end
 
    def luigi_parameters
      parameters = []
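
Note: spark_script_arguments re-joins the positional tokens left over after option parsing, so everything following the script path is handed to Spark verbatim. A minimal sketch, assuming invalid_arguments holds the unparsed CLI tokens (values echo the example in spark.rb below):

    invalid_arguments        #=> ["s3://your-bucket/input", "s3://your-bucket/output", "100"]
    spark_script_arguments   #=> "s3://your-bucket/input s3://your-bucket/output 100"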
@@ -447,6 +451,18 @@ protected
    pigscript or controlscript
  end
 
+ def validate_sparkscript!(sparkscript_name)
+   shortened_script_name = File.basename(sparkscript_name)
+
+   unless sparkscript = project.sparkscripts[shortened_script_name]
+     available_scripts = project.sparkscripts.none? ? "No sparkscripts found" : "Available sparkscripts:\n#{project.sparkscripts.collect{|k,v| v.executable_path}.sort.join("\n")}"
+     error("Unable to find sparkscript #{sparkscript_name}\n#{available_scripts}")
+   end
+   #While validating we can load the defaults that are relevant to this script.
+   load_defaults(sparkscript_name)
+   sparkscript
+ end
+
  def validate_luigiscript!(luigiscript_name)
    shortened_script_name = File.basename(luigiscript_name, ".*")
    unless luigiscript = project.luigiscripts[shortened_script_name]
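
For orientation, a sketch of the validator's behavior (script name borrowed from the spark.rb examples below; error and load_defaults are existing helpers on the command base class):

    script = validate_sparkscript!("sparkscripts/classify_text.py")
    script.name   #=> "classify_text.py" -- the lookup key is File.basename of the given path
    # An unknown name aborts via error(...), listing each known script's executable_path.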
data/lib/mortar/command/clusters.rb CHANGED
@@ -27,11 +27,11 @@ class Mortar::Command::Clusters < Mortar::Command::Base
  def index
    validate_arguments!
 
-   clusters = api.get_clusters().body['clusters']
+   clusters = api.get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__ALL).body['clusters']
    if not clusters.empty?
      display_table(clusters,
-       %w( cluster_id size status_description cluster_type_description start_timestamp duration),
-       ['cluster_id', 'Size (# of Nodes)', 'Status', 'Type', 'Start Timestamp', 'Elapsed Time'])
+       %w( cluster_id size status_description cluster_type_description start_timestamp duration cluster_backend_description),
+       ['cluster_id', 'Size (# of Nodes)', 'Status', 'Type', 'Start Timestamp', 'Elapsed Time', 'Cluster Backend'])
    else
      display("There are no running or recent clusters")
    end
data/lib/mortar/command/jobs.rb CHANGED
@@ -114,7 +114,7 @@ class Mortar::Command::Jobs < Mortar::Command::Base
    end
 
    unless options[:clusterid] || options[:clustersize]
-     clusters = api.get_clusters().body['clusters']
+     clusters = api.get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_1).body['clusters']
 
      largest_free_cluster = clusters.select{ |c| \
        c['running_jobs'].length == 0 && c['status_code'] == Mortar::API::Clusters::STATUS_RUNNING }.
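
For context, every get_clusters call now carries a backend filter; the constants come from the mortar-api-ruby gem, which this release bumps to ~> 0.8.12 (see metadata below):

    Mortar::API::Jobs::CLUSTER_BACKEND__ALL            # `mortar clusters` lists every backend
    Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_1   # Pig jobs reuse only Hadoop 1 clusters
    Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_2   # Spark jobs reuse only Hadoop 2 clusters (spark.rb below)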
data/lib/mortar/command/spark.rb ADDED
@@ -0,0 +1,132 @@
+ #
+ # Copyright 2015 Mortar Data Inc.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+
+ require "mortar/command/base"
+ require "time"
+
+ # run spark jobs
+ #
+ class Mortar::Command::Spark < Mortar::Command::Base
+
+   include Mortar::Git
+
+   # spark SCRIPT
+   #
+   # Run a spark job.
+   #
+   # -c, --clusterid CLUSTERID   # Run job on an existing cluster with ID of CLUSTERID (optional)
+   # -s, --clustersize NUMNODES  # Run job on a new cluster, with NUMNODES nodes (optional; must be >= 2 if provided)
+   # -1, --singlejobcluster      # Stop the cluster after job completes. (Default: false--cluster can be used for other jobs, and will shut down after 1 hour of inactivity)
+   # -2, --permanentcluster      # Don't automatically stop the cluster after it has been idle for an hour (Default: false--cluster will be shut down after 1 hour of inactivity)
+   # -3, --spot                  # Use spot instances for this cluster (Default: false, only applicable to new clusters)
+   # -P, --project PROJECTNAME   # Use a project that is not checked out in the current directory. Runs code from project's master branch in GitHub rather than snapshotting local code.
+   # -B, --branch BRANCHNAME     # Used with --project to specify a non-master branch
+   #
+   # Examples:
+   #
+   #     Run the classify_text sparkscript:
+   #         $ mortar spark sparkscripts/classify_text.py
+   #
+   #     Run the classify_text sparkscript with 3 script arguments (input location, output location, tuning parameter):
+   #         $ mortar spark sparkscripts/classify_text.py s3://your-bucket/input s3://your-bucket/output 100
+   #
+   def index
+     script_name = shift_argument
+     unless script_name
+       error("Usage: mortar spark SCRIPT\nMust specify SCRIPT.")
+     end
+
+     if options[:project]
+       project_name = options[:project]
+     else
+       project_name = project.name
+       script = validate_sparkscript!(script_name)
+       script_name = script.name
+     end
+
+     script_arguments = spark_script_arguments()
+
+     if options[:clusterid]
+       [:clustersize, :singlejobcluster, :permanentcluster].each do |opt|
+         unless options[opt].nil?
+           error("Option #{opt.to_s} cannot be set when running a job on an existing cluster (with --clusterid option)")
+         end
+       end
+     end
+
+     if options[:project]
+       if options[:branch]
+         git_ref = options[:branch]
+       else
+         git_ref = "master"
+       end
+     else
+       git_ref = sync_code_with_cloud()
+     end
+
+     unless options[:clusterid] || options[:clustersize]
+       clusters = api.get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_2).body['clusters']
+
+       largest_free_cluster = clusters.select{ |c| \
+         c['running_jobs'].length == 0 && c['status_code'] == Mortar::API::Clusters::STATUS_RUNNING }.
+         max_by{|c| c['size']}
+
+       if largest_free_cluster.nil?
+         options[:clustersize] = 2
+         display("Defaulting to running job on new cluster of size 2")
+       else
+         options[:clusterid] = largest_free_cluster['cluster_id']
+         display("Defaulting to running job on largest existing free cluster, id = " +
+           largest_free_cluster['cluster_id'] + ", size = " + largest_free_cluster['size'].to_s)
+       end
+     end
+
+     response = action("Requesting job execution") do
+       if options[:clustersize]
+         if options[:singlejobcluster] && options[:permanentcluster]
+           error("Cannot declare cluster as both --singlejobcluster and --permanentcluster")
+         end
+         cluster_size = options[:clustersize].to_i
+         cluster_type = Mortar::API::Jobs::CLUSTER_TYPE__PERSISTENT
+         if options[:singlejobcluster]
+           cluster_type = Mortar::API::Jobs::CLUSTER_TYPE__SINGLE_JOB
+         elsif options[:permanentcluster]
+           cluster_type = Mortar::API::Jobs::CLUSTER_TYPE__PERMANENT
+         end
+         use_spot_instances = options[:spot] || false
+         api.post_spark_job_new_cluster(project_name, script_name, git_ref, cluster_size,
+           :project_script_path => script.rel_path,
+           :script_arguments => script_arguments,
+           :cluster_type => cluster_type,
+           :use_spot_instances => use_spot_instances).body
+       else
+         cluster_id = options[:clusterid]
+         api.post_spark_job_existing_cluster(project_name, script_name, git_ref, cluster_id,
+           :project_script_path => script.rel_path,
+           :script_arguments => script_arguments).body
+       end
+     end
+
+     display("job_id: #{response['job_id']}")
+     display
+     display("Job status can be viewed on the web at:\n\n #{response['web_job_url']}")
+     display
+
+     response['job_id']
+   end
+
+   alias_command "spark:run", "spark"
+ end
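
The free-cluster default in index deserves a second look. A self-contained Ruby sketch of the selection rule (plain strings stand in for the API response and Mortar::API::Clusters::STATUS_RUNNING):

    clusters = [
      { 'cluster_id' => 'a', 'size' => 2,  'running_jobs' => [],        'status_code' => 'running' },
      { 'cluster_id' => 'b', 'size' => 5,  'running_jobs' => [],        'status_code' => 'running' },
      { 'cluster_id' => 'c', 'size' => 20, 'running_jobs' => ['job_1'], 'status_code' => 'running' },
    ]
    free    = clusters.select { |c| c['running_jobs'].length == 0 && c['status_code'] == 'running' }
    largest = free.max_by { |c| c['size'] }
    largest['cluster_id']   #=> "b" -- the 20-node cluster is skipped because it has a running job

When no free running cluster exists, the command instead requests a new 2-node cluster, mirroring the Pig path in jobs.rb.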
data/lib/mortar/generators/project_generator.rb CHANGED
@@ -76,6 +76,13 @@ module Mortar
        generate_file "client.cfg.template", "client.cfg.template"
      end
 
+     mkdir "sparkscripts"
+
+     inside "sparkscripts" do
+       copy_file "README", "README"
+       generate_file "sparkscript.py", "#{project_name}_pi.py"
+     end
+
      mkdir "lib"
      inside "lib" do
        copy_file "README", "README"
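
In other words, a project generated as some_new_project now also receives sparkscripts/README and sparkscripts/some_new_project_pi.py rendered from the template, which the generator and project specs below assert.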
data/lib/mortar/git.rb CHANGED
@@ -202,6 +202,7 @@ module Mortar
      #
      def ensure_valid_mortar_project_manifest()
        if File.exists? project_manifest_name
+         ensure_sparkscripts_in_project_manifest()
          ensure_luigiscripts_in_project_manifest()
          ensure_gitignore_in_project_manifest()
          add_newline_to_file(project_manifest_name)
@@ -210,6 +211,18 @@ module Mortar
        end
      end
 
+     #
+     # Ensure that the sparkscripts directory,
+     # which was added after some project manifests were
+     # created, is in the manifest (if sparkscripts exists).
+     #
+     def ensure_sparkscripts_in_project_manifest
+       sparkscripts_path = "sparkscripts"
+       if File.directory? sparkscripts_path
+         add_entry_to_mortar_project_manifest(project_manifest_name, sparkscripts_path)
+       end
+     end
+
      #
      # Ensure that the luigiscripts directory,
      # which was added after some project manifests were
@@ -250,6 +263,10 @@ module Mortar
          manifest.puts "luigiscripts"
        end
 
+       if File.directory? "#{path}/sparkscripts"
+         manifest.puts "sparkscripts"
+       end
+
        if File.exists? "#{path}/.gitignore"
          manifest.puts ".gitignore"
        end
data/lib/mortar/project.rb CHANGED
@@ -85,6 +85,19 @@ module Mortar
      @luigiscripts
    end
 
+   def sparkscripts_path
+     File.join(@root_path, "sparkscripts")
+   end
+
+   def sparkscripts
+     @sparkscripts ||= SparkScripts.new(
+       sparkscripts_path,
+       "sparkscripts",
+       "",
+       :optional => true)
+     @sparkscripts
+   end
+
    def tmp_path
      path = File.join(@root_path, "tmp")
      unless File.directory? path
@@ -138,7 +151,6 @@ module Mortar
      if File.directory? @path
        # get {script_name => full_path}
        file_paths = Dir[File.join(@path, "**", "*#{@filename_extension}")]
-
        scripts = file_paths.collect{|element_path| [element_name(element_path), element(element_name(element_path), element_path)]}
 
        #Check for duplicates.
@@ -179,6 +191,12 @@ module Mortar
      end
    end
 
+   class SparkScripts < ProjectEntity
+     def element(name, path)
+       SparkScript.new(name, path)
+     end
+   end
+
    class PythonUDFs < ProjectEntity
      def element(name, path)
        Script.new(name, path)
@@ -219,6 +237,14 @@ module Mortar
      end
 
    end
+
+   class SparkScript < Script
+
+     def executable_path
+       "sparkscripts/#{self.name}"
+     end
+
+   end
 
    class ControlScript < Script
 
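A brief illustration of the new accessors, using only the calls exercised by validate_sparkscript! above (filename borrowed from the specs below; SparkScripts is built with an empty filename_extension, so lookup keys keep their .py suffix):

    project.sparkscripts["my_script.py"].executable_path        #=> "sparkscripts/my_script.py"
    project.sparkscripts.none?                                  #=> false
    project.sparkscripts.collect { |k, v| v.executable_path }   #=> ["sparkscripts/my_script.py"]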
data/lib/mortar/templates/project/project.manifest CHANGED
@@ -8,6 +8,7 @@
 
  lib
  luigiscripts
+ sparkscripts
  macros
  pigscripts
  udfs
data/lib/mortar/templates/project/sparkscripts/README ADDED
@@ -0,0 +1 @@
+ The sparkscripts directory is where your spark scripts should be stored.
data/lib/mortar/templates/project/sparkscripts/sparkscript.py ADDED
@@ -0,0 +1,46 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+
+ """
+ This is an example Spark script to get you started. This script will run
+ a simple Spark script that will calculate the value of pi.
+
+ To Run:
+     mortar spark sparkscripts/<%= project_name %>_pi.py [partitions]"
+ """
+
+ import sys
+ from random import random
+ from operator import add
+
+ from pyspark import SparkContext
+
+
+ if __name__ == "__main__":
+     sc = SparkContext(appName="PythonPi")
+     partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
+     n = 100000 * partitions
+
+     def f(_):
+         x = random() * 2 - 1
+         y = random() * 2 - 1
+         return 1 if x ** 2 + y ** 2 < 1 else 0
+
+     count = sc.parallelize(xrange(1, n + 1), partitions).map(f).reduce(add)
+     print "Pi is roughly %f" % (4.0 * count / n)
+
+     sc.stop()
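
For the record, the template's estimate works because the sampled points are uniform on the square [-1, 1] x [-1, 1]: a point lands inside the unit circle with probability pi/4 (circle area pi over square area 4), so pi is approximately 4.0 * count / n. With the default of 2 partitions that is n = 200,000 samples.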
data/lib/mortar/version.rb CHANGED
@@ -16,5 +16,5 @@
 
  module Mortar
    # see http://semver.org/
-   VERSION = "0.15.32"
+   VERSION = "0.15.33"
  end
data/spec/mortar/command/clusters_spec.rb CHANGED
@@ -32,24 +32,26 @@ module Mortar::Command
             "size" => 2,
             "status_description" => "Running",
             "start_timestamp" => "2012-08-27T21:27:15.669000+00:00",
-            "duration" => "2 mins"},
+            "duration" => "2 mins",
+            "cluster_backend_description" => "Hadoop 1 (Pig)" },
            {"cluster_id" => "50fbe5a23004292547fc2225",
-            "size" => 10,
-            "status_description" => "Shut Down",
-            "start_timestamp" => "2011-08-27T21:27:15.669000+00:00",
-            "duration" => "20 mins"}]
-       mock(Mortar::Auth.api).get_clusters().returns(Excon::Response.new(:body => {"clusters" => clusters}))
+            "size" => 10,
+            "status_description" => "Shut Down",
+            "start_timestamp" => "2011-08-27T21:27:15.669000+00:00",
+            "duration" => "20 mins",
+            "cluster_backend_description" => "Hadoop 2 (Spark)" }]
+       mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__ALL).returns(Excon::Response.new(:body => {"clusters" => clusters}))
        stderr, stdout = execute("clusters", nil, nil)
        stdout.should == <<-STDOUT
- cluster_id                Size (# of Nodes)  Status     Type  Start Timestamp                   Elapsed Time
- ------------------------  -----------------  ---------  ----  --------------------------------  ------------
- 50fbe5a23004292547fc2224  2                  Running          2012-08-27T21:27:15.669000+00:00  2 mins
- 50fbe5a23004292547fc2225  10                 Shut Down        2011-08-27T21:27:15.669000+00:00  20 mins
+ cluster_id                Size (# of Nodes)  Status     Type  Start Timestamp                   Elapsed Time  Cluster Backend
+ ------------------------  -----------------  ---------  ----  --------------------------------  ------------  ----------------
+ 50fbe5a23004292547fc2224  2                  Running          2012-08-27T21:27:15.669000+00:00  2 mins        Hadoop 1 (Pig)
+ 50fbe5a23004292547fc2225  10                 Shut Down        2011-08-27T21:27:15.669000+00:00  20 mins       Hadoop 2 (Spark)
 STDOUT
      end
 
      it "handles no clusters running" do
-       mock(Mortar::Auth.api).get_clusters().returns(Excon::Response.new(:body => {"clusters" => []}))
+       mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__ALL).returns(Excon::Response.new(:body => {"clusters" => []}))
        stderr, stdout = execute("clusters", nil, nil)
        stdout.should == <<-STDOUT
 There are no running or recent clusters
data/spec/mortar/command/generate_spec.rb CHANGED
@@ -40,6 +40,7 @@ describe Mortar::Command::Generate do
      File.exists?("Test/pigscripts/Test.pig").should be_true
      File.exists?("Test/udfs/python/Test.py").should be_true
      File.exists?("Test/luigiscripts/README").should be_true
+     File.exists?("Test/sparkscripts/README").should be_true
 
      File.read("Test/pigscripts/Test.pig").each_line { |line| line.match(/<%.*%>/).should be_nil }
    end
data/spec/mortar/command/jobs_spec.rb CHANGED
@@ -281,7 +281,7 @@ STDOUT
      job_url = "http://127.0.0.1:5000/jobs/job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5"
      cluster_size = 2
 
-     mock(Mortar::Auth.api).get_clusters() {Excon::Response.new(:body => {'clusters' => []})}
+     mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_1) {Excon::Response.new(:body => {'clusters' => []})}
      mock(Mortar::Auth.api).post_pig_job_new_cluster("myproject", "my_script", is_a(String), cluster_size,
        :pig_version => "0.9",
        :project_script_path => be_a_kind_of(String),
@@ -319,7 +319,7 @@ STDOUT
      job_url = "http://127.0.0.1:5000/jobs/job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5"
      cluster_size = 2
 
-     mock(Mortar::Auth.api).get_clusters() {Excon::Response.new(:body => {'clusters' => []})}
+     mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_1) {Excon::Response.new(:body => {'clusters' => []})}
      mock(Mortar::Auth.api).post_pig_job_new_cluster("myproject", "my_script", is_a(String), cluster_size,
        :pig_version => "0.9",
        :project_script_path => be_a_kind_of(String),
@@ -399,7 +399,7 @@ STDOUT
      huge_busy_cluster_status = Mortar::API::Clusters::STATUS_RUNNING
 
 
-     mock(Mortar::Auth.api).get_clusters() {
+     mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_1) {
        Excon::Response.new(:body => {
          'clusters' => [
            { 'cluster_id' => small_cluster_id, 'size' => small_cluster_size, 'running_jobs' => [], 'status_code' => small_cluster_status },
data/spec/mortar/command/projects_spec.rb CHANGED
@@ -154,11 +154,15 @@ STDOUT
      File.exists?("luigiscripts/README").should be_true
      File.exists?("luigiscripts/some_new_project_luigi.py").should be_true
      File.exists?("luigiscripts/client.cfg.template").should be_true
+     File.exists?("sparkscripts/README").should be_true
+     File.exists?("sparkscripts/some_new_project_pi.py").should be_true
      File.exists?("lib/README").should be_true
      File.exists?("params/README").should be_true
 
      File.read("pigscripts/some_new_project.pig").each_line { |line| line.match(/<%.*%>/).should be_nil }
      File.read("luigiscripts/some_new_project_luigi.py").each_line { |line| line.match(/<%.*%>/).should be_nil }
+     File.read("sparkscripts/some_new_project_pi.py").each_line { |line| line.match(/<%.*%>/).should be_nil }
+
 
      stdout.should == <<-STDOUT
 \r\e[0KVerifying GitHub username: /\r\e[0KVerifying GitHub username: -\r\e[0KVerifying GitHub username: Done!
@@ -185,6 +189,9 @@ Sending request to register project: some_new_project... done
 \e[1;32m create\e[0m luigiscripts/README
 \e[1;32m create\e[0m luigiscripts/some_new_project_luigi.py
 \e[1;32m create\e[0m luigiscripts/client.cfg.template
+\e[1;32m create\e[0m sparkscripts
+\e[1;32m create\e[0m sparkscripts/README
+\e[1;32m create\e[0m sparkscripts/some_new_project_pi.py
 \e[1;32m create\e[0m lib
 \e[1;32m create\e[0m lib/README
 \e[1;32m create\e[0m params
data/spec/mortar/command/spark_spec.rb ADDED
@@ -0,0 +1,154 @@
+ #
+ # Copyright 2014 Mortar Data Inc.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+
+ require 'spec_helper'
+ require 'fakefs/spec_helpers'
+ require 'mortar/command/spark'
+ require 'mortar/api/jobs'
+
+ module Mortar::Command
+   describe Spark do
+
+     before(:each) do
+       stub_core
+       @git = Mortar::Git::Git.new
+     end
+
+     context("index") do
+       it "shows help when user adds help argument" do
+         with_git_initialized_project do |p|
+           stderr_dash_h, stdout_dash_h = execute("spark -h", p, @git)
+           stderr_help, stdout_help = execute("spark help", p, @git)
+           stdout_dash_h.should == stdout_help
+           stderr_dash_h.should == stderr_help
+         end
+       end
+
+       it "runs a spark job with no arguments new cluster" do
+         with_git_initialized_project do |p|
+           # stub api requests
+           job_id = "c571a8c7f76a4fd4a67c103d753e2dd5"
+           job_url = "http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5"
+           mock(Mortar::Auth.api).post_spark_job_new_cluster("myproject", "my_script.py", is_a(String), 4,
+             :project_script_path => be_a_kind_of(String),
+             :script_arguments => "",
+             :cluster_type=>"persistent",
+             :use_spot_instances=>false
+           ) {Excon::Response.new(:body => {"job_id" => job_id, "web_job_url" => job_url})}
+
+           write_file(File.join(p.sparkscripts_path, "my_script.py"))
+           stderr, stdout = execute("spark sparkscripts/my_script.py --clustersize 4", p, @git)
+           puts stderr
+           stdout.should == <<-STDOUT
+ Taking code snapshot... done
+ Sending code snapshot to Mortar... done
+ Requesting job execution... done
+ job_id: c571a8c7f76a4fd4a67c103d753e2dd5
+
+ Job status can be viewed on the web at:
+
+  http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5
+
+ STDOUT
+         end
+       end
+
+       it "runs a spark job with script_arguments existing cluster" do
+         with_git_initialized_project do |p|
+           # stub api requests
+           job_id = "c571a8c7f76a4fd4a67c103d753e2dd5"
+           cluster_id = "c571a8c7f76a4fd4a67c103d753e2dd7"
+           job_url = "http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5"
+           script_arguments = "arg1 arg2 arg3"
+           mock(Mortar::Auth.api).post_spark_job_existing_cluster("myproject", "my_script.py", is_a(String), cluster_id,
+             :project_script_path => be_a_kind_of(String),
+             :script_arguments => script_arguments
+           ) {Excon::Response.new(:body => {"job_id" => job_id, "web_job_url" => job_url})}
+
+           write_file(File.join(p.sparkscripts_path, "my_script.py"))
+           stderr, stdout = execute("spark sparkscripts/my_script.py --clusterid #{cluster_id} #{script_arguments}", p, @git)
+           stdout.should == <<-STDOUT
+ Taking code snapshot... done
+ Sending code snapshot to Mortar... done
+ Requesting job execution... done
+ job_id: c571a8c7f76a4fd4a67c103d753e2dd5
+
+ Job status can be viewed on the web at:
+
+  http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5
+
+ STDOUT
+         end
+       end
+
+       it "runs a spark job with on free cluster" do
+         with_git_initialized_project do |p|
+           # stub api requests
+           job_id = "c571a8c7f76a4fd4a67c103d753e2dd5"
+           job_url = "http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5"
+           script_arguments = "arg1 arg2 arg3"
+
+           small_cluster_id = '510beb6b3004860820ab6538'
+           small_cluster_size = 2
+           small_cluster_status = Mortar::API::Clusters::STATUS_RUNNING
+           large_cluster_id = '510bf0db3004860820ab6590'
+           large_cluster_size = 5
+           large_cluster_status = Mortar::API::Clusters::STATUS_RUNNING
+           starting_cluster_id = '510bf0db3004860820abaaaa'
+           starting_cluster_size = 10
+           starting_cluster_status = Mortar::API::Clusters::STATUS_STARTING
+           huge_busy_cluster_id = '510bf0db3004860820ab6621'
+           huge_busy_cluster_size = 20
+           huge_busy_cluster_status = Mortar::API::Clusters::STATUS_RUNNING
+
+
+           mock(Mortar::Auth.api).get_clusters(Mortar::API::Jobs::CLUSTER_BACKEND__EMR_HADOOP_2) {
+             Excon::Response.new(:body => {
+               'clusters' => [
+                 { 'cluster_id' => small_cluster_id, 'size' => small_cluster_size, 'running_jobs' => [], 'status_code' => small_cluster_status },
+                 { 'cluster_id' => large_cluster_id, 'size' => large_cluster_size, 'running_jobs' => [], 'status_code' => large_cluster_status },
+                 { 'cluster_id' => starting_cluster_id, 'size' => starting_cluster_size, 'running_jobs' => [], 'status_code' => starting_cluster_status },
+                 { 'cluster_id' => huge_busy_cluster_id, 'size' => huge_busy_cluster_size,
+                   'running_jobs' => [ { 'job_id' => 'c571a8c7f76a4fd4a67c103d753e2dd5',
+                     'job_name' => "", 'start_timestamp' => ""} ], 'status_code' => huge_busy_cluster_status }
+               ]})
+           }
+
+           mock(Mortar::Auth.api).post_spark_job_existing_cluster("myproject", "my_script.py", is_a(String), large_cluster_id,
+             :project_script_path => be_a_kind_of(String),
+             :script_arguments => script_arguments
+           ) {Excon::Response.new(:body => {"job_id" => job_id, "web_job_url" => job_url})}
+
+           write_file(File.join(p.sparkscripts_path, "my_script.py"))
+           stderr, stdout = execute("spark sparkscripts/my_script.py #{script_arguments}", p, @git)
+           stdout.should == <<-STDOUT
+ Taking code snapshot... done
+ Sending code snapshot to Mortar... done
+ Defaulting to running job on largest existing free cluster, id = 510bf0db3004860820ab6590, size = 5
+ Requesting job execution... done
+ job_id: c571a8c7f76a4fd4a67c103d753e2dd5
+
+ Job status can be viewed on the web at:
+
+  http://127.0.0.1:5000/jobs/spark_job_detail?job_id=c571a8c7f76a4fd4a67c103d753e2dd5
+
+ STDOUT
+         end
+       end
+
+     end
+   end
+ end
data/spec/mortar/git_spec.rb CHANGED
@@ -154,6 +154,7 @@ macros
 pigscripts
 udfs
 luigiscripts
+ sparkscripts
 MANIFEST0
        manifest_path = File.join(p.root_path, "project.manifest")
        write_file(manifest_path, manifest_without_gitignore)
@@ -170,6 +171,7 @@ macros
 pigscripts
 udfs
 luigiscripts
+ sparkscripts
 .gitignore
 MANIFEST0AFTER
      end
@@ -190,6 +192,7 @@ macros
 pigscripts
 udfs
 luigiscripts
+ sparkscripts
 MANIFEST1
        manifest_path = File.join(p.root_path, "project.manifest")
        write_file(manifest_path, manifest_without_gitignore)
@@ -206,6 +209,7 @@ macros
 pigscripts
 udfs
 luigiscripts
+ sparkscripts
 MANIFEST1AFTER
      end
    end
@@ -277,6 +281,73 @@ MANIFEST1AFTER
      end
    end
 
+     it "adds sparkscripts if the directory exists and manifest does not have it" do
+       with_git_initialized_project do |p|
+         # ensure luigiscripts path exists
+         sparkscripts_path = File.join(p.root_path, "sparkscripts")
+         unless File.directory? sparkscripts_path
+           FileUtils.mkdir_p(sparkscripts_path)
+         end
+
+         # remove it from manifest
+         manifest_without_sparkscripts = <<-MANIFEST0
+ lib
+ macros
+ pigscripts
+ udfs
+ MANIFEST0
+         manifest_path = File.join(p.root_path, "project.manifest")
+         write_file(manifest_path, manifest_without_sparkscripts)
+
+         project_manifest_before = File.open(manifest_path, "r").read
+         project_manifest_before.include?("sparkscripts").should be_false
+
+         @git.ensure_sparkscripts_in_project_manifest()
+
+         project_manifest_after = File.open(manifest_path, "r").read
+         project_manifest_after.should == <<-MANIFEST0AFTER
+ lib
+ macros
+ pigscripts
+ udfs
+ sparkscripts
+ MANIFEST0AFTER
+       end
+     end
+
+     it "does not add sparkscripts if the directory does not exist" do
+       with_git_initialized_project do |p|
+         # ensure sparkscripts path does not exist
+         sparkscripts_path = File.join(p.root_path, "sparkscripts")
+         if File.directory? sparkscripts_path
+           FileUtils.rm_rf(sparkscripts_path)
+         end
+
+         # remove it from manifest
+         manifest_without_sparkscripts = <<-MANIFEST1
+ lib
+ macros
+ pigscripts
+ udfs
+ MANIFEST1
+         manifest_path = File.join(p.root_path, "project.manifest")
+         write_file(manifest_path, manifest_without_sparkscripts)
+
+         project_manifest_before = File.open(manifest_path, "r").read
+         project_manifest_before.include?("sparkscripts").should be_false
+
+         @git.ensure_sparkscripts_in_project_manifest()
+
+         project_manifest_after = File.open(manifest_path, "r").read
+         project_manifest_after.should == <<-MANIFEST1AFTER
+ lib
+ macros
+ pigscripts
+ udfs
+ MANIFEST1AFTER
+       end
+     end
+
     it "handles manifest with no trailing newline" do
       with_git_initialized_project do |p|
         # ensure luigiscripts path exists
data/spec/spec_helper.rb CHANGED
@@ -180,6 +180,7 @@ def with_blank_project_with_name(name, &block)
    FileUtils.mkdir_p(project_path)
 
    # setup project subdirectories
+   FileUtils.mkdir_p(File.join(project_path, "sparkscripts"))
    FileUtils.mkdir_p(File.join(project_path, "luigiscripts"))
    FileUtils.mkdir_p(File.join(project_path, "pigscripts"))
    FileUtils.mkdir_p(File.join(project_path, "macros"))
metadata CHANGED
@@ -1,230 +1,204 @@
- --- !ruby/object:Gem::Specification
+ --- !ruby/object:Gem::Specification
  name: mortar
- version: !ruby/object:Gem::Version
-   hash: 99
-   prerelease:
-   segments:
-   - 0
-   - 15
-   - 32
-   version: 0.15.32
+ version: !ruby/object:Gem::Version
+   version: 0.15.33
  platform: ruby
- authors:
+ authors:
  - Mortar Data
  autorequire:
  bindir: bin
  cert_chain: []
-
- date: 2014-11-11 00:00:00 Z
- dependencies:
- - !ruby/object:Gem::Dependency
+ date: 2015-01-21 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
    name: rdoc
-   prerelease: false
-   requirement: &id001 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         hash: 63
-         segments:
-         - 4
-         - 0
-         - 0
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
          version: 4.0.0
    type: :runtime
-   version_requirements: *id001
- - !ruby/object:Gem::Dependency
-   name: mortar-api-ruby
    prerelease: false
-   requirement: &id002 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 4.0.0
+ - !ruby/object:Gem::Dependency
+   name: mortar-api-ruby
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 41
-         segments:
-         - 0
-         - 8
-         - 11
-         version: 0.8.11
+       - !ruby/object:Gem::Version
+         version: 0.8.12
    type: :runtime
-   version_requirements: *id002
- - !ruby/object:Gem::Dependency
-   name: netrc
    prerelease: false
-   requirement: &id003 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 5
-         segments:
-         - 0
-         - 7
-         version: "0.7"
+       - !ruby/object:Gem::Version
+         version: 0.8.12
+ - !ruby/object:Gem::Dependency
+   name: netrc
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '0.7'
    type: :runtime
-   version_requirements: *id003
- - !ruby/object:Gem::Dependency
-   name: launchy
    prerelease: false
-   requirement: &id004 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 1
-         segments:
-         - 2
-         - 1
-         version: "2.1"
+       - !ruby/object:Gem::Version
+         version: '0.7'
+ - !ruby/object:Gem::Dependency
+   name: launchy
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '2.1'
    type: :runtime
-   version_requirements: *id004
- - !ruby/object:Gem::Dependency
-   name: parseconfig
    prerelease: false
-   requirement: &id005 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 19
-         segments:
-         - 1
-         - 0
-         - 2
+       - !ruby/object:Gem::Version
+         version: '2.1'
+ - !ruby/object:Gem::Dependency
+   name: parseconfig
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
          version: 1.0.2
    type: :runtime
-   version_requirements: *id005
- - !ruby/object:Gem::Dependency
-   name: aws-sdk
    prerelease: false
-   requirement: &id006 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.0.2
+ - !ruby/object:Gem::Dependency
+   name: aws-sdk
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 15
-         segments:
-         - 1
-         - 0
-         version: "1.0"
+       - !ruby/object:Gem::Version
+         version: '1.0'
    type: :runtime
-   version_requirements: *id006
- - !ruby/object:Gem::Dependency
-   name: nokogiri
    prerelease: false
-   requirement: &id007 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 3
-         segments:
-         - 1
-         - 5
-         - 0
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: nokogiri
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
          version: 1.5.0
    type: :runtime
-   version_requirements: *id007
- - !ruby/object:Gem::Dependency
-   name: excon
    prerelease: false
-   requirement: &id008 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 51
-         segments:
-         - 0
-         - 28
-         version: "0.28"
+       - !ruby/object:Gem::Version
+         version: 1.5.0
+ - !ruby/object:Gem::Dependency
+   name: excon
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '0.28'
    type: :development
-   version_requirements: *id008
- - !ruby/object:Gem::Dependency
-   name: fakefs
    prerelease: false
-   requirement: &id009 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '0.28'
+ - !ruby/object:Gem::Dependency
+   name: fakefs
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 11
-         segments:
-         - 0
-         - 4
-         - 2
+       - !ruby/object:Gem::Version
          version: 0.4.2
    type: :development
-   version_requirements: *id009
- - !ruby/object:Gem::Dependency
-   name: gem-release
    prerelease: false
-   requirement: &id010 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         hash: 3
-         segments:
-         - 0
-         version: "0"
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.4.2
+ - !ruby/object:Gem::Dependency
+   name: gem-release
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
    type: :development
-   version_requirements: *id010
- - !ruby/object:Gem::Dependency
-   name: rake
    prerelease: false
-   requirement: &id011 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 73
-         segments:
-         - 10
-         - 1
-         - 1
+       - !ruby/object:Gem::Version
          version: 10.1.1
    type: :development
-   version_requirements: *id011
- - !ruby/object:Gem::Dependency
-   name: rr
    prerelease: false
-   requirement: &id012 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         hash: 3
-         segments:
-         - 0
-         version: "0"
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 10.1.1
+ - !ruby/object:Gem::Dependency
+   name: rr
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
    type: :development
-   version_requirements: *id012
- - !ruby/object:Gem::Dependency
-   name: rspec
    prerelease: false
-   requirement: &id013 !ruby/object:Gem::Requirement
-     none: false
-     requirements:
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
      - - ~>
-       - !ruby/object:Gem::Version
-         hash: 3
-         segments:
-         - 2
-         - 0
-         version: "2.0"
+       - !ruby/object:Gem::Version
+         version: '2.0'
    type: :development
-   version_requirements: *id013
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '2.0'
  description: Client library and command-line tool to interact with the Mortar service.
  email: support@mortardata.com
- executables:
+ executables:
  - mortar
  extensions: []
-
  extra_rdoc_files: []
-
- files:
+ files:
  - README.md
  - bin/mortar
  - css/illustrate.css
@@ -254,6 +228,7 @@ files:
  - lib/mortar/command/plugins.rb
  - lib/mortar/command/projects.rb
  - lib/mortar/command/s3.rb
+ - lib/mortar/command/spark.rb
  - lib/mortar/command/validate.rb
  - lib/mortar/command/version.rb
  - lib/mortar/conf/luigi/logging.ini
@@ -314,6 +289,8 @@ files:
  - lib/mortar/templates/project/project.manifest
  - lib/mortar/templates/project/project.properties
  - lib/mortar/templates/project/requirements.txt
+ - lib/mortar/templates/project/sparkscripts/README
+ - lib/mortar/templates/project/sparkscripts/sparkscript.py
  - lib/mortar/templates/project/udfs/java/gitkeep
  - lib/mortar/templates/project/udfs/jython/gitkeep
  - lib/mortar/templates/project/udfs/python/python_udf.py
@@ -347,6 +324,7 @@ files:
  - spec/mortar/command/pigscripts_spec.rb
  - spec/mortar/command/projects_spec.rb
  - spec/mortar/command/s3_spec.rb
+ - spec/mortar/command/spark_spec.rb
  - spec/mortar/command/validate_spec.rb
  - spec/mortar/command/version_spec.rb
  - spec/mortar/command_spec.rb
@@ -370,38 +348,25 @@ files:
  - spec/support/display_message_matcher.rb
  homepage: http://mortardata.com/
  licenses: []
-
+ metadata: {}
  post_install_message:
  rdoc_options: []
-
- require_paths:
+ require_paths:
  - lib
- required_ruby_version: !ruby/object:Gem::Requirement
-   none: false
-   requirements:
-   - - ">="
-     - !ruby/object:Gem::Version
-       hash: 57
-       segments:
-       - 1
-       - 8
-       - 7
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
        version: 1.8.7
- required_rubygems_version: !ruby/object:Gem::Requirement
-   none: false
-   requirements:
-   - - ">="
-     - !ruby/object:Gem::Version
-       hash: 3
-       segments:
-       - 0
-       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
  requirements: []
-
  rubyforge_project:
- rubygems_version: 1.8.15
+ rubygems_version: 2.2.2
  signing_key:
- specification_version: 3
+ specification_version: 4
  summary: Client library and CLI to interact with the Mortar service.
  test_files: []
-