s3_website_revived 4.0.0

Files changed (48)
  1. checksums.yaml +7 -0
  2. data/.gitignore +15 -0
  3. data/.travis.yml +5 -0
  4. data/Gemfile +3 -0
  5. data/LICENSE +42 -0
  6. data/README.md +591 -0
  7. data/Rakefile +2 -0
  8. data/additional-docs/debugging.md +21 -0
  9. data/additional-docs/development.md +29 -0
  10. data/additional-docs/example-configurations.md +113 -0
  11. data/additional-docs/running-from-ec2-with-dropbox.md +6 -0
  12. data/additional-docs/setting-up-aws-credentials.md +52 -0
  13. data/assembly.sbt +3 -0
  14. data/bin/s3_website +285 -0
  15. data/build.sbt +48 -0
  16. data/changelog.md +596 -0
  17. data/lib/s3_website/version.rb +3 -0
  18. data/lib/s3_website.rb +7 -0
  19. data/project/assembly.sbt +1 -0
  20. data/project/build.properties +1 -0
  21. data/project/plugins.sbt +1 -0
  22. data/release +41 -0
  23. data/resources/configuration_file_template.yml +67 -0
  24. data/resources/s3_website.jar.md5 +1 -0
  25. data/s3_website-4.0.0.jar +0 -0
  26. data/s3_website.gemspec +34 -0
  27. data/sbt +3 -0
  28. data/src/main/resources/log4j.properties +6 -0
  29. data/src/main/scala/s3/website/ByteHelper.scala +18 -0
  30. data/src/main/scala/s3/website/CloudFront.scala +144 -0
  31. data/src/main/scala/s3/website/Logger.scala +67 -0
  32. data/src/main/scala/s3/website/Push.scala +246 -0
  33. data/src/main/scala/s3/website/Ruby.scala +14 -0
  34. data/src/main/scala/s3/website/S3.scala +239 -0
  35. data/src/main/scala/s3/website/UploadHelper.scala +76 -0
  36. data/src/main/scala/s3/website/model/Config.scala +249 -0
  37. data/src/main/scala/s3/website/model/S3Endpoint.scala +35 -0
  38. data/src/main/scala/s3/website/model/Site.scala +159 -0
  39. data/src/main/scala/s3/website/model/push.scala +225 -0
  40. data/src/main/scala/s3/website/model/ssg.scala +30 -0
  41. data/src/main/scala/s3/website/package.scala +182 -0
  42. data/src/test/scala/s3/website/AwsSdkSpec.scala +15 -0
  43. data/src/test/scala/s3/website/ConfigSpec.scala +150 -0
  44. data/src/test/scala/s3/website/S3EndpointSpec.scala +15 -0
  45. data/src/test/scala/s3/website/S3WebsiteSpec.scala +1480 -0
  46. data/src/test/scala/s3/website/UnitTest.scala +11 -0
  47. data/vagrant/Vagrantfile +25 -0
  48. metadata +195 -0
data/additional-docs/debugging.md ADDED
@@ -0,0 +1,21 @@
+ # Tips for debugging
+
+ ## Debugging with source code
+
+ First, clone the git repository:
+
+     git clone https://github.com/ivoanjo/s3_website_revived.git /tmp/s3_website
+
+ Next, edit a source file.
+
+ For example, you can change the AWS logging level in the
+ [src/main/resources/log4j.properties](https://github.com/laurilehmijoki/s3_website/blob/master/src/main/resources/log4j.properties) file. See the [AWS SDK for Java docs](http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/java-dg-logging.html).
+
+ Another example: modify a `.scala` file by adding a `print()` statement at a
+ relevant location.
+
+ Then push your website with the cloned code:
+
+     cd YOUR_WEBSITE_DIR
+     /tmp/s3_website/bin/s3_website push
+
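The note above about the AWS logging level refers to the `log4j.properties` file bundled with the project. Since that file isn't reproduced here, the snippet below is only a sketch of the kind of change meant, using standard log4j 1.x syntax (log4j 1.2.17 is the version declared in `build.sbt`); the appender name and layout are placeholders:

```properties
# Sketch only: raise the AWS SDK's log level to DEBUG (log4j 1.x syntax).
# The appender name and layout are placeholders; reuse whatever the
# project's existing log4j.properties already defines.
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss} %-5p %c - %m%n

# The AWS SDK for Java logs under the com.amazonaws package.
log4j.logger.com.amazonaws=DEBUG
```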
data/additional-docs/development.md ADDED
@@ -0,0 +1,29 @@
+ ## Coding
+
+ Install a Scala editor. IntelliJ IDEA has great Scala support.
+
+ If you use IDEA, install the [Grep
+ Console](http://plugins.jetbrains.com/plugin/?idea&pluginId=7125) plugin. It
+ shows the ANSI colors in your IDEA console.
+
+ ### Test runs with IDEA
+
+ 1. Create a run profile: *Run* -> *Edit Configurations...*
+ 2. Add *Application*
+ 3. Set *Main class* to `s3.website.Push`
+ 4. Set *Program arguments* to `--site=/Users/you/yourtestsite/_site --config-dir=/Users/you/yourtestsite --verbose`
+
+ ## Automated tests
+
+     ./sbt test
+
+ ## Test Linux distributions
+
+ Use Vagrant for testing the installation procedure on Linux.
+
+ Here's how:
+
+ 1. Install Vagrant: <https://www.vagrantup.com/downloads.html>
+ 2. `cd vagrant && vagrant status`
+ 3. Launch with `vagrant up <name>` and SSH into it with `vagrant ssh <name>`
+ 4. Test the latest release with `gem install s3_website && s3_website push`
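For the `./sbt test` step above, the existing specs live under `src/test/scala/s3/website/`. A minimal sketch of an additional spec, assuming the project's specs2 setup (specs2 2.3.11 is declared as a test dependency in `build.sbt`); the class name and assertion are placeholders:

```scala
package s3.website

import org.specs2.mutable.Specification

// Placeholder spec: drop a file like this under src/test/scala/s3/website/
// and it will be picked up by `./sbt test`.
class ExampleSpec extends Specification {
  "the example" should {
    "pass a trivial assertion" in {
      (1 + 1) must equalTo(2)
    }
  }
}
```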
data/additional-docs/example-configurations.md ADDED
@@ -0,0 +1,113 @@
+ # Example `s3_website` configurations
+
+ This document shows examples of complete `s3_website.yml` configurations.
+
+ ## Minimal
+
+ ````yaml
+ s3_bucket: your.domain.net
+ ````
+
+ This configuration will use AWS access credentials from the environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. If those are not set, it will fall back to the credentials saved by `aws configure`.
+
+ If you run `s3_website` on an EC2 instance with IAM roles, this configuration will use the instance's role instead.
+
+ ## Minimal with explicit credentials
+
+ ````yaml
+ s3_id: abcd
+ s3_secret: 2s+x92
+ s3_bucket: your.domain.net
+ ````
+
+ Use caution when embedding AWS credentials directly in `s3_website.yml`. Do not commit the file to a public Git repository or share it publicly.
+
+ ## Minimal for temporary security credentials
+
+ ````yaml
+ s3_id: abcd
+ s3_secret: 2s+x92
+ session_token: hex!xeh
+ s3_bucket: your.domain.net
+ ````
+
+ ## Optimised for speed
+
+ Use CloudFront, gzip, cache headers and greater concurrency:
+
+ ````yaml
+ s3_bucket: your.domain.net
+ cloudfront_distribution_id: <%= ENV['your_domain_net_cloudfront_distribution_id'] %>
+ cloudfront_distribution_config:
+   default_cache_behavior:
+     min_ttl: <%= 60 * 60 * 24 %>
+   aliases:
+     quantity: 1
+     items:
+       - your.domain.net
+ max_age: 120
+ gzip: true
+ ````
+
+ In this example, we keep the CloudFront distribution ID in an environment variable.
+ This is convenient, since you can keep the `s3_website.yml` in a public Git repo, and
+ thus have your deployment configurations version-controlled.
+
+ ## Setup for HTTP/2 and a Custom SNI SSL Certificate
+
+ To fully utilize HTTP/2, you'll need to set up SSL for your distribution. While HTTP/2 does
+ not mandate the use of encryption, it turns out that [all of the common web browsers
+ require the use of HTTPS connections in conjunction with HTTP/2](http://caniuse.com/#feat=http2).
+ Therefore, you may need to make some changes to your site or application in order
+ to take full advantage of HTTP/2. While you can test the site with the default
+ CloudFront certificate, you will likely want to use a custom SSL certificate.
+ This isn't yet automated by s3_website, but it only takes [a few manual steps](https://medium.com/@richardkall/setup-lets-encrypt-ssl-certificate-on-amazon-cloudfront-b217669987b2#.7jyust8os),
+ and the certificate is now free thanks to Let's Encrypt.
+
+ ````yaml
+ s3_bucket: your.domain.net
+ cloudfront_distribution_id: <%= ENV['your_domain_net_cloudfront_distribution_id'] %>
+ cloudfront_distribution_config:
+   default_cache_behavior:
+     min_ttl: <%= 60 * 60 * 24 %>
+   http_version: http2
+ max_age: 120
+ gzip: true
+ ````
+
+ ## Multiple CNAMEs
+
+ Sometimes you want to use multiple CNAME aliases in your CloudFront distribution:
+
+ ````yaml
+ s3_bucket: your.domain.net
+ cloudfront_distribution_id: <%= ENV['your_domain_net_cloudfront_distribution_id'] %>
+ cloudfront_distribution_config:
+   default_cache_behavior:
+     min_ttl: <%= 60 * 60 * 24 %>
+   aliases:
+     quantity: 3
+     items:
+       - your1.domain.net
+       - your2.domain.net
+       - your3.domain.net
+ max_age: 120
+ gzip: true
+ ````
+
+ Always remember to set the `quantity` property to match the number of items you have.
+
+ ## Using redirects
+
+ ````yaml
+ redirects:
+   index.php: /
+   about.php: about.html
+ routing_rules:
+   - condition:
+       key_prefix_equals: code/repositories/git/
+     redirect:
+       host_name: git.johnny.com
+       replace_key_prefix_with: ""
+       http_redirect_code: 301
+ ````
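The "Minimal with explicit credentials" example above warns against committing credentials. One way to avoid hard-coding them, sketched here with placeholder variable names, is to reuse the same ERB interpolation that the CloudFront examples use for the distribution ID:

````yaml
s3_id: <%= ENV['S3_WEBSITE_ACCESS_KEY_ID'] %>
s3_secret: <%= ENV['S3_WEBSITE_SECRET_ACCESS_KEY'] %>
s3_bucket: your.domain.net
````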
data/additional-docs/running-from-ec2-with-dropbox.md ADDED
@@ -0,0 +1,6 @@
+ # Running from an EC2 instance with Jekyll files on Dropbox
+
+ Based on [this](http://namelesshorror.com/2015/02/26/jekyll-dropbox-aws-and-ifttt-easy-blogging/) article about automating Jekyll deployment via an EC2 instance, I wrote the following shell script for such a setup. It assumes a Linux Amazon EC2 instance with the Dropbox client running as a daemon, and s3_website installed and configured. The script can be run periodically from the default user's cron, effectively letting anyone serve their Jekyll source files to AWS from Dropbox.
+
+
+ [jekyll-s3-dropbox.sh](https://gist.github.com/RNCTX/359489a5432937578bf5736850917d70)
data/additional-docs/setting-up-aws-credentials.md ADDED
@@ -0,0 +1,52 @@
+ # Setting up AWS credentials
+
+ Before starting to use s3\_website, you need to create AWS credentials.
+
+ ## Easy setup
+
+ * Go to the [AWS IAM console](https://console.aws.amazon.com/iam)
+ * Create a new user that has full permissions to the S3 and CloudFront services
+ * Call `s3_website cfg create` and place the credentials of your new AWS user
+   into the *s3_website.yml* file
+ * Read the main documentation for further info
+
+ ## Limiting the permissions of the credentials
+
+ AWS IAM offers multiple ways of limiting the permissions of a user. Below is one
+ way of configuring the limitations while retaining the ability to use all
+ s3\_website features.
+
+ If you know the hostname of your public website (say `my.website.com`), perform the
+ following steps:
+
+ * Create a user that has full permissions to the S3 bucket
+ * In addition, let the user have full permissions to CloudFront
+
+ Here is the IAM Policy Document of the above setup:
+
+ ```json
+ {
+   "Version": "2012-10-17",
+   "Statement": [
+     {
+       "Action": [
+         "cloudfront:*"
+       ],
+       "Effect": "Allow",
+       "Resource": [
+         "*"
+       ]
+     },
+     {
+       "Action": [
+         "s3:*"
+       ],
+       "Effect": "Allow",
+       "Resource": [
+         "arn:aws:s3:::my.website.com",
+         "arn:aws:s3:::my.website.com/*"
+       ]
+     }
+   ]
+ }
+ ```
data/assembly.sbt ADDED
@@ -0,0 +1,3 @@
+ import AssemblyKeys._ // put this at the top of the file
+
+ assemblySettings
data/bin/s3_website ADDED
@@ -0,0 +1,285 @@
+ #!/usr/bin/env ruby
+
+ require File.expand_path(File.dirname(__FILE__)+ '/../lib/s3_website')
+ require 'colored'
+ require 'digest/md5'
+
+ class Cfg < Thor
+   def self.exit_on_failure?
+     true
+   end
+
+   desc 'create', 'Create a config file with placeholder values'
+   def create
+     config_file_src = File.dirname(__FILE__) + '/../resources/configuration_file_template.yml'
+     config_file = Dir.pwd + '/s3_website.yml'
+     unless File.exists? config_file
+       require 'fileutils'
+       FileUtils.cp config_file_src, config_file
+       puts "Created the config file s3_website.yml. Go fill it with your settings."
+     end
+   rescue Exception => error
+     puts "#{error.message} (#{error.class})"
+     exit 1
+   end
+
+   option(
+     :headless,
+     :type => :boolean,
+     :desc => "Apply the settings headlessly. See --autocreate-cloudfront-distribution for more info."
+   )
+   option(
+     'autocreate-cloudfront-dist',
+     :type => :boolean,
+     :desc => "When used with --headless, automatically create a CloudFront distribution for your S3 website."
+   )
+   option(
+     'config-dir',
+     :type => :string,
+     :desc => "The directory where your config file is. When not defined, s3_website will look in the current working directory.",
+     :default => "."
+   )
+   desc 'apply', 'Apply the configuration on the AWS services'
+   long_desc <<-LONGDESC
+     `s3_website cfg apply` will apply the configuration on the S3 bucket.
+
+     In addition, if you have CloudFront related settings, this command will apply them on the CloudFront distribution.
+
+     If the S3 bucket does not exist, this command will create it
+     and configure it to function as a website.
+   LONGDESC
+   def apply
+     puts 'Applying the configurations in s3_website.yml on the AWS services ...'
+     require 'configure-s3-website'
+     config_source = ConfigureS3Website::FileConfigSource.new "#{options['config-dir']}/s3_website.yml"
+     ConfigureS3Website::Runner.run({
+       :config_source => config_source,
+       :headless => options[:headless],
+       'autocreate-cloudfront-dist' => options['autocreate-cloudfront-dist']
+     })
+   rescue Exception => error
+     puts "#{error.message} (#{error.class})"
+     exit 1
+   end
+ end
+
+ class Cli < Thor
+   def self.exit_on_failure?
+     true
+   end
+
+   option(
+     :site,
+     :type => :string,
+     :desc => "The directory where your website files are. When not defined, s3_website will look for the site in either _site or public/output."
+   )
+   option(
+     'config-dir'.to_sym,
+     :type => :string,
+     :desc => "The directory where your config file is. When not defined, s3_website will look in the current working directory."
+   )
+   option(
+     :verbose,
+     :type => :boolean,
+     :default => false,
+     :desc => "Print verbose output"
+   )
+   option(
+     :force,
+     :type => :boolean,
+     :default => false,
+     :desc => "Skip diff calculation and push all the files. This option is useful when you need to update metadata on the S3 objects."
+   )
+   option(
+     :dry_run,
+     :type => :boolean,
+     :default => false,
+     :desc => "Run the operation without actually making the modifications. When this switch is on, changes will not be applied on the S3 website. They will be only printed to the console."
+   )
+   desc 'push', 'Push local files with the S3 website'
+   long_desc <<-LONGDESC
+     `s3_website push` will upload new and changed files to S3. It will
+     also delete from S3 the files that you no longer have locally.
+   LONGDESC
+   def push
+     project_root = File.expand_path(File.dirname(__FILE__)+ '/..')
+     logger = Logger.new(options[:verbose])
+     success =
+       if false && run_with_sbt(project_root) # TODO: Disabled this because current sbt is too old
+         Dir.chdir(project_root) {
+           system './sbt assembly' # Build the jar
+         }
+         system "java -cp #{project_root}/target/scala-2.11/s3_website.jar #{resolve_java_command 'push'}"
+       else
+         # Find the jar
+         jar_file = resolve_jar(project_root, logger)
+         # Then run it
+         run_s3_website_jar(jar_file, logger)
+       end
+
+     if success
+       exit 0
+     else
+       exit 1
+     end
+   end
+
+   desc 'install', 'Download s3_website.jar'
+   long_desc <<-LONGDESC
+     Use `s3_website install` to download the s3_website.jar dependency
+     before you run `s3_website push` (good for CI dependency caching).
+   LONGDESC
+   def install
+     project_root = File.expand_path(File.dirname(__FILE__)+ '/..')
+     logger = Logger.new(options[:verbose])
+     resolve_jar(project_root, logger)
+   end
+
+   desc 'cfg SUBCOMMAND ...ARGS', 'Operate on the config file'
+   subcommand 'cfg', Cfg
+ end
+
+ def run_with_sbt(project_root)
+   File.exists?(project_root + '/project/sbt-launch.jar')
+ end
+
+ def resolve_java_command(command_name)
+   args = ARGV.join(' ').sub(command_name, '')
+   "s3.website.#{command_name.capitalize} #{args}"
+ end
+
+ def run_s3_website_jar(jar_file, logger)
+   java_installed = (resolve_exit_status('which java') or resolve_exit_status('java -version'))
+   unless java_installed
+     logger.info_msg "Cannot find Java. s3_website push is implemented in Scala, and it needs Java to run."
+     autoinstall_java_or_print_help_and_exit(logger)
+   end
+   logger.debug_msg "Using #{jar_file}"
+   system("java -cp #{jar_file} #{resolve_java_command 'push'}")
+ end
+
+ def resolve_exit_status(cmd)
+   `#{cmd}`
+   cmd_succeeded = $? == 0
+ rescue
+   cmd_succeeded = false
+ end
+
+ def autoinstall_java_or_print_help_and_exit(logger)
+   @logger = logger
+   automatic_methods = [
+     {
+       :package_manager_lookup => 'which apt-get',
+       :install_command => 'sudo apt-get install --assume-yes openjdk-7-jre'
+     },
+     {
+       :package_manager_lookup => 'which yum',
+       :install_command => 'sudo yum install --assumeyes java-1.7.0-openjdk'
+     },
+     {
+       :package_manager_lookup => 'which dnf',
+       :install_command => 'sudo dnf install --assumeyes java-1.7.0-openjdk'
+     }
+   ]
+
+   def print_manual_method_and_exit
+     @logger.info_msg 'Go to http://java.com, install Java and then try again.'
+     @logger.info_msg "(If you cannot or do not want to install Java, you can use latest 1.x version of this gem, which requires only Ruby. For more info, see https://github.com/laurilehmijoki/s3_website/tree/1.x)"
+     exit 1
+   end
+
+   automatic_method = automatic_methods.find { |automatic_method|
+     resolve_exit_status automatic_method.fetch(:package_manager_lookup)
+   }
+   if automatic_method
+     @logger.info_msg "Do you want me to install Java with the command `#{automatic_method.fetch(:install_command)}`? [Y/n]"
+     user_answer = $stdin.gets
+     if user_answer.chomp.downcase == 'y' or user_answer == "\n"
+       automatic_method_succeeded = system automatic_method.fetch(:install_command)
+       unless automatic_method_succeeded
+         @logger.fail_msg "Could not automatically install Java. Try setting it up manually:"
+         print_manual_method_and_exit
+       end
+     else
+       print_manual_method_and_exit
+     end
+   else
+     print_manual_method_and_exit
+   end
+ end
+
+ def resolve_jar(project_root, logger)
+   jar_lookup_paths = [
+     project_root + "/s3_website-#{S3Website::VERSION}.jar",
+     (ENV['TMPDIR'] || '/tmp') + "/s3_website-#{S3Website::VERSION}.jar"
+   ]
+   found_jar = jar_lookup_paths.
+     select { |jar_path|
+       File.exist? jar_path
+     }.
+     first
+   # def jar_has_valid_checksum(jar_path, logger)
+   #   expected_checksum = File.read(File.dirname(__FILE__) + '/../resources/s3_website.jar.md5')
+   #   found_checksum = Digest::MD5.file(jar_path).hexdigest
+   #   if expected_checksum == found_checksum
+   #     true
+   #   else
+   #     logger.info_msg "The jar file is corrupted. (Expected checksum #{expected_checksum} but got #{found_checksum}.)"
+   #     false
+   #   end
+   # end
+   jar_file =
+     if found_jar #and jar_has_valid_checksum(found_jar, logger)
+       found_jar
+     else
+       download_jar(jar_lookup_paths, logger)
+     end
+ end
+
+ def download_jar(jar_lookup_paths, logger)
+   tag_name = "v#{S3Website::VERSION}"
+   downloaded_jar = jar_lookup_paths.select { |jar_path|
+     File.writable? File.dirname(jar_path)
+   }.first
+   unless downloaded_jar
+     logger.fail_msg "Neither #{jar_lookup_paths.join ' or '} is writable. Cannot download s3_website.jar."
+     logger.fail_msg "Set either directory as writable to the current user and try again."
+     exit 1
+   end
+   download_url = "https://github.com/laurilehmijoki/s3_website/releases/download/#{tag_name}/s3_website.jar"
+   logger.info_msg "Downloading #{download_url} into #{downloaded_jar}"
+   require 'open-uri'
+   open(downloaded_jar, 'wb') do |file|
+     file << URI.open(download_url).read
+   end
+   downloaded_jar
+ end
+
+ class Logger
+   attr_reader :verbose
+   def initialize(verbose)
+     @verbose = verbose
+   end
+
+   def debug_msg(msg)
+     if verbose
+       print_msg 'debg'.cyan, msg
+     end
+   end
+
+   def info_msg(msg)
+     print_msg 'info'.blue, msg
+   end
+
+   def fail_msg(msg)
+     print_msg 'fail'.red, msg
+   end
+
+   private
+
+   def print_msg(prefix, msg)
+     puts "[#{prefix}] #{msg}"
+   end
+ end
+
+ Cli.start(ARGV)
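Taken together, the Thor classes above define the gem's command-line surface. A typical sequence of invocations, using only the subcommands declared in this script (options omitted):

    s3_website cfg create   # write a template s3_website.yml into the current directory
    s3_website cfg apply    # create/configure the S3 bucket (and CloudFront, if configured)
    s3_website install      # optionally pre-download s3_website.jar (useful for CI caching)
    s3_website push         # upload new and changed files, delete files removed locally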
data/build.sbt ADDED
@@ -0,0 +1,48 @@
+ import AssemblyKeys._
+
+ name := "s3_website"
+
+ version := "0.0.1"
+
+ scalaVersion := "2.11.12"
+
+ scalacOptions += "-feature"
+
+ scalacOptions += "-language:implicitConversions"
+
+ scalacOptions += "-language:postfixOps"
+
+ scalacOptions += "-target:jvm-1.6"
+
+ libraryDependencies += "org.yaml" % "snakeyaml" % "1.13"
+
+ libraryDependencies += "org.jruby" % "jruby" % "9.3.3.0"
+
+ libraryDependencies += "com.amazonaws" % "aws-java-sdk-s3" % "1.12.145" excludeAll ExclusionRule(organization = "com.fasterxml")
+ libraryDependencies += "com.amazonaws" % "aws-java-sdk-cloudfront" % "1.12.145" excludeAll ExclusionRule(organization = "com.fasterxml")
+ libraryDependencies += "com.amazonaws" % "aws-java-sdk-sts" % "1.12.145" excludeAll ExclusionRule(organization = "com.fasterxml")
+ libraryDependencies += "javax.xml.bind" % "jaxb-api" % "2.3.1"
+
+ libraryDependencies += "log4j" % "log4j" % "1.2.17"
+
+ libraryDependencies += "commons-codec" % "commons-codec" % "1.9"
+
+ libraryDependencies += "commons-io" % "commons-io" % "2.4"
+
+ libraryDependencies += "org.apache.tika" % "tika-core" % "1.4"
+
+ libraryDependencies += "com.lexicalscope.jewelcli" % "jewelcli" % "0.8.9"
+
+ libraryDependencies += "org.specs2" %% "specs2" % "2.3.11" % "test"
+
+ resolvers += "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
+
+ jarName in assembly := "s3_website.jar"
+
+ test in assembly := {}
+
+ mergeStrategy in assembly := {
+   case PathList("module-info.class", xs @ _*) => MergeStrategy.discard
+   case PathList("META-INF", "MANIFEST.MF", xs @ _*) => MergeStrategy.discard
+   case x => MergeStrategy.first
+ }
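For reference, the assembly settings above produce the fat jar that the Ruby wrapper script looks for (it checks `target/scala-2.11/s3_website.jar` when building from source); the jar is built with the same command the wrapper invokes:

    ./sbt assembly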