s3_website_revived 4.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +15 -0
- data/.travis.yml +5 -0
- data/Gemfile +3 -0
- data/LICENSE +42 -0
- data/README.md +591 -0
- data/Rakefile +2 -0
- data/additional-docs/debugging.md +21 -0
- data/additional-docs/development.md +29 -0
- data/additional-docs/example-configurations.md +113 -0
- data/additional-docs/running-from-ec2-with-dropbox.md +6 -0
- data/additional-docs/setting-up-aws-credentials.md +52 -0
- data/assembly.sbt +3 -0
- data/bin/s3_website +285 -0
- data/build.sbt +48 -0
- data/changelog.md +596 -0
- data/lib/s3_website/version.rb +3 -0
- data/lib/s3_website.rb +7 -0
- data/project/assembly.sbt +1 -0
- data/project/build.properties +1 -0
- data/project/plugins.sbt +1 -0
- data/release +41 -0
- data/resources/configuration_file_template.yml +67 -0
- data/resources/s3_website.jar.md5 +1 -0
- data/s3_website-4.0.0.jar +0 -0
- data/s3_website.gemspec +34 -0
- data/sbt +3 -0
- data/src/main/resources/log4j.properties +6 -0
- data/src/main/scala/s3/website/ByteHelper.scala +18 -0
- data/src/main/scala/s3/website/CloudFront.scala +144 -0
- data/src/main/scala/s3/website/Logger.scala +67 -0
- data/src/main/scala/s3/website/Push.scala +246 -0
- data/src/main/scala/s3/website/Ruby.scala +14 -0
- data/src/main/scala/s3/website/S3.scala +239 -0
- data/src/main/scala/s3/website/UploadHelper.scala +76 -0
- data/src/main/scala/s3/website/model/Config.scala +249 -0
- data/src/main/scala/s3/website/model/S3Endpoint.scala +35 -0
- data/src/main/scala/s3/website/model/Site.scala +159 -0
- data/src/main/scala/s3/website/model/push.scala +225 -0
- data/src/main/scala/s3/website/model/ssg.scala +30 -0
- data/src/main/scala/s3/website/package.scala +182 -0
- data/src/test/scala/s3/website/AwsSdkSpec.scala +15 -0
- data/src/test/scala/s3/website/ConfigSpec.scala +150 -0
- data/src/test/scala/s3/website/S3EndpointSpec.scala +15 -0
- data/src/test/scala/s3/website/S3WebsiteSpec.scala +1480 -0
- data/src/test/scala/s3/website/UnitTest.scala +11 -0
- data/vagrant/Vagrantfile +25 -0
- metadata +195 -0
@@ -0,0 +1,159 @@
|
|
1
|
+
package s3.website.model
|
2
|
+
|
3
|
+
import java.io.File
|
4
|
+
import s3.website.Push.CliArgs
|
5
|
+
import s3.website.model.Ssg.autodetectSiteDir
|
6
|
+
|
7
|
+
import scala.util.Try
|
8
|
+
import org.yaml.snakeyaml.Yaml
|
9
|
+
import s3.website.model.Config._
|
10
|
+
import scala.io.Source.fromFile
|
11
|
+
import scala.language.postfixOps
|
12
|
+
import s3.website.{S3Key, Logger, ErrorReport}
|
13
|
+
import scala.util.Failure
|
14
|
+
import s3.website.model.Config.UnsafeYaml
|
15
|
+
import scala.util.Success
|
16
|
+
|
17
|
+
case class Site(rootDirectory: File, config: Config) {
  /**
   * Maps a local file to its S3 key: the path relative to the site root,
   * normalized to forward slashes, prefixed with the configured `s3_key_prefix`.
   */
  def resolveS3Key(file: File) = {
    val pathInsideSite = file.getAbsolutePath.replace(rootDirectory.getAbsolutePath, "")
    val unixStylePath = pathInsideSite.replace(File.separator, "/").replaceFirst("^/", "")
    S3Key.build(unixStylePath, config.s3_key_prefix)
  }
}
|
23
|
+
|
24
|
+
object Site {

  /**
   * Parses s3_website.yml into a typed [[Config]].
   *
   * Pipeline: read the file, evaluate any embedded ERB, load the result with
   * SnakeYAML, then load each setting individually. The first setting that
   * fails to load short-circuits the right-biased Either chain with an
   * [[ErrorReport]].
   */
  def parseConfig(implicit logger: Logger, yamlConfig: S3_website_yml): Either[ErrorReport, Config] = {
    // Every step can throw (I/O, ERB evaluation, YAML syntax), hence the Try chain.
    val yamlObjectTry = for {
      yamlString <- Try(fromFile(yamlConfig.file).mkString)
      yamlWithErbEvaluated <- erbEval(yamlString, yamlConfig)
      yamlObject <- Try(new Yaml() load yamlWithErbEvaluated)
    } yield yamlObject

    yamlObjectTry match {
      case Success(yamlObject) =>
        // The load* helpers below read their values from this implicit raw YAML object.
        implicit val unsafeYaml = UnsafeYaml(yamlObject)
        for {
          s3_id <- loadOptionalString("s3_id").right
          s3_secret <- loadOptionalString("s3_secret").right
          session_token <- loadOptionalString("session_token").right
          profile <- loadOptionalString("profile").right
          profile_assume_role_arn <- loadOptionalString("profile_assume_role_arn").right
          s3_bucket <- loadRequiredString("s3_bucket").right
          s3_endpoint <- loadEndpoint.right
          site <- loadOptionalString("site").right
          max_age <- loadMaxAge.right
          cache_control <- loadCacheControl.right
          gzip <- loadOptionalBooleanOrStringSeq("gzip").right
          gzip_zopfli <- loadOptionalBoolean("gzip_zopfli").right
          extensionless_mime_type <- loadOptionalString("extensionless_mime_type").right
          s3_key_prefix <- loadOptionalString("s3_key_prefix").right
          ignore_on_server <- loadOptionalS3KeyRegexes("ignore_on_server").right
          exclude_from_upload <- loadOptionalS3KeyRegexes("exclude_from_upload").right
          s3_reduced_redundancy <- loadOptionalBoolean("s3_reduced_redundancy").right
          cloudfront_distribution_id <- loadOptionalString("cloudfront_distribution_id").right
          cloudfront_invalidate_root <- loadOptionalBoolean("cloudfront_invalidate_root").right
          content_type <- loadContentType.right
          concurrency_level <- loadOptionalInt("concurrency_level").right
          cloudfront_wildcard_invalidation <- loadOptionalBoolean("cloudfront_wildcard_invalidation").right
          redirects <- loadRedirects(s3_key_prefix).right
          treat_zero_length_objects_as_redirects <- loadOptionalBoolean("treat_zero_length_objects_as_redirects").right
        } yield {
          // Zopfli is accepted in the config for backward compatibility but not honored.
          gzip_zopfli.foreach(_ => logger.info(
            """|Zopfli is not currently supported. Falling back to regular gzip.
               |If you find a stable Java implementation for zopfli, please send an email to lauri.lehmijoki@iki.fi about it."""
              .stripMargin))
          // Mime types are now inferred with Tika, so this legacy setting is a no-op.
          extensionless_mime_type.foreach(_ => logger.info(
            s"Ignoring the extensionless_mime_type setting in $yamlConfig. Counting on Apache Tika to infer correct mime types.")
          )
          Config(
            s3_id,
            s3_secret,
            session_token,
            profile,
            profile_assume_role_arn,
            s3_bucket,
            s3_endpoint getOrElse S3Endpoint.defaultEndpoint,
            site,
            max_age,
            cache_control,
            gzip,
            gzip_zopfli,
            s3_key_prefix,
            ignore_on_server = ignore_on_server,
            exclude_from_upload = exclude_from_upload,
            s3_reduced_redundancy,
            cloudfront_distribution_id,
            cloudfront_invalidate_root,
            content_type,
            redirects,
            // Floor of 20 concurrent operations regardless of the configured value.
            concurrency_level.fold(20)(_ max 20),
            cloudfront_wildcard_invalidation,
            treat_zero_length_objects_as_redirects
          )
        }
      case Failure(error) =>
        Left(ErrorReport(error))
    }
  }

  /** Parses the config and then resolves the directory holding the built site. */
  def loadSite(implicit yamlConfig: S3_website_yml, cliArgs: CliArgs, workingDirectory: File, logger: Logger): Either[ErrorReport, Site] =
    parseConfig.right.flatMap { cfg =>
      implicit val config: Config = cfg
      resolveSiteDir.right.map(Site(_, config))
    }

  /** User-facing error text shown when no site directory could be located. */
  def noSiteFound(explanation: String) =
    s"""|
        |$explanation
        |Either use the --site=DIR command-line argument or define the location of the site in s3_website.yml.
        |
        |Here's an example of how you can define the site directory in s3_website.yml:
        |    site: dist/website""".stripMargin

  /**
   * Resolves the site directory with the precedence: --site CLI argument,
   * then the `site` setting in s3_website.yml, then generator autodetection.
   */
  def resolveSiteDir(implicit yamlConfig: S3_website_yml, config: Config, cliArgs: CliArgs, workingDirectory: File): Either[ErrorReport, File] = {
    // Autodetect only when the config does not pin a site directory.
    val siteFromAutoDetect = if (config.site.isEmpty) { autodetectSiteDir(workingDirectory) } else { None }
    val errOrSiteFromCliArgs: Either[ErrorReport, Option[File]] = Option(cliArgs.site) match {
      case Some(siteDirFromCliArgs) =>
        val f = new File(siteDirFromCliArgs)
        if (f.exists())
          Right(Some(f))
        else
          Left(ErrorReport(noSiteFound(s"Could not find a site at $siteDirFromCliArgs. Check the --site argument.")))
      case None => Right(None)
    }

    // Collect candidates in precedence order; a missing CLI/config dir is an error.
    val errOrAvailableSiteDirs: Either[ErrorReport, List[File]] = for {
      s1 <- errOrSiteFromCliArgs.right
      s2 <- siteFromConfig.right
      s3 <- Right(siteFromAutoDetect).right
    } yield {
      (s1 :: s2 :: s3 :: Nil) collect {
        case Some(file) => file
      }
    }
    errOrAvailableSiteDirs.right.flatMap {
      case mostPreferredSiteDir :: xs => Right(mostPreferredSiteDir)
      case Nil => Left(ErrorReport(noSiteFound("Could not find a website.")))
    }
  }

  /**
   * Resolves the `site` setting from the config file. A relative path is
   * interpreted relative to the directory containing s3_website.yml.
   */
  def siteFromConfig(implicit yamlConfig: S3_website_yml, config: Config, workingDirectory: File): Either[ErrorReport, Option[File]] = {
    val siteConfig = config
      .site
      .map(new File(_))
      .map { siteDir =>
        if (siteDir.isAbsolute) siteDir
        else new File(yamlConfig.file.getParentFile, siteDir.getPath)
      }

    siteConfig match {
      case s @ Some(siteDir) =>
        if (siteDir.exists())
          Right(s)
        else
          Left(ErrorReport(noSiteFound(s"Could not find a website. (The site setting in s3_website.yml points to a non-existing file $siteDir)")))
      case None =>
        Right(None)
    }
  }
}
|
@@ -0,0 +1,225 @@
|
|
1
|
+
package s3.website.model
|
2
|
+
|
3
|
+
import java.io.File.createTempFile
|
4
|
+
import java.io._
|
5
|
+
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
|
6
|
+
|
7
|
+
import com.amazonaws.services.s3.model.S3ObjectSummary
|
8
|
+
import org.apache.commons.codec.digest.DigestUtils
|
9
|
+
import org.apache.commons.io.IOUtils
|
10
|
+
import org.apache.tika.Tika
|
11
|
+
import s3.website._
|
12
|
+
import s3.website.model.Encoding.{Gzip, Zopfli}
|
13
|
+
import s3.website.model.Upload.{amountOfMagicGzipBytes, tika}
|
14
|
+
|
15
|
+
import scala.concurrent.{ExecutionContextExecutor, Future}
|
16
|
+
import scala.util.Try
|
17
|
+
|
18
|
+
object Encoding {

  // Extensions that are gzipped when the `gzip` config setting is simply `true`
  // (rather than an explicit list of extensions).
  val defaultGzipExtensions = ".html" :: ".css" :: ".js" :: ".txt" :: ".ico" :: Nil

  // Marker types describing which compression would be applied to an object on S3.
  // (Zopfli is accepted in config but not actually supported — see Upload.encodingOnS3.)
  case class Gzip()
  case class Zopfli()
}
|
25
|
+
|
26
|
+
// Why a file is being pushed; each variant carries the PushAction reported to the user.
sealed trait UploadType {
  val pushAction: PushAction
}

// The object does not yet exist on S3.
case object NewFile extends UploadType {
  val pushAction = Created
}
// The object exists on S3 but the local contents differ.
case object FileUpdate extends UploadType {
  val pushAction = Updated
}

// A redirect entry (stored on S3 as a zero-length object — see Redirect.resolveRedirects).
case object RedirectFile extends UploadType {
  val pushAction = Redirected
}
|
40
|
+
|
41
|
+
/**
 * A local file scheduled for upload, together with everything needed to push it:
 * its S3 key, content type, cache headers, MD5 and (possibly gzipped) payload.
 *
 * Fix over the original: all three ad-hoc streams are now closed via `using`
 * (the ungzip output stream in `contentType`, the gzip input stream in
 * `uploadFile`, and the magic-byte probe in `originalFileIsGzipped`), closing
 * file-handle leaks.
 */
case class Upload(originalFile: File, uploadType: UploadType)(implicit site: Site, logger: Logger) {
  lazy val s3Key = site.resolveS3Key(originalFile)

  // Whether (and how) this object should be content-encoded on S3, per the `gzip`
  // setting: `true` uses the default extension list, a string list uses exactly those.
  lazy val encodingOnS3: Option[Either[Gzip, Zopfli]] =
    site.config.gzip.flatMap { (gzipSetting: Either[Boolean, Seq[String]]) =>
      val shouldZipThisFile = gzipSetting.fold(
        shouldGzip => Encoding.defaultGzipExtensions exists s3Key.key.endsWith,
        fileExtensions => fileExtensions exists s3Key.key.endsWith
      )
      if (shouldZipThisFile && site.config.gzip_zopfli.isDefined)
        Some(Right(Zopfli()))
      else if (shouldZipThisFile)
        Some(Left(Gzip()))
      else
        None
    }

  lazy val gzipEnabledByConfig: Boolean = encodingOnS3.fold(false)((algorithm: Either[Gzip, Zopfli]) => true)

  // Resolves the Content-Type header: an explicit content_type glob in the config wins,
  // otherwise Tika sniffs the decoded file. Text-like types get an explicit UTF-8 charset.
  lazy val contentType: Try[String] = tika map { tika =>
    val file = // This file contains the data that the user should see after decoding the the transport protocol (HTTP) encoding (practically: after ungzipping)
      if (fileIsGzippedByExternalBuildTool) {
        val unzippedFile = createTempFile("unzipped", originalFile.getName)
        unzippedFile.deleteOnExit()
        // Close BOTH ends of the copy so the temp file is flushed and no handle leaks.
        using(new GZIPInputStream(fis(originalFile))) { in =>
          using(new FileOutputStream(unzippedFile)) { out =>
            IOUtils.copy(in, out)
          }
        }
        unzippedFile
      } else {
        originalFile
      }
    val mimeType =
      site.config.content_type
        .flatMap { _.globMatch(s3Key) }
        .getOrElse { tika.detect(file) }
    if (mimeType.startsWith("text/") || mimeType == "application/json")
      mimeType + "; charset=utf-8"
    else
      mimeType
  }

  // max-age for this key: a plain int applies to everything, a glob map is matched per key.
  lazy val maxAge: Option[Int] =
    site.config.max_age.flatMap(
      _ fold(
        (maxAge: Int) => Some(maxAge),
        (globs: S3KeyGlob[Int]) => globs.globMatch(s3Key)
      )
    )

  // Cache-Control for this key, resolved the same way as maxAge.
  lazy val cacheControl: Option[String] =
    site.config.cache_control.flatMap(
      _ fold(
        (cacheCtrl: String) => Some(cacheCtrl),
        (globs: S3KeyGlob[String]) => globs.globMatch(s3Key)
      )
    )

  /**
   * MD5 of the payload that will actually be uploaded (i.e. of the gzipped file
   * when gzip is in effect).
   * May throw an exception, so remember to call this in a Try or Future monad.
   */
  lazy val md5 = uploadFile map { file =>
    using(fis { file }) { DigestUtils.md5Hex }
  }

  // This is the file we should try to upload: the original, or a gzipped temp copy.
  lazy val uploadFile: Try[File] =
    if (gzipEnabledByConfig)
      Try {
        if (fileIsGzippedByExternalBuildTool) {
          logger.debug(s"File ${originalFile.getAbsolutePath} is already gzipped. Skipping gzip.")
          originalFile
        } else {
          logger.debug(s"Gzipping file ${originalFile.getName}")
          val tempFile = createTempFile(originalFile.getName, "gzip")
          tempFile.deleteOnExit()
          // Close the source stream too — the original leaked fis(originalFile).
          using(fis(originalFile)) { in =>
            using(new GZIPOutputStream(new FileOutputStream(tempFile))) { out =>
              IOUtils.copy(in, out)
            }
          }
          tempFile
        }
      }
    else
      Try(originalFile)

  private lazy val fileIsGzippedByExternalBuildTool = gzipEnabledByConfig && originalFileIsGzipped

  // Detects a pre-gzipped file by checking the two-byte gzip magic number.
  private lazy val originalFileIsGzipped =
    if (originalFile.length() < amountOfMagicGzipBytes) {
      false
    } else {
      // Close the probe stream — the original opened a FileInputStream and never closed it.
      using(fis(originalFile)) { in =>
        val firstTwoBytes = Array.fill[Byte](amountOfMagicGzipBytes)(0)
        in.read(firstTwoBytes, 0, amountOfMagicGzipBytes)
        val head = firstTwoBytes(0) & 0xff | (firstTwoBytes(1) << 8) & 0xff00
        head == GZIPInputStream.GZIP_MAGIC
      }
    }

  private[this] def fis(file: File): InputStream = new FileInputStream(file)
  // Loan pattern: guarantees the resource is closed even when f throws.
  private[this] def using[T <: Closeable, R](cl: T)(f: (T) => R): R = try f(cl) finally cl.close()
}
|
141
|
+
|
142
|
+
object Upload {
  // Tika construction may throw, so it is captured in a Try and shared lazily
  // by every Upload instance.
  lazy val tika = Try(new Tika())
  // The gzip format begins with a two-byte magic number (GZIPInputStream.GZIP_MAGIC).
  private val amountOfMagicGzipBytes = 2
}
|
146
|
+
|
147
|
+
object Files {
  /**
   * Lists `f` and everything beneath it, directories included.
   * Idiom fix: `File.listFiles` returns null for non-directories and unreadable
   * paths; wrapping it in `Option(...)` replaces the explicit null check.
   */
  def recursiveListFiles(f: File): Seq[File] =
    Option(f.listFiles).fold(Seq.empty[File]) { these =>
      these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
    }

  /**
   * The local files to push: everything under the site root, minus directories
   * and keys excluded by config or by the built-in never-upload list.
   */
  def listSiteFiles(implicit site: Site, logger: Logger) = {
    def excludeFromUpload(s3Key: S3Key) = {
      val excludeByConfig = site.config.exclude_from_upload exists {
        _.s3KeyRegexes.exists(_ matches s3Key)
      }
      // The config file itself and .env must never be published.
      val neverUpload = "s3_website.yml" :: ".env" :: Nil map (k => S3Key.build(k, site.config.s3_key_prefix))
      val doNotUpload = excludeByConfig || (neverUpload contains s3Key)
      if (doNotUpload) logger.debug(s"Excluded $s3Key from upload")
      doNotUpload
    }
    recursiveListFiles(site.rootDirectory)
      .filterNot(_.isDirectory)
      .filterNot(f => excludeFromUpload(site.resolveS3Key(f)))
  }
}
|
171
|
+
|
172
|
+
// A redirect to be represented on S3. `needsUpload` is false when an equivalent
// zero-length redirect object already exists in the bucket.
case class Redirect(s3Key: S3Key, redirectTarget: String, needsUpload: Boolean) {
  def uploadType = RedirectFile
}

// A raw source-to-target pair as configured under `redirects` in s3_website.yml.
private case class RedirectSetting(source: S3Key, target: String)
|
177
|
+
|
178
|
+
object Redirect {
  type Redirects = Future[Either[ErrorReport, Seq[Redirect]]]

  /**
   * Turns the configured redirects into uploadable [[Redirect]]s.
   *
   * When `treat_zero_length_objects_as_redirects` is enabled (and --force is not),
   * redirects whose zero-length marker object already exists on S3 are flagged as
   * not needing upload; otherwise every configured redirect is (re)pushed.
   */
  def resolveRedirects(s3FileFutures: Future[Either[ErrorReport, Seq[S3File]]])
                      (implicit config: Config, executor: ExecutionContextExecutor, pushOptions: PushOptions): Redirects = {
    // Normalize each configured target through applyRedirectRules.
    val redirectSettings = config.redirects.fold(Nil: Seq[RedirectSetting]) { sourcesToTargets =>
      sourcesToTargets.foldLeft(Seq(): Seq[RedirectSetting]) {
        (redirects, sourceToTarget) =>
          redirects :+ RedirectSetting(sourceToTarget._1, applyRedirectRules(sourceToTarget._2))
      }
    }
    // def (not val): only consult the S3 listing when the optimization is enabled.
    def redirectsWithExistsOnS3Info =
      s3FileFutures.map(_.right.map { s3Files =>
        // Zero-length objects are assumed to be previously-pushed redirect markers.
        val existingRedirectKeys = s3Files.filter(_.size == 0).map(_.s3Key).toSet
        redirectSettings.map(redirectSetting =>
          Redirect(redirectSetting, needsUpload = !existingRedirectKeys.contains(redirectSetting.source))
        )
      })
    val uploadOnlyMissingRedirects =
      config.treat_zero_length_objects_as_redirects.contains(true) && !pushOptions.force
    val allConfiguredRedirects = Future(Right(redirectSettings.map(redirectSetting =>
      Redirect(redirectSetting, needsUpload = true)
    )))
    if (uploadOnlyMissingRedirects)
      redirectsWithExistsOnS3Info
    else
      allConfiguredRedirects
  }

  // In-site targets must be absolute; relative targets get the s3_key_prefix (if any)
  // prepended. External http(s) targets pass through untouched.
  private def applyRedirectRules(redirectTarget: String)(implicit config: Config) = {
    val isExternalRedirect = redirectTarget.matches("https?:\\/\\/.*")
    val isInSiteRedirect = redirectTarget.startsWith("/")
    if (isInSiteRedirect || isExternalRedirect)
      redirectTarget
    else
      s"${config.s3_key_prefix.map(prefix => s"/$prefix").getOrElse("")}/$redirectTarget"
  }

  def apply(redirectSetting: RedirectSetting, needsUpload: Boolean): Redirect =
    Redirect(redirectSetting.source, redirectSetting.target, needsUpload)
}
|
219
|
+
|
220
|
+
// Metadata of an object that already resides in the S3 bucket.
case class S3File(s3Key: S3Key, md5: MD5, size: Long)

object S3File {
  // NOTE(review): the S3 ETag is used as the MD5 here. That holds for simple PUTs
  // but not for multipart uploads — assumed acceptable for this tool's file sizes; confirm.
  def apply(summary: S3ObjectSummary)(implicit site: Site): S3File =
    S3File(S3Key.build(summary.getKey, None), summary.getETag, summary.getSize)
}
|
@@ -0,0 +1,30 @@
|
|
1
|
+
package s3.website.model
|
2
|
+
|
3
|
+
import java.io.File
|
4
|
+
import s3.website.model.Files.recursiveListFiles
|
5
|
+
|
6
|
+
// ssg = static site generator
|
7
|
+
// ssg = static site generator
// An implementation advertises the directory its generator writes the built site into,
// which autodetectSiteDir uses to locate the site without configuration.
trait Ssg {
  def outputDirectory: String
}
|
10
|
+
|
11
|
+
object Ssg {
  /** Generators whose output directory can be located without any configuration. */
  val automaticallySupportedSiteGenerators = List(Jekyll, Nanoc, Middleman)

  /**
   * Walks everything under `workingDirectory` and returns the first directory
   * whose path ends with a known generator's output directory, if any.
   */
  def autodetectSiteDir(workingDirectory: File): Option[File] = {
    def isGeneratorOutput(dir: File): Boolean =
      automaticallySupportedSiteGenerators.exists { generator =>
        dir.getAbsolutePath.endsWith(generator.outputDirectory)
      }
    recursiveListFiles(workingDirectory).find(candidate => candidate.isDirectory && isGeneratorOutput(candidate))
  }
}
|
19
|
+
|
20
|
+
// The generators autodetection knows about, each with its conventional output directory.
case object Jekyll extends Ssg {
  def outputDirectory = "_site"
}

case object Nanoc extends Ssg {
  // assumes Nanoc's default public/output layout — TODO confirm against Nanoc docs
  def outputDirectory = s"public${File.separatorChar}output"
}

case object Middleman extends Ssg {
  def outputDirectory = "build"
}
|
@@ -0,0 +1,182 @@
|
|
1
|
+
package s3
|
2
|
+
|
3
|
+
import s3.website.Ruby._
|
4
|
+
|
5
|
+
import scala.concurrent.{ExecutionContextExecutor, Future}
|
6
|
+
import scala.concurrent.duration.{TimeUnit, Duration}
|
7
|
+
import s3.website.S3.{PushSuccessReport, PushFailureReport}
|
8
|
+
import com.amazonaws.AmazonServiceException
|
9
|
+
import s3.website.model.{Config, Site}
|
10
|
+
import java.io.File
|
11
|
+
|
12
|
+
import scala.util.matching.Regex
|
13
|
+
|
14
|
+
package object website {
  // Base type for anything reported to the user after a push operation.
  trait Report {
    def reportMessage: String
  }
  trait SuccessReport extends Report

  trait ErrorReport extends Report

  object ErrorReport {
    // Builds an ErrorReport from an exception; includes the stack trace in verbose mode.
    def apply(t: Throwable)(implicit logger: Logger) = new ErrorReport {
      override def reportMessage = errorMessage(t)
    }

    // Builds an ErrorReport from a plain message.
    def apply(msg: String) = new ErrorReport {
      override def reportMessage = msg
    }

    def errorMessage(msg: String, t: Throwable)(implicit logger: Logger): String = s"$msg (${errorMessage(t)})"

    def errorMessage(t: Throwable)(implicit logger: Logger): String = {
      // Only attach the stack trace when the user asked for verbose output.
      val extendedReport =
        if (logger.verboseOutput)
          Some(t.getStackTrace)
        else
          None
      s"${t.getMessage}${extendedReport.fold("")(stackTraceElems => "\n" + stackTraceElems.mkString("\n"))}"
    }
  }

  // Per-operation retry configuration (the unit for the fibonacci backoff below).
  trait RetrySetting {
    def retryTimeUnit: TimeUnit
  }

  trait PushOptions {
    /**
     * @return true if the CLI option --dry-run is on
     */
    def dryRun: Boolean

    /**
     * @return true if the CLI option --force is on
     */
    def force: Boolean
  }

  // A regex matched against S3 keys with Ruby semantics (for s3_website.yml compatibility).
  case class S3KeyRegex(keyRegex: Regex) {
    def matches(s3Key: S3Key) = rubyRegexMatches(s3Key.key, keyRegex.pattern.pattern())
  }

  trait S3Key {
    val key: String
    override def toString = key
  }

  object S3Key {
    // Normalizes the configured prefix so it always ends with exactly one slash (or is empty).
    def prefix(s3_key_prefix: Option[String]) = s3_key_prefix.map(prefix => if (prefix.endsWith("/")) prefix else prefix + "/").getOrElse("")

    def isIgnoredBecauseOfPrefix(s3Key: S3Key)(implicit site: Site) = s3Key.key.startsWith(prefix(site.config.s3_key_prefix))

    case class S3KeyClass(key: String) extends S3Key
    // The only way keys are constructed: prefix (if any) + raw key.
    def build(key: String, s3_key_prefix: Option[String]): S3Key = S3KeyClass(prefix(s3_key_prefix) + key)
  }

  // A glob-to-value map (e.g. max_age per glob). Matching is delegated to Ruby's
  // File.fnmatch so the semantics match the original Ruby implementation.
  case class S3KeyGlob[T](globs: Map[String, T]) {
    def globMatch(s3Key: S3Key): Option[T] = {
      // Longest glob first, so the most specific pattern wins.
      def respectMostSpecific(globs: Map[String, T]) = globs.toSeq.sortBy(_._1.length).reverse
      val matcher = (glob: String, value: T) =>
        rubyRuntime.evalScriptlet(
          s"""|# encoding: utf-8
              |File.fnmatch('$glob', "$s3Key")""".stripMargin)
          .toJava(classOf[Boolean])
          .asInstanceOf[Boolean]
      val fileGlobMatch = respectMostSpecific(globs) find Function.tupled(matcher)
      fileGlobMatch map (_._2)
    }
  }

  case class S3KeyRegexes(s3KeyRegexes: Seq[S3KeyRegex]) {
    def matches(s3Key: S3Key) = s3KeyRegexes exists (
      (keyRegex: S3KeyRegex) => keyRegex matches s3Key
    )
  }

  type UploadDuration = Long

  // A user-visible verb describing what happened (or would happen) to an object.
  trait PushAction {
    def actionName = getClass.getSimpleName.replace("$", "") // case object class names contain the '$' char

    def renderVerb(implicit pushOptions: PushOptions): String =
      if (pushOptions.dryRun)
        s"Would have ${actionName.toLowerCase}"
      else
        s"$actionName"
  }
  case object Created extends PushAction
  case object Updated extends PushAction
  case object Redirected extends PushAction
  case object Deleted extends PushAction
  case object Transferred extends PushAction
  case object Invalidated extends PushAction
  case object Applied extends PushAction
  case object PushNothing extends PushAction {
    override def renderVerb(implicit pushOptions: PushOptions) =
      if (pushOptions.dryRun)
        s"Would have pushed nothing"
      else
        s"There was nothing to push"
  }
  case object Deploy extends PushAction {
    override def renderVerb(implicit pushOptions: PushOptions) =
      if (pushOptions.dryRun)
        s"Simulating the deployment of"
      else
        s"Deploying"
  }

  type PushErrorOrSuccess = Either[PushFailureReport, PushSuccessReport]

  type Attempt = Int

  type MD5 = String

  /**
   * Retry combinator for S3/CloudFront calls: returns a handler that either gives
   * up (after 6 attempts or an irrecoverable error) or sleeps for a fibonacci
   * backoff and retries.
   *
   * NOTE(review): Thread.sleep blocks the executor thread between attempts.
   */
  def retry[L <: Report, R](attempt: Attempt)
                           (createFailureReport: (Throwable) => L, retryAction: (Attempt) => Future[Either[L, R]])
                           (implicit retrySetting: RetrySetting, ec: ExecutionContextExecutor, logger: Logger):
  PartialFunction[Throwable, Future[Either[L, R]]] = {
    case error: Throwable if attempt == 6 || isIrrecoverable(error) =>
      val failureReport = createFailureReport(error)
      logger.fail(failureReport.reportMessage)
      Future(Left(failureReport))
    case error: Throwable =>
      val failureReport = createFailureReport(error)
      // Backoff duration grows along the fibonacci sequence with each attempt.
      val sleepDuration = Duration(fibs.drop(attempt + 1).head, retrySetting.retryTimeUnit)
      logger.pending(s"${failureReport.reportMessage}. Trying again in $sleepDuration.")
      Thread.sleep(sleepDuration.toMillis)
      retryAction(attempt + 1)
  }

  // 4xx AWS errors are not retried — except RequestTimeout, which AWS documents as retriable.
  def isIrrecoverable(error: Throwable) = {
    val httpStatusCode =
      error match {
        case exception: AmazonServiceException => Some(exception.getStatusCode)
        case _ => None
      }
    val isAwsTimeoutException =
      error match {
        case exception: AmazonServiceException =>
          // See http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList
          exception.getErrorCode == "RequestTimeout"
        case _ => false
      }
    httpStatusCode.exists(c => c >= 400 && c < 500) && !isAwsTimeoutException
  }

  // Pluralizing helper for user-facing counts, e.g. 1.ofType("file") == "1 file".
  implicit class NumReport(val num: Int) extends AnyVal {
    def ofType(itemType: String) = countToString(num, itemType)

    private def countToString(count: Int, singular: String) = {
      // Naive pluralization: just appends 's'.
      def plural = s"${singular}s"
      s"$count ${if (count > 1) plural else singular}"
    }
  }

  implicit def site2Config(implicit site: Site): Config = site.config

  type ErrorOrFile = Either[ErrorReport, File]

  // Infinite lazy fibonacci sequence, used for the retry backoff above.
  lazy val fibs: Stream[Int] = 0 #:: 1 #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
}
|
@@ -0,0 +1,15 @@
|
|
1
|
+
package s3.website
|
2
|
+
|
3
|
+
import com.amazonaws.http.AmazonHttpClient
|
4
|
+
import org.apache.commons.logging.LogFactory
|
5
|
+
import org.specs2.mutable.Specification
|
6
|
+
|
7
|
+
// Guards against the AWS SDK polluting the CLI output with INFO-level log noise
// (the log4j.properties in resources is expected to silence it).
class AwsSdkSpec extends Specification {

  "AWS SDK" should {
    "not log INFO level messages" in {
      // See https://github.com/laurilehmijoki/s3_website/issues/104 for discussion
      LogFactory.getLog(classOf[AmazonHttpClient]).isInfoEnabled must beFalse
    }
  }
}
|