s3_website_monadic 0.0.31 → 0.0.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,19 +2,15 @@ package s3.website.model

  import com.amazonaws.services.s3.model.S3ObjectSummary
  import java.io._
- import scala.util.Try
- import s3.website.model.Encoding._
  import org.apache.commons.codec.digest.DigestUtils
  import java.util.zip.GZIPOutputStream
- import org.apache.commons.io.IOUtils
  import org.apache.tika.Tika
  import s3.website.Ruby._
  import s3.website._
- import s3.website.model.Encoding.Gzip
- import scala.util.Failure
- import scala.Some
- import scala.util.Success
- import s3.website.model.Encoding.Zopfli
+ import s3.website.model.LocalFileFromDisk.tika
+ import s3.website.model.Encoding.encodingOnS3
+ import java.io.File.createTempFile
+ import org.apache.commons.io.IOUtils.copy

  object Encoding {

@@ -23,104 +19,52 @@ object Encoding {
    case class Gzip()
    case class Zopfli()

-   def encodingOnS3(path: String)(implicit site: Site): Option[Either[Gzip, Zopfli]] =
-     site.config.gzip.flatMap { (gzipSetting: Either[Boolean, Seq[String]]) =>
+   def encodingOnS3(s3Key: String)(implicit config: Config): Option[Either[Gzip, Zopfli]] =
+     config.gzip.flatMap { (gzipSetting: Either[Boolean, Seq[String]]) =>
        val shouldZipThisFile = gzipSetting.fold(
-         shouldGzip => defaultGzipExtensions exists path.endsWith,
-         fileExtensions => fileExtensions exists path.endsWith
+         shouldGzip => defaultGzipExtensions exists s3Key.endsWith,
+         fileExtensions => fileExtensions exists s3Key.endsWith
        )
-       if (shouldZipThisFile && site.config.gzip_zopfli.isDefined)
+       if (shouldZipThisFile && config.gzip_zopfli.isDefined)
          Some(Right(Zopfli()))
        else if (shouldZipThisFile)
          Some(Left(Gzip()))
        else
          None
      }
-
-   type MD5 = String
- }
-
- sealed trait S3KeyProvider {
-   def s3Key: String
- }
-
- trait UploadTypeResolved {
-   def uploadType: UploadType
  }

  sealed trait UploadType // Sealed, so that we can avoid inexhaustive pattern matches more easily

  case object NewFile extends UploadType
- case object Update extends UploadType
+ case object FileUpdate extends UploadType
+ case object RedirectFile extends UploadType
+
+ case class LocalFileFromDisk(originalFile: File, uploadType: UploadType)(implicit site: Site) {
+   lazy val s3Key = site.resolveS3Key(originalFile)

- case class LocalFile(
-   s3Key: String,
-   originalFile: File,
-   encodingOnS3: Option[Either[Gzip, Zopfli]]
- ) extends S3KeyProvider {
+   lazy val encodingOnS3 = Encoding.encodingOnS3(s3Key)

-   // May throw an exception, so remember to call this in a Try or Future monad
-   lazy val length = uploadFile.length()
+   lazy val lastModified = originalFile.lastModified

    /**
     * This is the file we should upload, because it contains the potentially gzipped contents of the original file.
     *
     * May throw an exception, so remember to call this in a Try or Future monad
     */
-   lazy val uploadFile: File = encodingOnS3
-     .fold(originalFile)(algorithm => {
-       val tempFile = File.createTempFile(originalFile.getName, "gzip")
-       tempFile.deleteOnExit()
-       using(new GZIPOutputStream(new FileOutputStream(tempFile))) { stream =>
-         IOUtils.copy(fis(originalFile), stream)
-       }
-       tempFile
-     })
-
-   /**
-    * May throw an exception, so remember to call this in a Try or Future monad
-    */
-   lazy val md5 = using(fis(uploadFile)) { inputStream =>
-     DigestUtils.md5Hex(inputStream)
-   }
+   lazy val uploadFile: File = LocalFileFromDisk uploadFile originalFile

-   private[this] def fis(file: File): InputStream = new FileInputStream(file)
-   private[this] def using[T <: Closeable, R](cl: T)(f: (T) => R): R = try f(cl) finally cl.close()
- }
-
- object LocalFile {
-   def toUpload(localFile: LocalFile)(implicit config: Config): Either[ErrorReport, Upload] = Try {
-     Upload(
-       s3Key = localFile.s3Key,
-       essence = Right(
-         UploadBody(
-           md5 = localFile.md5,
-           contentEncoding = localFile.encodingOnS3.map(_ => "gzip"),
-           contentLength = localFile.length,
-           maxAge = resolveMaxAge(localFile),
-           contentType = resolveContentType(localFile.originalFile),
-           openInputStream = () => new FileInputStream(localFile.uploadFile)
-         )
-       )
-     )
-   } match {
-     case Success(upload) => Right(upload)
-     case Failure(error) => Left(IOError(error))
-   }
-
-   lazy val tika = new Tika()
-
-   def resolveContentType(file: File) = {
-     val mimeType = tika.detect(file)
+   lazy val contentType = {
+     val mimeType = tika.detect(originalFile)
      if (mimeType.startsWith("text/") || mimeType == "application/json")
        mimeType + "; charset=utf-8"
      else
        mimeType
    }

-   def resolveMaxAge(localFile: LocalFile)(implicit config: Config): Option[Int] = {
+   lazy val maxAge: Option[Int] = {
      type GlobsMap = Map[String, Int]
-     config.max_age.flatMap { (intOrGlobs: Either[Int, GlobsMap]) =>
+     site.config.max_age.flatMap { (intOrGlobs: Either[Int, GlobsMap]) =>
        type GlobsSeq = Seq[(String, Int)]
        def respectMostSpecific(globs: GlobsMap): GlobsSeq = globs.toSeq.sortBy(_._1.length).reverse
        intOrGlobs
@@ -129,7 +73,7 @@ object LocalFile {
          (seconds: Int) => Some(seconds),
          (globs: GlobsSeq) =>
            globs.find { globAndInt =>
-             (rubyRuntime evalScriptlet s"File.fnmatch('${globAndInt._1}', '${localFile.s3Key}')")
+             (rubyRuntime evalScriptlet s"File.fnmatch('${globAndInt._1}', '$s3Key')")
                .toJava(classOf[Boolean])
                .asInstanceOf[Boolean]
            } map (_._2)
@@ -137,83 +81,71 @@ object LocalFile {
      }
    }

-   def resolveLocalFiles(implicit site: Site, logger: Logger): Either[ErrorReport, Seq[LocalFile]] = Try {
-     val files = recursiveListFiles(new File(site.rootDirectory)).filterNot(_.isDirectory)
-     files map { file =>
-       val s3Key = site.resolveS3Key(file)
-       LocalFile(s3Key, file, encodingOnS3(s3Key))
-     } filterNot { file =>
-       val excludeFile = site.config.exclude_from_upload exists { _.fold(
-         // For backward compatibility, use Ruby regex matching
-         (exclusionRegex: String) => rubyRegexMatches(file.s3Key, exclusionRegex),
-         (exclusionRegexes: Seq[String]) => exclusionRegexes exists (rubyRegexMatches(file.s3Key, _))
-       ) }
-       if (excludeFile) logger.debug(s"Excluded ${file.s3Key} from upload")
-       excludeFile
-     } filterNot { _.originalFile.getName == "s3_website.yml" } // For security reasons, the s3_website.yml should never be pushed
-   } match {
-     case Success(localFiles) =>
-       Right(
-         // Sort by key, because this will improve the performance when pushing existing sites.
-         // The lazy-loading diff take advantage of this arrangement.
-         localFiles sortBy (_.s3Key)
-       )
-     case Failure(error) =>
-       Left(IOError(error))
-   }
+   /**
+    * May throw an exception, so remember to call this in a Try or Future monad
+    */
+   lazy val md5 = LocalFileFromDisk md5 originalFile
+ }
+
+ object LocalFileFromDisk {
+   lazy val tika = new Tika()
+
+   def md5(originalFile: File)(implicit site: Site) = using(fis { uploadFile(originalFile) }) { DigestUtils.md5Hex }
+
+   def uploadFile(originalFile: File)(implicit site: Site): File =
+     encodingOnS3(site resolveS3Key originalFile)
+       .fold(originalFile)(algorithm => {
+         val tempFile = createTempFile(originalFile.getName, "gzip")
+         tempFile.deleteOnExit()
+         using(new GZIPOutputStream(new FileOutputStream(tempFile))) { stream =>
+           copy(fis(originalFile), stream)
+         }
+         tempFile
+       })
+
+   private[this] def fis(file: File): InputStream = new FileInputStream(file)
+   private[this] def using[T <: Closeable, R](cl: T)(f: (T) => R): R = try f(cl) finally cl.close()
+ }

+ object Files {
    def recursiveListFiles(f: File): Seq[File] = {
      val these = f.listFiles
      these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
    }
- }
-
- case class Redirect(key: String, redirectTarget: String)

- object Redirect extends UploadType {
-   def resolveRedirects(implicit config: Config): Seq[Upload with UploadTypeResolved] = {
-     val redirects = config.redirects.fold(Nil: Seq[Redirect]) {
-       sourcesToTargets =>
-         sourcesToTargets.foldLeft(Seq(): Seq[Redirect]) {
-           (redirects, sourceToTarget) =>
-             redirects :+ Redirect(sourceToTarget._1, sourceToTarget._2)
-         }
-     }
-     redirects.map { redirect =>
-       Upload.apply(redirect)
+   def listSiteFiles(implicit site: Site, logger: Logger) = {
+     def excludeFromUpload(s3Key: String) = {
+       val excludeByConfig = site.config.exclude_from_upload exists {
+         _.fold(
+           // For backward compatibility, use Ruby regex matching
+           (exclusionRegex: String) => rubyRegexMatches(s3Key, exclusionRegex),
+           (exclusionRegexes: Seq[String]) => exclusionRegexes exists (rubyRegexMatches(s3Key, _))
+         )
+       }
+       val doNotUpload = excludeByConfig || s3Key == "s3_website.yml"
+       if (doNotUpload) logger.debug(s"Excluded $s3Key from upload")
+       doNotUpload
      }
+     recursiveListFiles(new File(site.rootDirectory))
+       .filterNot(_.isDirectory)
+       .filterNot(f => excludeFromUpload(site.resolveS3Key(f)))
    }
  }

- case class Upload(
-   s3Key: String,
-   essence: Either[Redirect, UploadBody]
- ) extends S3KeyProvider {
-
-   def withUploadType(ut: UploadType) =
-     new Upload(s3Key, essence) with UploadTypeResolved {
-       def uploadType = ut
-     }
+ case class Redirect(s3Key: String, redirectTarget: String) {
+   def uploadType = RedirectFile
  }

- object Upload {
-   def apply(redirect: Redirect): Upload with UploadTypeResolved = new Upload(redirect.key, Left(redirect)) with UploadTypeResolved {
-     def uploadType = Redirect
+ object Redirect {
+   def resolveRedirects(implicit config: Config): Seq[Redirect] =
+     config.redirects.fold(Nil: Seq[Redirect]) { sourcesToTargets =>
+       sourcesToTargets.foldLeft(Seq(): Seq[Redirect]) {
+         (redirects, sourceToTarget) =>
+           redirects :+ Redirect(sourceToTarget._1, sourceToTarget._2)
+       }
      }
  }

- /**
-  * Represents a bunch of data that should be stored into an S3 objects body.
-  */
- case class UploadBody(
-   md5: MD5,
-   contentLength: Long,
-   contentEncoding: Option[String],
-   maxAge: Option[Int],
-   contentType: String,
-   openInputStream: () => InputStream // It's in the caller's responsibility to close this stream
- )
-
  case class S3File(s3Key: String, md5: MD5)

  object S3File {
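
Side note on the hunk above (not part of the diff): the new LocalFileFromDisk.uploadFile helper gzips the original file into a temporary file before upload. Below is an illustrative, self-contained sketch of that pattern; it uses plain java.nio instead of commons-io, and the object and method names are invented for the example. The released code additionally chooses between Gzip and Zopfli based on the gzip and gzip_zopfli configuration keys, as the encodingOnS3 hunk shows.

// Illustrative sketch only -- it mirrors the gzip-to-temp-file technique used by
// LocalFileFromDisk.uploadFile above; GzipSketch and gzipToTempFile are invented names.
import java.io.{Closeable, File, FileOutputStream}
import java.nio.file.Files
import java.util.zip.GZIPOutputStream

object GzipSketch {
  // Loan pattern: run f against the resource and always close it afterwards.
  private def using[T <: Closeable, R](resource: T)(f: T => R): R =
    try f(resource) finally resource.close()

  // Compress `original` into a temporary file that the JVM deletes on exit.
  def gzipToTempFile(original: File): File = {
    val tempFile = File.createTempFile(original.getName, "gzip")
    tempFile.deleteOnExit()
    using(new GZIPOutputStream(new FileOutputStream(tempFile))) { gzipStream =>
      Files.copy(original.toPath, gzipStream) // stream the original bytes through the gzip encoder
    }
    tempFile
  }
}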
@@ -19,7 +19,7 @@ object Ssg {
    }

    def findSiteDirectory(workingDirectory: File): ErrorOrFile =
-     LocalFile.recursiveListFiles(workingDirectory).find { file =>
+     Files.recursiveListFiles(workingDirectory).find { file =>
        file.isDirectory && automaticallySupportedSiteGenerators.exists(_.outputDirectory == file.getName)
      }.fold(Left(notFoundErrorReport): ErrorOrFile)(Right(_))
  }
@@ -2,7 +2,6 @@ package s3

  import scala.concurrent.{ExecutionContextExecutor, Future}
  import scala.concurrent.duration.{TimeUnit, Duration}
- import s3.website.Utils._
  import s3.website.S3.{PushSuccessReport, PushFailureReport}
  import com.amazonaws.AmazonServiceException
  import s3.website.model.{Config, Site}
@@ -18,6 +17,23 @@ package object website {

    trait ErrorReport extends Report

+   object ErrorReport {
+     def apply(t: Throwable)(implicit logger: Logger) = new ErrorReport {
+       override def reportMessage = {
+         val extendedReport =
+           if (logger.verboseOutput)
+             Some(t.getStackTrace take 5)
+           else
+             None
+         s"${t.getMessage}${extendedReport.fold("")(stackTraceElems => "\n" + stackTraceElems.mkString("\n"))}"
+       }
+     }
+
+     def apply(msg: String) = new ErrorReport {
+       override def reportMessage = msg
+     }
+   }
+
    trait RetrySetting {
      def retryTimeUnit: TimeUnit
    }
@@ -26,6 +42,8 @@ package object website {
      def dryRun: Boolean
    }

+   type S3Key = String
+
    trait PushAction {
      def actionName = getClass.getSimpleName.replace("$", "") // case object class names contain the '$' char

@@ -61,6 +79,8 @@ package object website {

    type Attempt = Int

+   type MD5 = String
+
    def retry[L <: Report, R](attempt: Attempt)
            (createFailureReport: (Throwable) => L, retryAction: (Attempt) => Future[Either[L, R]])
            (implicit retrySetting: RetrySetting, ec: ExecutionContextExecutor, logger: Logger):
@@ -98,4 +118,6 @@ package object website {
    implicit def site2Config(implicit site: Site): Config = site.config

    type ErrorOrFile = Either[ErrorReport, File]
+
+   lazy val fibs: Stream[Int] = 0 #:: 1 #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
  }
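
The fibs value added above is the classic lazily memoized Fibonacci stream; in this codebase it is presumably used to derive growing wait times between retry attempts (compare the retry signature in the previous hunk). A minimal sketch of what it evaluates to, assuming Scala 2.x where Stream is still the standard lazy sequence (not part of the diff):

// Illustrative only: the same fibs definition as in the diff, wrapped in a runnable object.
object FibsSketch extends App {
  lazy val fibs: Stream[Int] = 0 #:: 1 #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }

  // Stream memoizes each element, so repeated traversals do not recompute earlier values.
  println(fibs.take(8).toList) // List(0, 1, 1, 2, 3, 5, 8, 13)
}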
@@ -14,7 +14,7 @@ import scala.concurrent.duration._
  import s3.website.S3.S3Setting
  import scala.collection.JavaConversions._
  import com.amazonaws.AmazonServiceException
- import org.apache.commons.codec.digest.DigestUtils.md5Hex
+ import org.apache.commons.codec.digest.DigestUtils._
  import s3.website.CloudFront.CloudFrontSetting
  import com.amazonaws.services.cloudfront.AmazonCloudFront
  import com.amazonaws.services.cloudfront.model.{CreateInvalidationResult, CreateInvalidationRequest, TooManyInvalidationsInProgressException}
@@ -24,6 +24,9 @@ import java.util.concurrent.atomic.AtomicInteger
  import org.apache.commons.io.FileUtils.{forceDelete, forceMkdir, write}
  import scala.collection.mutable
  import s3.website.Push.{push, CliArgs}
+ import s3.website.CloudFront.CloudFrontSetting
+ import s3.website.S3.S3Setting
+ import org.apache.commons.codec.digest.DigestUtils

  class S3WebsiteSpec extends Specification {

@@ -517,6 +520,48 @@ class S3WebsiteSpec extends Specification {
      }
    }

+   // Because of the local database, the first and second run are implemented differently.
+   "pushing files for the second time" should {
+     "delete the S3 objects that no longer exist on the local site" in new AllInSameDirectory with EmptySite with MockAWS with DefaultRunMode {
+       push
+       setS3Files(S3File("obsolete.txt", ""))
+       push
+       sentDelete must equalTo("obsolete.txt")
+     }
+
+     "push new files to the bucket" in new AllInSameDirectory with EmptySite with MockAWS with DefaultRunMode {
+       push
+       setLocalFile("newfile.txt")
+       push
+       sentPutObjectRequest.getKey must equalTo("newfile.txt")
+     }
+
+     "push locally changed files" in new AllInSameDirectory with EmptySite with MockAWS with DefaultRunMode {
+       setLocalFileWithContent(("file.txt", "first run"))
+       push
+       setLocalFileWithContent(("file.txt", "second run"))
+       push
+       sentPutObjectRequests.length must equalTo(2)
+     }
+
+     "push locally changed files only once" in new AllInSameDirectory with EmptySite with MockAWS with DefaultRunMode {
+       setLocalFileWithContent(("file.txt", "first run"))
+       push
+       setS3Files(S3File("file.txt", md5Hex("first run")))
+       setLocalFileWithContent(("file.txt", "second run"))
+       push
+       sentPutObjectRequests.length must equalTo(2)
+     }
+
+     "detect files that someone else has changed on the S3 bucket" in new AllInSameDirectory with EmptySite with MockAWS with DefaultRunMode {
+       setLocalFileWithContent(("file.txt", "first run"))
+       push
+       setOutdatedS3Keys("file.txt")
+       push
+       sentPutObjectRequests.length must equalTo(2)
+     }
+   }
+
    "Jekyll site" should {
      "be detected automatically" in new JekyllSite with EmptySite with MockAWS with DefaultRunMode {
        setLocalFile("index.html")
@@ -637,12 +682,6 @@ class S3WebsiteSpec extends Specification {
        .listObjects(Matchers.any(classOf[ListObjectsRequest]))
    }

-   def asSeenByS3Client(upload: Upload)(implicit config: Config, logger: Logger): PutObjectRequest = {
-     val req = ArgumentCaptor.forClass(classOf[PutObjectRequest])
-     verify(amazonS3Client).putObject(req.capture())
-     req.getValue
-   }
-
    def sentPutObjectRequests: Seq[PutObjectRequest] = {
      val req = ArgumentCaptor.forClass(classOf[PutObjectRequest])
      verify(amazonS3Client, Mockito.atLeast(1)).putObject(req.capture())
@@ -688,15 +727,14 @@ class S3WebsiteSpec extends Specification {
    implicit final val workingDirectory: File = new File(FileUtils.getTempDirectory, "s3_website_dir" + Random.nextLong())
    val siteDirectory: File // Represents the --site=X option
    val configDirectory: File = workingDirectory // Represents the --config-dir=X option
-
-   lazy val allDirectories = workingDirectory :: siteDirectory :: configDirectory :: Nil
+   lazy val localDatabase: File = new File(FileUtils.getTempDirectory, "s3_website_local_db_" + sha256Hex(siteDirectory.getPath))

    def before {
-     allDirectories foreach forceMkdir
+     workingDirectory :: siteDirectory :: configDirectory :: Nil foreach forceMkdir
    }

    def after {
-     allDirectories foreach { dir =>
+     (workingDirectory :: siteDirectory :: configDirectory :: localDatabase :: Nil) foreach { dir =>
        if (dir.exists) forceDelete(dir)
      }
    }
@@ -720,7 +758,7 @@ class S3WebsiteSpec extends Specification {
  trait EmptySite extends Directories {
    type LocalFileWithContent = (String, String)

-   val localFilesWithContent: mutable.Buffer[LocalFileWithContent] = mutable.Buffer()
+   val localFilesWithContent: mutable.Set[LocalFileWithContent] = mutable.Set()
    def setLocalFile(fileName: String) = setLocalFileWithContent((fileName, ""))
    def setLocalFiles(fileNames: String*) = fileNames foreach setLocalFile
    def setLocalFileWithContent(fileNameAndContent: LocalFileWithContent) = localFilesWithContent += fileNameAndContent
@@ -733,10 +771,22 @@ class S3WebsiteSpec extends Specification {
        |s3_bucket: bucket
      """.stripMargin

-   implicit lazy val cliArgs: CliArgs = siteWithFilesAndContent(config, localFilesWithContent)
+   implicit def cliArgs: CliArgs = siteWithFilesAndContent(config, localFilesWithContent)
    def pushMode: PushMode // Represents the --dry-run switch

-   def buildSite(
+   private def siteWithFilesAndContent(config: String = "", localFilesWithContent: mutable.Set[LocalFileWithContent]): CliArgs = {
+     localFilesWithContent.foreach {
+       case (filePath, content) =>
+         val file = new File(siteDirectory, filePath)
+         forceMkdir(file.getParentFile)
+         file.createNewFile()
+         write(file, content)
+         localFilesWithContent remove(filePath, content) // Remove the file from the set once we've persisted it on the disk.
+     }
+     buildCliArgs(config)
+   }
+
+   private def buildCliArgs(
      config: String = "",
      baseConfig: String =
        """
@@ -762,16 +812,5 @@ class S3WebsiteSpec extends Specification {
        override def configDir = configDirectory.getAbsolutePath
      }
    }
-
-   def siteWithFilesAndContent(config: String = "", localFilesWithContent: Seq[LocalFileWithContent]): CliArgs = {
-     localFilesWithContent.foreach {
-       case (filePath, content) =>
-         val file = new File(siteDirectory, filePath)
-         forceMkdir(file.getParentFile)
-         file.createNewFile()
-         write(file, content)
-     }
-     buildSite(config)
-   }
  }
 }
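
One detail worth calling out in the spec changes above: each test run now gets a localDatabase file whose name is derived from a SHA-256 digest of the site directory path, so the same site always maps to the same database file under the system temp directory. A self-contained sketch of that naming scheme follows (not part of the diff; the example path is made up, while DigestUtils and FileUtils are the commons-codec and commons-io classes the spec already imports):

// Illustrative sketch only -- not part of the released package.
import java.io.File
import org.apache.commons.codec.digest.DigestUtils.sha256Hex
import org.apache.commons.io.FileUtils

object LocalDatabaseNameSketch extends App {
  val siteDirectoryPath = "/tmp/s3_website_dir_example" // hypothetical path
  // The same input path always yields the same digest, hence a stable per-site file name.
  val localDatabase = new File(FileUtils.getTempDirectory, "s3_website_local_db_" + sha256Hex(siteDirectoryPath))
  println(localDatabase.getAbsolutePath)
}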
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: s3_website_monadic
  version: !ruby/object:Gem::Version
-   version: 0.0.31
+   version: 0.0.32
  platform: ruby
  authors:
  - Lauri Lehmijoki
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-05-14 00:00:00.000000000 Z
+ date: 2014-05-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: thor
@@ -113,14 +113,13 @@ files:
  - src/main/java/s3/website/ByteHelper.java
  - src/main/scala/s3/website/CloudFront.scala
  - src/main/scala/s3/website/Diff.scala
+ - src/main/scala/s3/website/Logger.scala
  - src/main/scala/s3/website/Push.scala
  - src/main/scala/s3/website/Ruby.scala
  - src/main/scala/s3/website/S3.scala
- - src/main/scala/s3/website/Utils.scala
  - src/main/scala/s3/website/model/Config.scala
  - src/main/scala/s3/website/model/S3Endpoint.scala
  - src/main/scala/s3/website/model/Site.scala
- - src/main/scala/s3/website/model/errors.scala
  - src/main/scala/s3/website/model/push.scala
  - src/main/scala/s3/website/model/ssg.scala
  - src/main/scala/s3/website/package.scala
@@ -1,108 +0,0 @@
- package s3.website
-
- import s3.website.model.Config
- import scala.collection.parallel.{ForkJoinTaskSupport, ParSeq}
- import scala.concurrent.forkjoin.ForkJoinPool
-
- class Utils(implicit config: Config) {
-   def toParSeq[T](seq: Seq[T]): ParSeq[T] = {
-     val parallelSeq: ParSeq[T] = seq.par
-     parallelSeq.tasksupport_=(new ForkJoinTaskSupport(new ForkJoinPool(config.concurrency_level)))
-     parallelSeq
-   }
- }
-
- object Utils {
-   lazy val fibs: Stream[Int] = 0 #:: 1 #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
- }
-
- class Logger(val verboseOutput: Boolean, logMessage: (String) => Unit = println) {
-   import Rainbow._
-   def debug(msg: String) = if (verboseOutput) log(Debug, msg)
-   def info(msg: String) = log(Info, msg)
-   def fail(msg: String) = log(Failure, msg)
-
-   def info(report: SuccessReport) = log(Success, report.reportMessage)
-   def info(report: FailureReport) = fail(report.reportMessage)
-
-   def pending(msg: String) = log(Wait, msg)
-
-   private def log(logType: LogType, msgRaw: String) {
-     val msg = msgRaw.replaceAll("\\n", "\n ") // Indent new lines, so that they arrange nicely with other log lines
-     logMessage(s"[$logType] $msg")
-   }
-
-   sealed trait LogType {
-     val prefix: String
-     override def toString = prefix
-   }
-   case object Debug extends LogType {
-     val prefix = "debg".cyan
-   }
-   case object Info extends LogType {
-     val prefix = "info".blue
-   }
-   case object Success extends LogType {
-     val prefix = "succ".green
-   }
-   case object Failure extends LogType {
-     val prefix = "fail".red
-   }
-   case object Wait extends LogType {
-     val prefix = "wait".yellow
-   }
- }
-
- /**
-  * Idea copied from https://github.com/ktoso/scala-rainbow.
-  */
- object Rainbow {
-   implicit class RainbowString(val s: String) extends AnyVal {
-     import Console._
-
-     /** Colorize the given string foreground to ANSI black */
-     def black = BLACK + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def red = RED + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def green = GREEN + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def yellow = YELLOW + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def blue = BLUE + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def magenta = MAGENTA + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def cyan = CYAN + s + RESET
-     /** Colorize the given string foreground to ANSI red */
-     def white = WHITE + s + RESET
-
-     /** Colorize the given string background to ANSI red */
-     def onBlack = BLACK_B + s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onRed = RED_B+ s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onGreen = GREEN_B+ s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onYellow = YELLOW_B + s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onBlue = BLUE_B+ s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onMagenta = MAGENTA_B + s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onCyan = CYAN_B+ s + RESET
-     /** Colorize the given string background to ANSI red */
-     def onWhite = WHITE_B+ s + RESET
-
-     /** Make the given string bold */
-     def bold = BOLD + s + RESET
-     /** Underline the given string */
-     def underlined = UNDERLINED + s + RESET
-     /** Make the given string blink (some terminals may turn this off) */
-     def blink = BLINK + s + RESET
-     /** Reverse the ANSI colors of the given string */
-     def reversed = REVERSED + s + RESET
-     /** Make the given string invisible using ANSI color codes */
-     def invisible = INVISIBLE + s + RESET
-   }
- }
@@ -1,9 +0,0 @@
- package s3.website.model
-
- import s3.website.ErrorReport
-
- case class ClientError(reportMessage: String) extends ErrorReport
-
- case class IOError(exception: Throwable) extends ErrorReport {
-   def reportMessage = exception.getMessage
- }