s3_website 2.14.0 → 2.14.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +12 -1
- data/changelog.md +6 -0
- data/lib/s3_website/version.rb +1 -1
- data/resources/s3_website.jar.md5 +1 -1
- data/src/main/scala/s3/website/model/push.scala +30 -9
- data/src/test/scala/s3/website/S3WebsiteSpec.scala +60 -4
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 3bbec93340ef359945d7d3cfbe80883d68375991
|
4
|
+
data.tar.gz: 5bdcd5868373b3864e4dd5e5632cf3776a4c2486
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 8c1ce8a06185665f9c5069ef5a116c596928bed0b0045e8ab8fb4ca0a2e83c40897229b8581705f15907840cca5d2bbffc97d6d9735d1f1444645fcbd7b44cf9
|
7
|
+
data.tar.gz: 626d786f95ed0678bc13fddc544c37a8775f7e3856d7dbb9e158da8ac06fee042288323f4b5296e24ee517ea04ca8f93f98486abd698fef407902762df74c674
|
data/README.md
CHANGED
@@ -72,6 +72,11 @@ in the project's root directory to take advantage of this feature. Please have
|
|
72
72
|
a look at [dotenv's usage guide](https://github.com/bkeepers/dotenv#usage) for
|
73
73
|
syntax information.
|
74
74
|
|
75
|
+
Your `.env` file should contain the following variables:
|
76
|
+
|
77
|
+
AWS_ACCESS_KEY_ID=FOO
|
78
|
+
AWS_SECRET_ACCESS_KEY=BAR
|
79
|
+
|
75
80
|
## Project goals
|
76
81
|
|
77
82
|
* Provide a command-line interface tool for deploying and managing S3 websites
|
@@ -169,6 +174,9 @@ not the pre-processed extensions.
|
|
169
174
|
|
170
175
|
After changing the `gzip` setting, push with the `--force` option.
|
171
176
|
|
177
|
+
s3_website will not gzip a file that is already gzipped. This is useful in the
|
178
|
+
situations where your build tools gzip a file before you invoke `s3_website push`.
|
179
|
+
|
172
180
|
### Using non-standard AWS regions
|
173
181
|
|
174
182
|
By default, `s3_website` uses the US Standard Region. You can upload your
|
@@ -185,7 +193,10 @@ s3_endpoint: ap-northeast-1
|
|
185
193
|
The valid `s3_endpoint` values consist of the [S3 location constraint
|
186
194
|
values](http://docs.amazonwebservices.com/general/latest/gr/rande.html#s3_region).
|
187
195
|
|
188
|
-
Note that at the moment s3_website does not support
|
196
|
+
Note that at the moment s3_website does not support any region that
|
197
|
+
supports only a V4 Signature and does not support V2
|
198
|
+
(i.e. *eu-central-1*, *ap-south-1*, *ap-northeast-2*).
|
199
|
+
This support can be tracked in [issue #126](https://github.com/laurilehmijoki/s3_website/issues/126).
|
189
200
|
|
190
201
|
### Ignoring files you want to keep on AWS
|
191
202
|
|
data/changelog.md
CHANGED
@@ -2,6 +2,12 @@
|
|
2
2
|
|
3
3
|
This project uses [Semantic Versioning](http://semver.org).
|
4
4
|
|
5
|
+
## 2.14.1
|
6
|
+
|
7
|
+
* Do not gzip a file that is already gzipped
|
8
|
+
|
9
|
+
See <https://github.com/laurilehmijoki/s3_website/issues/229> for discussion
|
10
|
+
|
5
11
|
## 2.14.0
|
6
12
|
|
7
13
|
* Add support for CloudFront wildcard invalidations
|
data/lib/s3_website/version.rb
CHANGED
@@ -1 +1 @@
|
|
1
|
-
|
1
|
+
53be05bebd606f3d1e2443beb13130a5
|
@@ -2,15 +2,20 @@ package s3.website.model
|
|
2
2
|
|
3
3
|
import com.amazonaws.services.s3.model.S3ObjectSummary
|
4
4
|
import java.io._
|
5
|
+
|
5
6
|
import org.apache.commons.codec.digest.DigestUtils
|
6
|
-
import java.util.zip.GZIPOutputStream
|
7
|
+
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
|
8
|
+
|
7
9
|
import org.apache.tika.Tika
|
8
10
|
import s3.website.Ruby._
|
9
11
|
import s3.website._
|
10
12
|
import s3.website.model.Upload.tika
|
11
13
|
import s3.website.model.Encoding.encodingOnS3
|
12
14
|
import java.io.File.createTempFile
|
15
|
+
|
16
|
+
import org.apache.commons.io.FileUtils
|
13
17
|
import org.apache.commons.io.IOUtils.copy
|
18
|
+
|
14
19
|
import scala.concurrent.{ExecutionContextExecutor, Future}
|
15
20
|
import scala.util.Try
|
16
21
|
|
@@ -51,7 +56,7 @@ case object RedirectFile extends UploadType {
|
|
51
56
|
val pushAction = Redirected
|
52
57
|
}
|
53
58
|
|
54
|
-
case class Upload(originalFile: File, uploadType: UploadType)(implicit site: Site) {
|
59
|
+
case class Upload(originalFile: File, uploadType: UploadType)(implicit site: Site, logger: Logger) {
|
55
60
|
lazy val s3Key = site.resolveS3Key(originalFile)
|
56
61
|
|
57
62
|
lazy val encodingOnS3 = Encoding.encodingOnS3(s3Key)
|
@@ -96,21 +101,37 @@ case class Upload(originalFile: File, uploadType: UploadType)(implicit site: Sit
|
|
96
101
|
object Upload {
|
97
102
|
lazy val tika = Try(new Tika())
|
98
103
|
|
99
|
-
def md5(originalFile: File)(implicit site: Site): Try[MD5] =
|
104
|
+
def md5(originalFile: File)(implicit site: Site, logger: Logger): Try[MD5] =
|
100
105
|
uploadFile(originalFile) map { file =>
|
101
106
|
using(fis { file }) { DigestUtils.md5Hex }
|
102
107
|
}
|
103
108
|
|
104
|
-
def uploadFile(originalFile: File)(implicit site: Site): Try[File] =
|
109
|
+
def uploadFile(originalFile: File)(implicit site: Site, logger: Logger): Try[File] =
|
105
110
|
encodingOnS3(site resolveS3Key originalFile)
|
106
111
|
.fold(Try(originalFile))(algorithm =>
|
107
112
|
Try {
|
108
|
-
val
|
109
|
-
|
110
|
-
|
111
|
-
|
113
|
+
val isAlreadyGzipped =
|
114
|
+
if (originalFile.length() < 2) {
|
115
|
+
false
|
116
|
+
} else {
|
117
|
+
val fis = new FileInputStream(originalFile)
|
118
|
+
val amountOfMagicGzipBytes = 2
|
119
|
+
val firstTwoBytes = Array.fill[Byte](amountOfMagicGzipBytes)(0)
|
120
|
+
fis.read(firstTwoBytes, 0, amountOfMagicGzipBytes)
|
121
|
+
val head = firstTwoBytes(0) & 0xff | (firstTwoBytes(1) << 8) & 0xff00
|
122
|
+
head == GZIPInputStream.GZIP_MAGIC
|
123
|
+
}
|
124
|
+
if (isAlreadyGzipped) {
|
125
|
+
logger.debug(s"File ${originalFile.getAbsolutePath} is already gzipped. Skipping gzip.")
|
126
|
+
originalFile
|
127
|
+
} else {
|
128
|
+
val tempFile = createTempFile(originalFile.getName, "gzip")
|
129
|
+
tempFile.deleteOnExit()
|
130
|
+
using(new GZIPOutputStream(new FileOutputStream(tempFile))) { stream =>
|
131
|
+
copy(fis(originalFile), stream)
|
132
|
+
}
|
133
|
+
tempFile
|
112
134
|
}
|
113
|
-
tempFile
|
114
135
|
}
|
115
136
|
)
|
116
137
|
|
@@ -1,7 +1,9 @@
|
|
1
1
|
package s3.website
|
2
2
|
|
3
|
-
import java.io.
|
3
|
+
import java.io._
|
4
|
+
import java.nio.charset.StandardCharsets
|
4
5
|
import java.util.concurrent.atomic.AtomicInteger
|
6
|
+
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
|
5
7
|
|
6
8
|
import com.amazonaws.AmazonServiceException
|
7
9
|
import com.amazonaws.services.cloudfront.AmazonCloudFront
|
@@ -9,8 +11,9 @@ import com.amazonaws.services.cloudfront.model.{CreateInvalidationRequest, Creat
|
|
9
11
|
import com.amazonaws.services.s3.AmazonS3
|
10
12
|
import com.amazonaws.services.s3.model._
|
11
13
|
import org.apache.commons.codec.digest.DigestUtils._
|
12
|
-
import org.apache.commons.io.FileUtils
|
13
14
|
import org.apache.commons.io.FileUtils._
|
15
|
+
import org.apache.commons.io.IOUtils.{write => _}
|
16
|
+
import org.apache.commons.io.{FileUtils, IOUtils}
|
14
17
|
import org.mockito.Mockito._
|
15
18
|
import org.mockito.invocation.InvocationOnMock
|
16
19
|
import org.mockito.stubbing.Answer
|
@@ -18,9 +21,9 @@ import org.mockito.{ArgumentCaptor, Matchers, Mockito}
|
|
18
21
|
import org.specs2.mutable.{BeforeAfter, Specification}
|
19
22
|
import org.specs2.specification.Scope
|
20
23
|
import s3.website.CloudFront.CloudFrontSetting
|
21
|
-
import s3.website.
|
22
|
-
import s3.website.Push.{CliArgs}
|
24
|
+
import s3.website.Push.CliArgs
|
23
25
|
import s3.website.S3.S3Setting
|
26
|
+
import s3.website.UploadHelper.DELETE_NOTHING_MAGIC_WORD
|
24
27
|
import s3.website.model.Config.S3_website_yml
|
25
28
|
import s3.website.model.Ssg.automaticallySupportedSiteGenerators
|
26
29
|
import s3.website.model._
|
@@ -41,6 +44,52 @@ class S3WebsiteSpec extends Specification {
|
|
41
44
|
sentPutObjectRequest.getKey must equalTo("styles.css")
|
42
45
|
}
|
43
46
|
|
47
|
+
"gzips a file" in new BasicSetup {
|
48
|
+
val htmlString = "<h1>hi again</h1>"
|
49
|
+
val gzippedBytes = gzip(htmlString.getBytes(StandardCharsets.UTF_8))
|
50
|
+
config = "gzip: true"
|
51
|
+
setLocalFileWithContent("index.html", gzippedBytes)
|
52
|
+
setS3File("styles.css", "1c5117e5839ad8fc00ce3c41296255a1" /* md5 of the gzip of the file contents */)
|
53
|
+
val putObjectRequestCaptor = ArgumentCaptor.forClass(classOf[PutObjectRequest])
|
54
|
+
push()
|
55
|
+
sentPutObjectRequest.getKey must equalTo("index.html")
|
56
|
+
verify(amazonS3Client).putObject(putObjectRequestCaptor.capture())
|
57
|
+
|
58
|
+
val bytesToS3: InputStream = putObjectRequestCaptor.getValue.getInputStream
|
59
|
+
val unzippedBytesToS3 = new GZIPInputStream(bytesToS3)
|
60
|
+
val unzippedString = IOUtils.toString(unzippedBytesToS3, StandardCharsets.UTF_8)
|
61
|
+
|
62
|
+
unzippedString must equalTo(htmlString)
|
63
|
+
}
|
64
|
+
|
65
|
+
"not gzip the file if it's already gzipped" in new BasicSetup {
|
66
|
+
config = "gzip: true"
|
67
|
+
|
68
|
+
val cssString = "body { color: red }"
|
69
|
+
val gzippedCss = gzip(cssString.getBytes(StandardCharsets.UTF_8))
|
70
|
+
setLocalFileWithContent("styles.css", gzippedCss)
|
71
|
+
val putObjectRequestCaptor = ArgumentCaptor.forClass(classOf[PutObjectRequest])
|
72
|
+
push()
|
73
|
+
sentPutObjectRequest.getKey must equalTo("styles.css")
|
74
|
+
verify(amazonS3Client).putObject(putObjectRequestCaptor.capture())
|
75
|
+
|
76
|
+
val bytesToS3: InputStream = putObjectRequestCaptor.getValue.getInputStream
|
77
|
+
val unzippedBytesToS3 = new GZIPInputStream(bytesToS3)
|
78
|
+
val unzippedString = IOUtils.toString(unzippedBytesToS3, StandardCharsets.UTF_8)
|
79
|
+
|
80
|
+
unzippedString must equalTo(cssString)
|
81
|
+
}
|
82
|
+
|
83
|
+
def gzip(data: Array[Byte]): Array[Byte] = {
|
84
|
+
def using[T <: Closeable, R](cl: T)(f: (T) => R): R = try f(cl) finally cl.close()
|
85
|
+
|
86
|
+
val gzippedOutputStream: ByteArrayOutputStream = new ByteArrayOutputStream
|
87
|
+
using(new GZIPOutputStream(gzippedOutputStream)) { stream =>
|
88
|
+
IOUtils.copy(new ByteArrayInputStream(data), stream)
|
89
|
+
}
|
90
|
+
gzippedOutputStream.toByteArray
|
91
|
+
}
|
92
|
+
|
44
93
|
"not update a gzipped S3 object if the contents has not changed" in new BasicSetup {
|
45
94
|
config = "gzip: true"
|
46
95
|
setLocalFileWithContent(("styles.css", "<h1>hi</h1>"))
|
@@ -1343,6 +1392,13 @@ class S3WebsiteSpec extends Specification {
|
|
1343
1392
|
write(file, fileNameAndContent._2)
|
1344
1393
|
}
|
1345
1394
|
|
1395
|
+
def setLocalFileWithContent(fileName: String, contents: Array[Byte]) = {
|
1396
|
+
val file = new File(siteDirectory, fileName)
|
1397
|
+
forceMkdir(file.getParentFile)
|
1398
|
+
file.createNewFile()
|
1399
|
+
FileUtils.writeByteArrayToFile(file, contents)
|
1400
|
+
}
|
1401
|
+
|
1346
1402
|
var config = ""
|
1347
1403
|
val baseConfig =
|
1348
1404
|
"""
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: s3_website
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 2.14.
|
4
|
+
version: 2.14.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Lauri Lehmijoki
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2016-
|
11
|
+
date: 2016-08-02 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: thor
|