jekyll-pwa-plugin 2.2.3 → 5.1.5
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +5 -5
- data/lib/jekyll-pwa-plugin.rb +11 -3
- data/lib/vendor/workbox-v5.1.4/workbox-background-sync.dev.js +818 -0
- data/lib/vendor/workbox-v5.1.4/workbox-background-sync.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-background-sync.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-background-sync.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-broadcast-update.dev.js +288 -0
- data/lib/vendor/workbox-v5.1.4/workbox-broadcast-update.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-broadcast-update.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-broadcast-update.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-cacheable-response.dev.js +191 -0
- data/lib/vendor/workbox-v5.1.4/workbox-cacheable-response.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-cacheable-response.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-cacheable-response.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-core.dev.js +1858 -0
- data/lib/vendor/workbox-v5.1.4/workbox-core.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-core.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-core.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-expiration.dev.js +649 -0
- data/lib/vendor/workbox-v5.1.4/workbox-expiration.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-expiration.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-expiration.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-navigation-preload.dev.js +102 -0
- data/lib/vendor/workbox-v5.1.4/workbox-navigation-preload.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-navigation-preload.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-navigation-preload.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-offline-ga.dev.js +235 -0
- data/lib/vendor/workbox-v5.1.4/workbox-offline-ga.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-offline-ga.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-offline-ga.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-precaching.dev.js +1210 -0
- data/lib/vendor/workbox-v5.1.4/workbox-precaching.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-precaching.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-precaching.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-range-requests.dev.js +262 -0
- data/lib/vendor/workbox-v5.1.4/workbox-range-requests.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-range-requests.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-range-requests.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-routing.dev.js +923 -0
- data/lib/vendor/workbox-v5.1.4/workbox-routing.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-routing.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-routing.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-strategies.dev.js +923 -0
- data/lib/vendor/workbox-v5.1.4/workbox-strategies.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-strategies.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-strategies.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-streams.dev.js +318 -0
- data/lib/vendor/workbox-v5.1.4/workbox-streams.dev.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-streams.prod.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-streams.prod.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-sw.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-sw.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.es5.mjs +1125 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.es5.mjs.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.mjs +943 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.mjs.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.umd.js +1136 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.dev.umd.js.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.es5.mjs +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.es5.mjs.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.mjs +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.mjs.map +1 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.umd.js +2 -0
- data/lib/vendor/workbox-v5.1.4/workbox-window.prod.umd.js.map +1 -0
- metadata +65 -55
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.dev.js +0 -593
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.dev.js +0 -395
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.dev.js +0 -740
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.dev.js +0 -236
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-core.dev.js +0 -1736
- data/lib/vendor/workbox-v3.6.3/workbox-core.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-core.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-core.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.dev.js +0 -255
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.dev.js +0 -159
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.dev.js +0 -1171
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.dev.js +0 -299
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-routing.dev.js +0 -863
- data/lib/vendor/workbox-v3.6.3/workbox-routing.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-routing.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-routing.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.dev.js +0 -1172
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-streams.dev.js +0 -380
- data/lib/vendor/workbox-v3.6.3/workbox-streams.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-streams.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-streams.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-sw.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-sw.js.map +0 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 31fa8631021d968504ac796dfb537cdc393491477822eab25a03c388585838a0
|
4
|
+
data.tar.gz: 68dc858281832595cc85608f075259ef534805e613919dcdf812c31b1cedf999
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 93921fa7d512f3608f7258767b3351bf93781926f1e3c46275fe73b4692da9a575d9657a9394b092f81db7484cda7fb220ffdbe00dfa9366564fe6fbd04525c4
|
7
|
+
data.tar.gz: d2e965dfa644b3619e380a86957af4087e8bbe33919a4e96de84d20292f8a9e44aab89e007bf28a67d4dfd1c2abbe4405d0450d9b771d482e84d35c5c143fac5
|
data/lib/jekyll-pwa-plugin.rb
CHANGED
@@ -1,5 +1,5 @@
|
|
1
1
|
class SWHelper
|
2
|
-
WORKBOX_VERSION = '
|
2
|
+
WORKBOX_VERSION = 'v5.1.4'
|
3
3
|
def initialize(site, config)
|
4
4
|
@site = site
|
5
5
|
@config = config
|
@@ -21,8 +21,7 @@ class SWHelper
|
|
21
21
|
switch (installingWorker.state) {
|
22
22
|
case 'installed':
|
23
23
|
if (navigator.serviceWorker.controller) {
|
24
|
-
var event =
|
25
|
-
event.initEvent('sw.update', true, true);
|
24
|
+
var event = new Event('sw.update');
|
26
25
|
window.dispatchEvent(event);
|
27
26
|
}
|
28
27
|
break;
|
@@ -151,22 +150,31 @@ end
|
|
151
150
|
module Jekyll
|
152
151
|
|
153
152
|
Hooks.register :pages, :post_render do |page|
|
153
|
+
enabled = (page.site.config.dig('pwa', 'enabled') != false)
|
154
|
+
if enabled
|
154
155
|
# append <script> for sw-register.js in <body>
|
155
156
|
SWHelper.insert_sw_register_into_body(page)
|
157
|
+
end
|
156
158
|
end
|
157
159
|
|
158
160
|
Hooks.register :documents, :post_render do |document|
|
161
|
+
enabled = (page.site.config.dig('pwa', 'enabled') != false)
|
162
|
+
if enabled
|
159
163
|
# append <script> for sw-register.js in <body>
|
160
164
|
SWHelper.insert_sw_register_into_body(document)
|
165
|
+
end
|
161
166
|
end
|
162
167
|
|
163
168
|
Hooks.register :site, :post_write do |site|
|
169
|
+
enabled = (page.site.config.dig('pwa', 'enabled') != false)
|
170
|
+
if enabled
|
164
171
|
pwa_config = site.config['pwa'] || {}
|
165
172
|
sw_helper = SWHelper.new(site, pwa_config)
|
166
173
|
|
167
174
|
sw_helper.write_sw_register()
|
168
175
|
sw_helper.generate_workbox_precache()
|
169
176
|
sw_helper.write_sw()
|
177
|
+
end
|
170
178
|
end
|
171
179
|
|
172
180
|
end
|
@@ -0,0 +1,818 @@
|
|
1
|
+
this.workbox = this.workbox || {};
|
2
|
+
this.workbox.backgroundSync = (function (exports, WorkboxError_js, logger_js, assert_js, getFriendlyURL_js, DBWrapper_js) {
|
3
|
+
'use strict';
|
4
|
+
|
5
|
+
try {
|
6
|
+
self['workbox:background-sync:5.1.4'] && _();
|
7
|
+
} catch (e) {}
|
8
|
+
|
9
|
+
/*
|
10
|
+
Copyright 2018 Google LLC
|
11
|
+
|
12
|
+
Use of this source code is governed by an MIT-style
|
13
|
+
license that can be found in the LICENSE file or at
|
14
|
+
https://opensource.org/licenses/MIT.
|
15
|
+
*/
|
16
|
+
const DB_VERSION = 3;
|
17
|
+
const DB_NAME = 'workbox-background-sync';
|
18
|
+
const OBJECT_STORE_NAME = 'requests';
|
19
|
+
const INDEXED_PROP = 'queueName';
|
20
|
+
/**
|
21
|
+
* A class to manage storing requests from a Queue in IndexedDB,
|
22
|
+
* indexed by their queue name for easier access.
|
23
|
+
*
|
24
|
+
* @private
|
25
|
+
*/
|
26
|
+
|
27
|
+
class QueueStore {
|
28
|
+
/**
|
29
|
+
* Associates this instance with a Queue instance, so entries added can be
|
30
|
+
* identified by their queue name.
|
31
|
+
*
|
32
|
+
* @param {string} queueName
|
33
|
+
* @private
|
34
|
+
*/
|
35
|
+
constructor(queueName) {
|
36
|
+
this._queueName = queueName;
|
37
|
+
this._db = new DBWrapper_js.DBWrapper(DB_NAME, DB_VERSION, {
|
38
|
+
onupgradeneeded: this._upgradeDb
|
39
|
+
});
|
40
|
+
}
|
41
|
+
/**
|
42
|
+
* Append an entry last in the queue.
|
43
|
+
*
|
44
|
+
* @param {Object} entry
|
45
|
+
* @param {Object} entry.requestData
|
46
|
+
* @param {number} [entry.timestamp]
|
47
|
+
* @param {Object} [entry.metadata]
|
48
|
+
* @private
|
49
|
+
*/
|
50
|
+
|
51
|
+
|
52
|
+
async pushEntry(entry) {
|
53
|
+
{
|
54
|
+
assert_js.assert.isType(entry, 'object', {
|
55
|
+
moduleName: 'workbox-background-sync',
|
56
|
+
className: 'QueueStore',
|
57
|
+
funcName: 'pushEntry',
|
58
|
+
paramName: 'entry'
|
59
|
+
});
|
60
|
+
assert_js.assert.isType(entry.requestData, 'object', {
|
61
|
+
moduleName: 'workbox-background-sync',
|
62
|
+
className: 'QueueStore',
|
63
|
+
funcName: 'pushEntry',
|
64
|
+
paramName: 'entry.requestData'
|
65
|
+
});
|
66
|
+
} // Don't specify an ID since one is automatically generated.
|
67
|
+
|
68
|
+
|
69
|
+
delete entry.id;
|
70
|
+
entry.queueName = this._queueName;
|
71
|
+
await this._db.add(OBJECT_STORE_NAME, entry);
|
72
|
+
}
|
73
|
+
/**
|
74
|
+
* Prepend an entry first in the queue.
|
75
|
+
*
|
76
|
+
* @param {Object} entry
|
77
|
+
* @param {Object} entry.requestData
|
78
|
+
* @param {number} [entry.timestamp]
|
79
|
+
* @param {Object} [entry.metadata]
|
80
|
+
* @private
|
81
|
+
*/
|
82
|
+
|
83
|
+
|
84
|
+
async unshiftEntry(entry) {
|
85
|
+
{
|
86
|
+
assert_js.assert.isType(entry, 'object', {
|
87
|
+
moduleName: 'workbox-background-sync',
|
88
|
+
className: 'QueueStore',
|
89
|
+
funcName: 'unshiftEntry',
|
90
|
+
paramName: 'entry'
|
91
|
+
});
|
92
|
+
assert_js.assert.isType(entry.requestData, 'object', {
|
93
|
+
moduleName: 'workbox-background-sync',
|
94
|
+
className: 'QueueStore',
|
95
|
+
funcName: 'unshiftEntry',
|
96
|
+
paramName: 'entry.requestData'
|
97
|
+
});
|
98
|
+
}
|
99
|
+
|
100
|
+
const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
101
|
+
count: 1
|
102
|
+
});
|
103
|
+
|
104
|
+
if (firstEntry) {
|
105
|
+
// Pick an ID one less than the lowest ID in the object store.
|
106
|
+
entry.id = firstEntry.id - 1;
|
107
|
+
} else {
|
108
|
+
// Otherwise let the auto-incrementor assign the ID.
|
109
|
+
delete entry.id;
|
110
|
+
}
|
111
|
+
|
112
|
+
entry.queueName = this._queueName;
|
113
|
+
await this._db.add(OBJECT_STORE_NAME, entry);
|
114
|
+
}
|
115
|
+
/**
|
116
|
+
* Removes and returns the last entry in the queue matching the `queueName`.
|
117
|
+
*
|
118
|
+
* @return {Promise<Object>}
|
119
|
+
* @private
|
120
|
+
*/
|
121
|
+
|
122
|
+
|
123
|
+
async popEntry() {
|
124
|
+
return this._removeEntry({
|
125
|
+
direction: 'prev'
|
126
|
+
});
|
127
|
+
}
|
128
|
+
/**
|
129
|
+
* Removes and returns the first entry in the queue matching the `queueName`.
|
130
|
+
*
|
131
|
+
* @return {Promise<Object>}
|
132
|
+
* @private
|
133
|
+
*/
|
134
|
+
|
135
|
+
|
136
|
+
async shiftEntry() {
|
137
|
+
return this._removeEntry({
|
138
|
+
direction: 'next'
|
139
|
+
});
|
140
|
+
}
|
141
|
+
/**
|
142
|
+
* Returns all entries in the store matching the `queueName`.
|
143
|
+
*
|
144
|
+
* @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
|
145
|
+
* @return {Promise<Array<Object>>}
|
146
|
+
* @private
|
147
|
+
*/
|
148
|
+
|
149
|
+
|
150
|
+
async getAll() {
|
151
|
+
return await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
152
|
+
index: INDEXED_PROP,
|
153
|
+
query: IDBKeyRange.only(this._queueName)
|
154
|
+
});
|
155
|
+
}
|
156
|
+
/**
|
157
|
+
* Deletes the entry for the given ID.
|
158
|
+
*
|
159
|
+
* WARNING: this method does not ensure the deleted enry belongs to this
|
160
|
+
* queue (i.e. matches the `queueName`). But this limitation is acceptable
|
161
|
+
* as this class is not publicly exposed. An additional check would make
|
162
|
+
* this method slower than it needs to be.
|
163
|
+
*
|
164
|
+
* @private
|
165
|
+
* @param {number} id
|
166
|
+
*/
|
167
|
+
|
168
|
+
|
169
|
+
async deleteEntry(id) {
|
170
|
+
await this._db.delete(OBJECT_STORE_NAME, id);
|
171
|
+
}
|
172
|
+
/**
|
173
|
+
* Removes and returns the first or last entry in the queue (based on the
|
174
|
+
* `direction` argument) matching the `queueName`.
|
175
|
+
*
|
176
|
+
* @return {Promise<Object>}
|
177
|
+
* @private
|
178
|
+
*/
|
179
|
+
|
180
|
+
|
181
|
+
async _removeEntry({
|
182
|
+
direction
|
183
|
+
}) {
|
184
|
+
const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
185
|
+
direction,
|
186
|
+
index: INDEXED_PROP,
|
187
|
+
query: IDBKeyRange.only(this._queueName),
|
188
|
+
count: 1
|
189
|
+
});
|
190
|
+
|
191
|
+
if (entry) {
|
192
|
+
await this.deleteEntry(entry.id);
|
193
|
+
return entry;
|
194
|
+
}
|
195
|
+
}
|
196
|
+
/**
|
197
|
+
* Upgrades the database given an `upgradeneeded` event.
|
198
|
+
*
|
199
|
+
* @param {Event} event
|
200
|
+
* @private
|
201
|
+
*/
|
202
|
+
|
203
|
+
|
204
|
+
_upgradeDb(event) {
|
205
|
+
const db = event.target.result;
|
206
|
+
|
207
|
+
if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
|
208
|
+
if (db.objectStoreNames.contains(OBJECT_STORE_NAME)) {
|
209
|
+
db.deleteObjectStore(OBJECT_STORE_NAME);
|
210
|
+
}
|
211
|
+
}
|
212
|
+
|
213
|
+
const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
|
214
|
+
autoIncrement: true,
|
215
|
+
keyPath: 'id'
|
216
|
+
});
|
217
|
+
objStore.createIndex(INDEXED_PROP, INDEXED_PROP, {
|
218
|
+
unique: false
|
219
|
+
});
|
220
|
+
}
|
221
|
+
|
222
|
+
}
|
223
|
+
|
224
|
+
/*
|
225
|
+
Copyright 2018 Google LLC
|
226
|
+
|
227
|
+
Use of this source code is governed by an MIT-style
|
228
|
+
license that can be found in the LICENSE file or at
|
229
|
+
https://opensource.org/licenses/MIT.
|
230
|
+
*/
|
231
|
+
const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];
|
232
|
+
/**
|
233
|
+
* A class to make it easier to serialize and de-serialize requests so they
|
234
|
+
* can be stored in IndexedDB.
|
235
|
+
*
|
236
|
+
* @private
|
237
|
+
*/
|
238
|
+
|
239
|
+
class StorableRequest {
|
240
|
+
/**
|
241
|
+
* Accepts an object of request data that can be used to construct a
|
242
|
+
* `Request` but can also be stored in IndexedDB.
|
243
|
+
*
|
244
|
+
* @param {Object} requestData An object of request data that includes the
|
245
|
+
* `url` plus any relevant properties of
|
246
|
+
* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
|
247
|
+
* @private
|
248
|
+
*/
|
249
|
+
constructor(requestData) {
|
250
|
+
{
|
251
|
+
assert_js.assert.isType(requestData, 'object', {
|
252
|
+
moduleName: 'workbox-background-sync',
|
253
|
+
className: 'StorableRequest',
|
254
|
+
funcName: 'constructor',
|
255
|
+
paramName: 'requestData'
|
256
|
+
});
|
257
|
+
assert_js.assert.isType(requestData.url, 'string', {
|
258
|
+
moduleName: 'workbox-background-sync',
|
259
|
+
className: 'StorableRequest',
|
260
|
+
funcName: 'constructor',
|
261
|
+
paramName: 'requestData.url'
|
262
|
+
});
|
263
|
+
} // If the request's mode is `navigate`, convert it to `same-origin` since
|
264
|
+
// navigation requests can't be constructed via script.
|
265
|
+
|
266
|
+
|
267
|
+
if (requestData['mode'] === 'navigate') {
|
268
|
+
requestData['mode'] = 'same-origin';
|
269
|
+
}
|
270
|
+
|
271
|
+
this._requestData = requestData;
|
272
|
+
}
|
273
|
+
/**
|
274
|
+
* Converts a Request object to a plain object that can be structured
|
275
|
+
* cloned or JSON-stringified.
|
276
|
+
*
|
277
|
+
* @param {Request} request
|
278
|
+
* @return {Promise<StorableRequest>}
|
279
|
+
*
|
280
|
+
* @private
|
281
|
+
*/
|
282
|
+
|
283
|
+
|
284
|
+
static async fromRequest(request) {
|
285
|
+
const requestData = {
|
286
|
+
url: request.url,
|
287
|
+
headers: {}
|
288
|
+
}; // Set the body if present.
|
289
|
+
|
290
|
+
if (request.method !== 'GET') {
|
291
|
+
// Use ArrayBuffer to support non-text request bodies.
|
292
|
+
// NOTE: we can't use Blobs becuse Safari doesn't support storing
|
293
|
+
// Blobs in IndexedDB in some cases:
|
294
|
+
// https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
|
295
|
+
requestData.body = await request.clone().arrayBuffer();
|
296
|
+
} // Convert the headers from an iterable to an object.
|
297
|
+
|
298
|
+
|
299
|
+
for (const [key, value] of request.headers.entries()) {
|
300
|
+
requestData.headers[key] = value;
|
301
|
+
} // Add all other serializable request properties
|
302
|
+
|
303
|
+
|
304
|
+
for (const prop of serializableProperties) {
|
305
|
+
if (request[prop] !== undefined) {
|
306
|
+
requestData[prop] = request[prop];
|
307
|
+
}
|
308
|
+
}
|
309
|
+
|
310
|
+
return new StorableRequest(requestData);
|
311
|
+
}
|
312
|
+
/**
|
313
|
+
* Returns a deep clone of the instances `_requestData` object.
|
314
|
+
*
|
315
|
+
* @return {Object}
|
316
|
+
*
|
317
|
+
* @private
|
318
|
+
*/
|
319
|
+
|
320
|
+
|
321
|
+
toObject() {
|
322
|
+
const requestData = Object.assign({}, this._requestData);
|
323
|
+
requestData.headers = Object.assign({}, this._requestData.headers);
|
324
|
+
|
325
|
+
if (requestData.body) {
|
326
|
+
requestData.body = requestData.body.slice(0);
|
327
|
+
}
|
328
|
+
|
329
|
+
return requestData;
|
330
|
+
}
|
331
|
+
/**
|
332
|
+
* Converts this instance to a Request.
|
333
|
+
*
|
334
|
+
* @return {Request}
|
335
|
+
*
|
336
|
+
* @private
|
337
|
+
*/
|
338
|
+
|
339
|
+
|
340
|
+
toRequest() {
|
341
|
+
return new Request(this._requestData.url, this._requestData);
|
342
|
+
}
|
343
|
+
/**
|
344
|
+
* Creates and returns a deep clone of the instance.
|
345
|
+
*
|
346
|
+
* @return {StorableRequest}
|
347
|
+
*
|
348
|
+
* @private
|
349
|
+
*/
|
350
|
+
|
351
|
+
|
352
|
+
clone() {
|
353
|
+
return new StorableRequest(this.toObject());
|
354
|
+
}
|
355
|
+
|
356
|
+
}
|
357
|
+
|
358
|
+
/*
|
359
|
+
Copyright 2018 Google LLC
|
360
|
+
|
361
|
+
Use of this source code is governed by an MIT-style
|
362
|
+
license that can be found in the LICENSE file or at
|
363
|
+
https://opensource.org/licenses/MIT.
|
364
|
+
*/
|
365
|
+
const TAG_PREFIX = 'workbox-background-sync';
|
366
|
+
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
|
367
|
+
|
368
|
+
const queueNames = new Set();
|
369
|
+
/**
|
370
|
+
* Converts a QueueStore entry into the format exposed by Queue. This entails
|
371
|
+
* converting the request data into a real request and omitting the `id` and
|
372
|
+
* `queueName` properties.
|
373
|
+
*
|
374
|
+
* @param {Object} queueStoreEntry
|
375
|
+
* @return {Object}
|
376
|
+
* @private
|
377
|
+
*/
|
378
|
+
|
379
|
+
const convertEntry = queueStoreEntry => {
|
380
|
+
const queueEntry = {
|
381
|
+
request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
|
382
|
+
timestamp: queueStoreEntry.timestamp
|
383
|
+
};
|
384
|
+
|
385
|
+
if (queueStoreEntry.metadata) {
|
386
|
+
queueEntry.metadata = queueStoreEntry.metadata;
|
387
|
+
}
|
388
|
+
|
389
|
+
return queueEntry;
|
390
|
+
};
|
391
|
+
/**
|
392
|
+
* A class to manage storing failed requests in IndexedDB and retrying them
|
393
|
+
* later. All parts of the storing and replaying process are observable via
|
394
|
+
* callbacks.
|
395
|
+
*
|
396
|
+
* @memberof module:workbox-background-sync
|
397
|
+
*/
|
398
|
+
|
399
|
+
|
400
|
+
class Queue {
|
401
|
+
/**
|
402
|
+
* Creates an instance of Queue with the given options
|
403
|
+
*
|
404
|
+
* @param {string} name The unique name for this queue. This name must be
|
405
|
+
* unique as it's used to register sync events and store requests
|
406
|
+
* in IndexedDB specific to this instance. An error will be thrown if
|
407
|
+
* a duplicate name is detected.
|
408
|
+
* @param {Object} [options]
|
409
|
+
* @param {Function} [options.onSync] A function that gets invoked whenever
|
410
|
+
* the 'sync' event fires. The function is invoked with an object
|
411
|
+
* containing the `queue` property (referencing this instance), and you
|
412
|
+
* can use the callback to customize the replay behavior of the queue.
|
413
|
+
* When not set the `replayRequests()` method is called.
|
414
|
+
* Note: if the replay fails after a sync event, make sure you throw an
|
415
|
+
* error, so the browser knows to retry the sync event later.
|
416
|
+
* @param {number} [options.maxRetentionTime=7 days] The amount of time (in
|
417
|
+
* minutes) a request may be retried. After this amount of time has
|
418
|
+
* passed, the request will be deleted from the queue.
|
419
|
+
*/
|
420
|
+
constructor(name, {
|
421
|
+
onSync,
|
422
|
+
maxRetentionTime
|
423
|
+
} = {}) {
|
424
|
+
this._syncInProgress = false;
|
425
|
+
this._requestsAddedDuringSync = false; // Ensure the store name is not already being used
|
426
|
+
|
427
|
+
if (queueNames.has(name)) {
|
428
|
+
throw new WorkboxError_js.WorkboxError('duplicate-queue-name', {
|
429
|
+
name
|
430
|
+
});
|
431
|
+
} else {
|
432
|
+
queueNames.add(name);
|
433
|
+
}
|
434
|
+
|
435
|
+
this._name = name;
|
436
|
+
this._onSync = onSync || this.replayRequests;
|
437
|
+
this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
|
438
|
+
this._queueStore = new QueueStore(this._name);
|
439
|
+
|
440
|
+
this._addSyncListener();
|
441
|
+
}
|
442
|
+
/**
|
443
|
+
* @return {string}
|
444
|
+
*/
|
445
|
+
|
446
|
+
|
447
|
+
get name() {
|
448
|
+
return this._name;
|
449
|
+
}
|
450
|
+
/**
|
451
|
+
* Stores the passed request in IndexedDB (with its timestamp and any
|
452
|
+
* metadata) at the end of the queue.
|
453
|
+
*
|
454
|
+
* @param {Object} entry
|
455
|
+
* @param {Request} entry.request The request to store in the queue.
|
456
|
+
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
457
|
+
* stored request. When requests are replayed you'll have access to this
|
458
|
+
* metadata object in case you need to modify the request beforehand.
|
459
|
+
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
460
|
+
* milliseconds) when the request was first added to the queue. This is
|
461
|
+
* used along with `maxRetentionTime` to remove outdated requests. In
|
462
|
+
* general you don't need to set this value, as it's automatically set
|
463
|
+
* for you (defaulting to `Date.now()`), but you can update it if you
|
464
|
+
* don't want particular requests to expire.
|
465
|
+
*/
|
466
|
+
|
467
|
+
|
468
|
+
async pushRequest(entry) {
|
469
|
+
{
|
470
|
+
assert_js.assert.isType(entry, 'object', {
|
471
|
+
moduleName: 'workbox-background-sync',
|
472
|
+
className: 'Queue',
|
473
|
+
funcName: 'pushRequest',
|
474
|
+
paramName: 'entry'
|
475
|
+
});
|
476
|
+
assert_js.assert.isInstance(entry.request, Request, {
|
477
|
+
moduleName: 'workbox-background-sync',
|
478
|
+
className: 'Queue',
|
479
|
+
funcName: 'pushRequest',
|
480
|
+
paramName: 'entry.request'
|
481
|
+
});
|
482
|
+
}
|
483
|
+
|
484
|
+
await this._addRequest(entry, 'push');
|
485
|
+
}
|
486
|
+
/**
|
487
|
+
* Stores the passed request in IndexedDB (with its timestamp and any
|
488
|
+
* metadata) at the beginning of the queue.
|
489
|
+
*
|
490
|
+
* @param {Object} entry
|
491
|
+
* @param {Request} entry.request The request to store in the queue.
|
492
|
+
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
493
|
+
* stored request. When requests are replayed you'll have access to this
|
494
|
+
* metadata object in case you need to modify the request beforehand.
|
495
|
+
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
496
|
+
* milliseconds) when the request was first added to the queue. This is
|
497
|
+
* used along with `maxRetentionTime` to remove outdated requests. In
|
498
|
+
* general you don't need to set this value, as it's automatically set
|
499
|
+
* for you (defaulting to `Date.now()`), but you can update it if you
|
500
|
+
* don't want particular requests to expire.
|
501
|
+
*/
|
502
|
+
|
503
|
+
|
504
|
+
async unshiftRequest(entry) {
|
505
|
+
{
|
506
|
+
assert_js.assert.isType(entry, 'object', {
|
507
|
+
moduleName: 'workbox-background-sync',
|
508
|
+
className: 'Queue',
|
509
|
+
funcName: 'unshiftRequest',
|
510
|
+
paramName: 'entry'
|
511
|
+
});
|
512
|
+
assert_js.assert.isInstance(entry.request, Request, {
|
513
|
+
moduleName: 'workbox-background-sync',
|
514
|
+
className: 'Queue',
|
515
|
+
funcName: 'unshiftRequest',
|
516
|
+
paramName: 'entry.request'
|
517
|
+
});
|
518
|
+
}
|
519
|
+
|
520
|
+
await this._addRequest(entry, 'unshift');
|
521
|
+
}
|
522
|
+
/**
|
523
|
+
* Removes and returns the last request in the queue (along with its
|
524
|
+
* timestamp and any metadata). The returned object takes the form:
|
525
|
+
* `{request, timestamp, metadata}`.
|
526
|
+
*
|
527
|
+
* @return {Promise<Object>}
|
528
|
+
*/
|
529
|
+
|
530
|
+
|
531
|
+
async popRequest() {
|
532
|
+
return this._removeRequest('pop');
|
533
|
+
}
|
534
|
+
/**
|
535
|
+
* Removes and returns the first request in the queue (along with its
|
536
|
+
* timestamp and any metadata). The returned object takes the form:
|
537
|
+
* `{request, timestamp, metadata}`.
|
538
|
+
*
|
539
|
+
* @return {Promise<Object>}
|
540
|
+
*/
|
541
|
+
|
542
|
+
|
543
|
+
async shiftRequest() {
|
544
|
+
return this._removeRequest('shift');
|
545
|
+
}
|
546
|
+
/**
|
547
|
+
* Returns all the entries that have not expired (per `maxRetentionTime`).
|
548
|
+
* Any expired entries are removed from the queue.
|
549
|
+
*
|
550
|
+
* @return {Promise<Array<Object>>}
|
551
|
+
*/
|
552
|
+
|
553
|
+
|
554
|
+
async getAll() {
|
555
|
+
const allEntries = await this._queueStore.getAll();
|
556
|
+
const now = Date.now();
|
557
|
+
const unexpiredEntries = [];
|
558
|
+
|
559
|
+
for (const entry of allEntries) {
|
560
|
+
// Ignore requests older than maxRetentionTime. Call this function
|
561
|
+
// recursively until an unexpired request is found.
|
562
|
+
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
563
|
+
|
564
|
+
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
565
|
+
await this._queueStore.deleteEntry(entry.id);
|
566
|
+
} else {
|
567
|
+
unexpiredEntries.push(convertEntry(entry));
|
568
|
+
}
|
569
|
+
}
|
570
|
+
|
571
|
+
return unexpiredEntries;
|
572
|
+
}
|
573
|
+
/**
|
574
|
+
* Adds the entry to the QueueStore and registers for a sync event.
|
575
|
+
*
|
576
|
+
* @param {Object} entry
|
577
|
+
* @param {Request} entry.request
|
578
|
+
* @param {Object} [entry.metadata]
|
579
|
+
* @param {number} [entry.timestamp=Date.now()]
|
580
|
+
* @param {string} operation ('push' or 'unshift')
|
581
|
+
* @private
|
582
|
+
*/
|
583
|
+
|
584
|
+
|
585
|
+
async _addRequest({
|
586
|
+
request,
|
587
|
+
metadata,
|
588
|
+
timestamp = Date.now()
|
589
|
+
}, operation) {
|
590
|
+
const storableRequest = await StorableRequest.fromRequest(request.clone());
|
591
|
+
const entry = {
|
592
|
+
requestData: storableRequest.toObject(),
|
593
|
+
timestamp
|
594
|
+
}; // Only include metadata if it's present.
|
595
|
+
|
596
|
+
if (metadata) {
|
597
|
+
entry.metadata = metadata;
|
598
|
+
}
|
599
|
+
|
600
|
+
await this._queueStore[`${operation}Entry`](entry);
|
601
|
+
|
602
|
+
{
|
603
|
+
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
|
604
|
+
} // Don't register for a sync if we're in the middle of a sync. Instead,
|
605
|
+
// we wait until the sync is complete and call register if
|
606
|
+
// `this._requestsAddedDuringSync` is true.
|
607
|
+
|
608
|
+
|
609
|
+
if (this._syncInProgress) {
|
610
|
+
this._requestsAddedDuringSync = true;
|
611
|
+
} else {
|
612
|
+
await this.registerSync();
|
613
|
+
}
|
614
|
+
}
|
615
|
+
/**
|
616
|
+
* Removes and returns the first or last (depending on `operation`) entry
|
617
|
+
* from the QueueStore that's not older than the `maxRetentionTime`.
|
618
|
+
*
|
619
|
+
* @param {string} operation ('pop' or 'shift')
|
620
|
+
* @return {Object|undefined}
|
621
|
+
* @private
|
622
|
+
*/
|
623
|
+
|
624
|
+
|
625
|
+
async _removeRequest(operation) {
|
626
|
+
const now = Date.now();
|
627
|
+
const entry = await this._queueStore[`${operation}Entry`]();
|
628
|
+
|
629
|
+
if (entry) {
|
630
|
+
// Ignore requests older than maxRetentionTime. Call this function
|
631
|
+
// recursively until an unexpired request is found.
|
632
|
+
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
633
|
+
|
634
|
+
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
635
|
+
return this._removeRequest(operation);
|
636
|
+
}
|
637
|
+
|
638
|
+
return convertEntry(entry);
|
639
|
+
} else {
|
640
|
+
return undefined;
|
641
|
+
}
|
642
|
+
}
|
643
|
+
/**
|
644
|
+
* Loops through each request in the queue and attempts to re-fetch it.
|
645
|
+
* If any request fails to re-fetch, it's put back in the same position in
|
646
|
+
* the queue (which registers a retry for the next sync event).
|
647
|
+
*/
|
648
|
+
|
649
|
+
|
650
|
+
async replayRequests() {
|
651
|
+
let entry;
|
652
|
+
|
653
|
+
while (entry = await this.shiftRequest()) {
|
654
|
+
try {
|
655
|
+
await fetch(entry.request.clone());
|
656
|
+
|
657
|
+
if ("dev" !== 'production') {
|
658
|
+
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `has been replayed in queue '${this._name}'`);
|
659
|
+
}
|
660
|
+
} catch (error) {
|
661
|
+
await this.unshiftRequest(entry);
|
662
|
+
|
663
|
+
{
|
664
|
+
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `failed to replay, putting it back in queue '${this._name}'`);
|
665
|
+
}
|
666
|
+
|
667
|
+
throw new WorkboxError_js.WorkboxError('queue-replay-failed', {
|
668
|
+
name: this._name
|
669
|
+
});
|
670
|
+
}
|
671
|
+
}
|
672
|
+
|
673
|
+
{
|
674
|
+
logger_js.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
|
675
|
+
}
|
676
|
+
}
|
677
|
+
/**
|
678
|
+
* Registers a sync event with a tag unique to this instance.
|
679
|
+
*/
|
680
|
+
|
681
|
+
|
682
|
+
async registerSync() {
|
683
|
+
if ('sync' in self.registration) {
|
684
|
+
try {
|
685
|
+
await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
|
686
|
+
} catch (err) {
|
687
|
+
// This means the registration failed for some reason, possibly due to
|
688
|
+
// the user disabling it.
|
689
|
+
{
|
690
|
+
logger_js.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
|
691
|
+
}
|
692
|
+
}
|
693
|
+
}
|
694
|
+
}
|
695
|
+
/**
|
696
|
+
* In sync-supporting browsers, this adds a listener for the sync event.
|
697
|
+
* In non-sync-supporting browsers, this will retry the queue on service
|
698
|
+
* worker startup.
|
699
|
+
*
|
700
|
+
* @private
|
701
|
+
*/
|
702
|
+
|
703
|
+
|
704
|
+
_addSyncListener() {
|
705
|
+
if ('sync' in self.registration) {
|
706
|
+
self.addEventListener('sync', event => {
|
707
|
+
if (event.tag === `${TAG_PREFIX}:${this._name}`) {
|
708
|
+
{
|
709
|
+
logger_js.logger.log(`Background sync for tag '${event.tag}'` + `has been received`);
|
710
|
+
}
|
711
|
+
|
712
|
+
const syncComplete = async () => {
|
713
|
+
this._syncInProgress = true;
|
714
|
+
let syncError;
|
715
|
+
|
716
|
+
try {
|
717
|
+
await this._onSync({
|
718
|
+
queue: this
|
719
|
+
});
|
720
|
+
} catch (error) {
|
721
|
+
syncError = error; // Rethrow the error. Note: the logic in the finally clause
|
722
|
+
// will run before this gets rethrown.
|
723
|
+
|
724
|
+
throw syncError;
|
725
|
+
} finally {
|
726
|
+
// New items may have been added to the queue during the sync,
|
727
|
+
// so we need to register for a new sync if that's happened...
|
728
|
+
// Unless there was an error during the sync, in which
|
729
|
+
// case the browser will automatically retry later, as long
|
730
|
+
// as `event.lastChance` is not true.
|
731
|
+
if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
|
732
|
+
await this.registerSync();
|
733
|
+
}
|
734
|
+
|
735
|
+
this._syncInProgress = false;
|
736
|
+
this._requestsAddedDuringSync = false;
|
737
|
+
}
|
738
|
+
};
|
739
|
+
|
740
|
+
event.waitUntil(syncComplete());
|
741
|
+
}
|
742
|
+
});
|
743
|
+
} else {
|
744
|
+
{
|
745
|
+
logger_js.logger.log(`Background sync replaying without background sync event`);
|
746
|
+
} // If the browser doesn't support background sync, retry
|
747
|
+
// every time the service worker starts up as a fallback.
|
748
|
+
|
749
|
+
|
750
|
+
this._onSync({
|
751
|
+
queue: this
|
752
|
+
});
|
753
|
+
}
|
754
|
+
}
|
755
|
+
/**
 * Returns the set of queue names. This is primarily used to reset the list
 * of queue names in tests.
 *
 * @return {Set} The module-level set of names of all live Queue instances.
 *
 * @private
 */
static get _queueNames() {
  // `queueNames` is a module-scoped Set shared by all Queue instances;
  // exposing it as a static getter lets tests clear it between runs.
  return queueNames;
}
|
768
|
+
|
769
|
+
}
|
770
|
+
|
771
|
+
/*
|
772
|
+
Copyright 2018 Google LLC
|
773
|
+
|
774
|
+
Use of this source code is governed by an MIT-style
|
775
|
+
license that can be found in the LICENSE file or at
|
776
|
+
https://opensource.org/licenses/MIT.
|
777
|
+
*/
|
778
|
+
/**
 * A class implementing the `fetchDidFail` lifecycle callback. This makes it
 * easier to add failed requests to a background sync Queue.
 *
 * @memberof module:workbox-background-sync
 */

class BackgroundSyncPlugin {
  /**
   * @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
   * documentation for parameter details.
   * @param {Object} [options] See the
   * [Queue]{@link module:workbox-background-sync.Queue} documentation for
   * parameter details.
   */
  constructor(name, options) {
    // The backing Queue that failed requests get pushed onto.
    this._queue = new Queue(name, options);

    /**
     * `fetchDidFail` lifecycle callback: queues the failed request so it
     * can be replayed on the next sync.
     *
     * @param {Object} options
     * @param {Request} options.request
     * @private
     */
    this.fetchDidFail = async ({ request }) => {
      await this._queue.pushRequest({ request });
    };
  }

}
|
811
|
+
|
812
|
+
exports.BackgroundSyncPlugin = BackgroundSyncPlugin;
|
813
|
+
exports.Queue = Queue;
|
814
|
+
|
815
|
+
return exports;
|
816
|
+
|
817
|
+
}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private));
|
818
|
+
//# sourceMappingURL=workbox-background-sync.dev.js.map
|