jekyll-pwa-workbox 0.0.1 → 0.0.3
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/jekyll-pwa-workbox.rb +125 -125
- data/lib/vendor/workbox-v4.1.1/workbox-background-sync.dev.js +729 -0
- data/lib/vendor/workbox-v4.1.1/workbox-background-sync.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-background-sync.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-background-sync.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-broadcast-update.dev.js +482 -0
- data/lib/vendor/workbox-v4.1.1/workbox-broadcast-update.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-broadcast-update.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-broadcast-update.prod.js.map +1 -0
- data/lib/vendor/{workbox-v3.6.3 → workbox-v4.1.1}/workbox-cacheable-response.dev.js +34 -70
- data/lib/vendor/workbox-v4.1.1/workbox-cacheable-response.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-cacheable-response.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-cacheable-response.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-core.dev.js +1584 -0
- data/lib/vendor/workbox-v4.1.1/workbox-core.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-core.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-core.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-expiration.dev.js +633 -0
- data/lib/vendor/workbox-v4.1.1/workbox-expiration.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-expiration.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-expiration.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-navigation-preload.dev.js +110 -0
- data/lib/vendor/workbox-v4.1.1/workbox-navigation-preload.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-navigation-preload.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-navigation-preload.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-offline-ga.dev.js +243 -0
- data/lib/vendor/workbox-v4.1.1/workbox-offline-ga.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-offline-ga.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-offline-ga.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-precaching.dev.js +942 -0
- data/lib/vendor/workbox-v4.1.1/workbox-precaching.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-precaching.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-precaching.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-range-requests.dev.js +268 -0
- data/lib/vendor/workbox-v4.1.1/workbox-range-requests.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-range-requests.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-range-requests.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-routing.dev.js +1020 -0
- data/lib/vendor/workbox-v4.1.1/workbox-routing.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-routing.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-routing.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-strategies.dev.js +1138 -0
- data/lib/vendor/workbox-v4.1.1/workbox-strategies.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-strategies.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-strategies.prod.js.map +1 -0
- data/lib/vendor/{workbox-v3.6.3 → workbox-v4.1.1}/workbox-streams.dev.js +108 -151
- data/lib/vendor/workbox-v4.1.1/workbox-streams.dev.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-streams.prod.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-streams.prod.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-sw.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-sw.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.es5.mjs +879 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.es5.mjs.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.mjs +745 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.mjs.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.umd.js +890 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.dev.umd.js.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.es5.mjs +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.es5.mjs.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.mjs +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.mjs.map +1 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.umd.js +2 -0
- data/lib/vendor/workbox-v4.1.1/workbox-window.prod.umd.js.map +1 -0
- metadata +70 -58
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.dev.js +0 -593
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-background-sync.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.dev.js +0 -395
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-broadcast-cache-update.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.dev.js +0 -740
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-cache-expiration.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-cacheable-response.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-core.dev.js +0 -1736
- data/lib/vendor/workbox-v3.6.3/workbox-core.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-core.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-core.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.dev.js +0 -255
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-google-analytics.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.dev.js +0 -159
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-navigation-preload.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.dev.js +0 -1171
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-precaching.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.dev.js +0 -299
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-range-requests.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-routing.dev.js +0 -863
- data/lib/vendor/workbox-v3.6.3/workbox-routing.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-routing.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-routing.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.dev.js +0 -1172
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-strategies.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-streams.dev.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-streams.prod.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-streams.prod.js.map +0 -1
- data/lib/vendor/workbox-v3.6.3/workbox-sw.js +0 -3
- data/lib/vendor/workbox-v3.6.3/workbox-sw.js.map +0 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f2d25a2da53802619324b98c3ba7456c6e9a064b0f186fbfaf7013f1d073ff5b
+  data.tar.gz: 480a8ff15bc29fb2eb151f4cbc1db1dc7863bbbf4b006d4128250e62b830cef4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 17b5256c6fc933be0c06009f3578c690acf9aa8d14fa1f03d42105fbe6ad4e88e357598e24526746e641744946f8e64496afaa9529e97ab71bebed81dce3c438
+  data.tar.gz: 3f5a7ce67d4ed5674e4da3e5b4681fc0ab5e94e72b2f69e3c24aad2e933fb2391a0dfec9f60e59043893b0f746817bc956109c129c4091914929aa5cae479a84
data/lib/jekyll-pwa-workbox.rb
CHANGED
@@ -1,125 +1,125 @@
 class SWHelper
-    WORKBOX_VERSION = '
+    WORKBOX_VERSION = 'v4.1.1'
     def initialize(site, config)
         @site = site
         @config = config
         @sw_filename = @config['sw_dest_filename'] || 'service-worker.js'
         @sw_src_filepath = @config['sw_src_filepath'] || 'service-worker.js'
     end

     def write_sw_register()
         sw_register_filename = 'sw-register.js'
         sw_register_file = File.new(@site.in_dest_dir(sw_register_filename), 'w')
         # add build version in url params
         sw_register_file.puts(
             <<-SCRIPT
             "serviceWorker"in navigator&&navigator.serviceWorker.register("#{@site.baseurl.to_s}/#{@sw_filename}?v=#{@site.time.to_i.to_s}").then(function(e){e.onupdatefound=function(){var t=e.installing;t.onstatechange=function(){switch(t.state){case"installed":if(navigator.serviceWorker.controller){var e=document.createEvent("Event");e.initEvent("sw.update",!0,!0),window.dispatchEvent(e)}}}}}).catch(function(e){console.error("Error during service worker registration:",e)});
             SCRIPT
         )
         sw_register_file.close
     end

     def generate_workbox_precache()
         directory = @config['precache_glob_directory'] || '/'
         directory = @site.in_dest_dir(directory)
         patterns = @config['precache_glob_patterns'] || ['**/*.{html,js,css,eot,svg,ttf,woff}']
         ignores = @config['precache_glob_ignores'] || []
         recent_posts_num = @config['precache_recent_posts_num']

         # according to workbox precache {url: 'main.js', revision: 'xxxx'}
         @precache_list = []

         # find precache files with glob
         precache_files = []
         patterns.each do |pattern|
             Dir.glob(File.join(directory, pattern)) do |filepath|
                 precache_files.push(filepath)
             end
         end
         precache_files = precache_files.uniq

         # precache recent n posts
         posts_path_url_map = {}
         if recent_posts_num
             precache_files.concat(
                 @site.posts.docs
                     .reverse.take(recent_posts_num)
                     .map do |post|
                         posts_path_url_map[post.path] = post.url
                         post.path
                     end
             )
         end

         # filter with ignores
         ignores.each do |pattern|
             Dir.glob(File.join(directory, pattern)) do |ignored_filepath|
                 precache_files.delete(ignored_filepath)
             end
         end

         # generate md5 for each precache file
         md5 = Digest::MD5.new
         precache_files.each do |filepath|
             md5.reset
             md5 << File.read(filepath)
             if posts_path_url_map[filepath]
                 url = posts_path_url_map[filepath]
             else
                 url = filepath.sub(@site.dest, '')
             end
             @precache_list.push({
                 url: @site.baseurl.to_s + url,
                 revision: md5.hexdigest
             })
         end
     end

     def write_sw()

         dest_js_directory = @config['dest_js_directory'] || 'js'

         # copy polyfill & workbox.js to js/
         script_directory = @site.in_dest_dir(dest_js_directory)
         FileUtils.mkdir_p(script_directory) unless Dir.exist?(script_directory)
         FileUtils.cp_r(File.expand_path('../vendor/', __FILE__) + '/.', script_directory)

         # generate precache list
         precache_list_str = @precache_list.map do |precache_item|
             precache_item.to_json
         end
         .join(",")

         # write service-worker.js
         sw_src_file_str = File.read(@site.in_source_dir(@sw_src_filepath))
         workbox_dir = File.join(@site.baseurl.to_s, dest_js_directory, "workbox-#{SWHelper::WORKBOX_VERSION}")
         import_scripts_str =
             <<-SCRIPT
             importScripts("#{workbox_dir}/workbox-sw.js");
             workbox.setConfig({modulePathPrefix: "#{workbox_dir}"});
             SCRIPT

         sw_dest_file = File.new(@site.in_dest_dir(@sw_filename), 'w')
         sw_dest_file.puts(
             <<-SCRIPT
             #{import_scripts_str}
             self.__precacheManifest = [#{precache_list_str}];
             #{sw_src_file_str}
             SCRIPT
         )
         sw_dest_file.close
     end
 end

 module Jekyll

     Hooks.register :site, :post_write do |site|
         pwa_config = site.config['pwa'] || {}
         sw_helper = SWHelper.new(site, pwa_config)

         sw_helper.write_sw_register()
         sw_helper.generate_workbox_precache()
         sw_helper.write_sw()
     end

 end
data/lib/vendor/workbox-v4.1.1/workbox-background-sync.dev.js
ADDED
@@ -0,0 +1,729 @@
+this.workbox = this.workbox || {};
+this.workbox.backgroundSync = (function (exports, WorkboxError_mjs, logger_mjs, assert_mjs, getFriendlyURL_mjs, DBWrapper_mjs) {
+  'use strict';
+
+  try {
+    self['workbox:background-sync:4.1.1'] && _();
+  } catch (e) {} // eslint-disable-line
+
+  /*
+    Copyright 2018 Google LLC
+
+    Use of this source code is governed by an MIT-style
+    license that can be found in the LICENSE file or at
+    https://opensource.org/licenses/MIT.
+  */
+  const DB_VERSION = 3;
+  const DB_NAME = 'workbox-background-sync';
+  const OBJECT_STORE_NAME = 'requests';
+  const INDEXED_PROP = 'queueName';
+  /**
+   * A class to manage storing requests from a Queue in IndexedbDB,
+   * indexed by their queue name for easier access.
+   *
+   * @private
+   */
+
+  class QueueStore {
+    /**
+     * Associates this instance with a Queue instance, so entries added can be
+     * identified by their queue name.
+     *
+     * @param {string} queueName
+     * @private
+     */
+    constructor(queueName) {
+      this._queueName = queueName;
+      this._db = new DBWrapper_mjs.DBWrapper(DB_NAME, DB_VERSION, {
+        onupgradeneeded: evt => this._upgradeDb(evt)
+      });
+    }
+    /**
+     * Append an entry last in the queue.
+     *
+     * @param {Object} entry
+     * @param {Object} entry.requestData
+     * @param {number} [entry.timestamp]
+     * @param {Object} [entry.metadata]
+     */
+
+
+    async pushEntry(entry) {
+      {
+        assert_mjs.assert.isType(entry, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'QueueStore',
+          funcName: 'pushEntry',
+          paramName: 'entry'
+        });
+        assert_mjs.assert.isType(entry.requestData, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'QueueStore',
+          funcName: 'pushEntry',
+          paramName: 'entry.requestData'
+        });
+      } // Don't specify an ID since one is automatically generated.
+
+
+      delete entry.id;
+      entry.queueName = this._queueName;
+      await this._db.add(OBJECT_STORE_NAME, entry);
+    }
+    /**
+     * Preppend an entry first in the queue.
+     *
+     * @param {Object} entry
+     * @param {Object} entry.requestData
+     * @param {number} [entry.timestamp]
+     * @param {Object} [entry.metadata]
+     */
+
+
+    async unshiftEntry(entry) {
+      {
+        assert_mjs.assert.isType(entry, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'QueueStore',
+          funcName: 'unshiftEntry',
+          paramName: 'entry'
+        });
+        assert_mjs.assert.isType(entry.requestData, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'QueueStore',
+          funcName: 'unshiftEntry',
+          paramName: 'entry.requestData'
+        });
+      }
+
+      const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
+        count: 1
+      });
+
+      if (firstEntry) {
+        // Pick an ID one less than the lowest ID in the object store.
+        entry.id = firstEntry.id - 1;
+      } else {
+        delete entry.id;
+      }
+
+      entry.queueName = this._queueName;
+      await this._db.add(OBJECT_STORE_NAME, entry);
+    }
+    /**
+     * Removes and returns the last entry in the queue matching the `queueName`.
+     *
+     * @return {Promise<Object>}
+     */
+
+
+    async popEntry() {
+      return this._removeEntry({
+        direction: 'prev'
+      });
+    }
+    /**
+     * Removes and returns the first entry in the queue matching the `queueName`.
+     *
+     * @return {Promise<Object>}
+     */
+
+
+    async shiftEntry() {
+      return this._removeEntry({
+        direction: 'next'
+      });
+    }
+    /**
+     * Removes and returns the first or last entry in the queue (based on the
+     * `direction` argument) matching the `queueName`.
+     *
+     * @return {Promise<Object>}
+     */
+
+
+    async _removeEntry({
+      direction
+    }) {
+      const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
+        direction,
+        index: INDEXED_PROP,
+        query: IDBKeyRange.only(this._queueName),
+        count: 1
+      });
+
+      if (entry) {
+        await this._db.delete(OBJECT_STORE_NAME, entry.id); // Dont' expose the ID or queueName;
+
+        delete entry.id;
+        delete entry.queueName;
+        return entry;
+      }
+    }
+    /**
+     * Upgrades the database given an `upgradeneeded` event.
+     *
+     * @param {Event} event
+     */
+
+
+    _upgradeDb(event) {
+      const db = event.target.result;
+
+      if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
+        db.deleteObjectStore(OBJECT_STORE_NAME);
+      }
+
+      const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
+        autoIncrement: true,
+        keyPath: 'id'
+      });
+      objStore.createIndex(INDEXED_PROP, INDEXED_PROP, {
+        unique: false
+      });
+    }
+
+  }
+
+  /*
+    Copyright 2018 Google LLC
+
+    Use of this source code is governed by an MIT-style
+    license that can be found in the LICENSE file or at
+    https://opensource.org/licenses/MIT.
+  */
+  const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];
+  /**
+   * A class to make it easier to serialize and de-serialize requests so they
+   * can be stored in IndexedDB.
+   *
+   * @private
+   */
+
+  class StorableRequest {
+    /**
+     * Converts a Request object to a plain object that can be structured
+     * cloned or JSON-stringified.
+     *
+     * @param {Request} request
+     * @return {Promise<StorableRequest>}
+     *
+     * @private
+     */
+    static async fromRequest(request) {
+      const requestData = {
+        url: request.url,
+        headers: {}
+      }; // Set the body if present.
+
+      if (request.method !== 'GET') {
+        // Use ArrayBuffer to support non-text request bodies.
+        // NOTE: we can't use Blobs becuse Safari doesn't support storing
+        // Blobs in IndexedDB in some cases:
+        // https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
+        requestData.body = await request.clone().arrayBuffer();
+      } // Convert the headers from an iterable to an object.
+
+
+      for (const [key, value] of request.headers.entries()) {
+        requestData.headers[key] = value;
+      } // Add all other serializable request properties
+
+
+      for (const prop of serializableProperties) {
+        if (request[prop] !== undefined) {
+          requestData[prop] = request[prop];
+        }
+      }
+
+      return new StorableRequest(requestData);
+    }
+    /**
+     * Accepts an object of request data that can be used to construct a
+     * `Request` but can also be stored in IndexedDB.
+     *
+     * @param {Object} requestData An object of request data that includes the
+     * `url` plus any relevant properties of
+     * [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
+     * @private
+     */
+
+
+    constructor(requestData) {
+      {
+        assert_mjs.assert.isType(requestData, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'StorableRequest',
+          funcName: 'constructor',
+          paramName: 'requestData'
+        });
+        assert_mjs.assert.isType(requestData.url, 'string', {
+          moduleName: 'workbox-background-sync',
+          className: 'StorableRequest',
+          funcName: 'constructor',
+          paramName: 'requestData.url'
+        });
+      }
+
+      this._requestData = requestData;
+    }
+    /**
+     * Returns a deep clone of the instances `_requestData` object.
+     *
+     * @return {Object}
+     *
+     * @private
+     */
+
+
+    toObject() {
+      const requestData = Object.assign({}, this._requestData);
+      requestData.headers = Object.assign({}, this._requestData.headers);
+
+      if (requestData.body) {
+        requestData.body = requestData.body.slice(0);
+      }
+
+      return requestData;
+    }
+    /**
+     * Converts this instance to a Request.
+     *
+     * @return {Request}
+     *
+     * @private
+     */
+
+
+    toRequest() {
+      return new Request(this._requestData.url, this._requestData);
+    }
+    /**
+     * Creates and returns a deep clone of the instance.
+     *
+     * @return {StorableRequest}
+     *
+     * @private
+     */
+
+
+    clone() {
+      return new StorableRequest(this.toObject());
+    }
+
+  }
+
+  /*
+    Copyright 2018 Google LLC
+
+    Use of this source code is governed by an MIT-style
+    license that can be found in the LICENSE file or at
+    https://opensource.org/licenses/MIT.
+  */
+  const TAG_PREFIX = 'workbox-background-sync';
+  const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
+
+  const queueNames = new Set();
+  /**
+   * A class to manage storing failed requests in IndexedDB and retrying them
+   * later. All parts of the storing and replaying process are observable via
+   * callbacks.
+   *
+   * @memberof workbox.backgroundSync
+   */
+
+  class Queue {
+    /**
+     * Creates an instance of Queue with the given options
+     *
+     * @param {string} name The unique name for this queue. This name must be
+     * unique as it's used to register sync events and store requests
+     * in IndexedDB specific to this instance. An error will be thrown if
+     * a duplicate name is detected.
+     * @param {Object} [options]
+     * @param {Function} [options.onSync] A function that gets invoked whenever
+     * the 'sync' event fires. The function is invoked with an object
+     * containing the `queue` property (referencing this instance), and you
+     * can use the callback to customize the replay behavior of the queue.
+     * When not set the `replayRequests()` method is called.
+     * Note: if the replay fails after a sync event, make sure you throw an
+     * error, so the browser knows to retry the sync event later.
+     * @param {number} [options.maxRetentionTime=7 days] The amount of time (in
+     * minutes) a request may be retried. After this amount of time has
+     * passed, the request will be deleted from the queue.
+     */
+    constructor(name, {
+      onSync,
+      maxRetentionTime
+    } = {}) {
+      // Ensure the store name is not already being used
+      if (queueNames.has(name)) {
+        throw new WorkboxError_mjs.WorkboxError('duplicate-queue-name', {
+          name
+        });
+      } else {
+        queueNames.add(name);
+      }
+
+      this._name = name;
+      this._onSync = onSync || this.replayRequests;
+      this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
+      this._queueStore = new QueueStore(this._name);
+
+      this._addSyncListener();
+    }
+    /**
+     * @return {string}
+     */
+
+
+    get name() {
+      return this._name;
+    }
+    /**
+     * Stores the passed request in IndexedDB (with its timestamp and any
+     * metadata) at the end of the queue.
+     *
+     * @param {Object} entry
+     * @param {Request} entry.request The request to store in the queue.
+     * @param {Object} [entry.metadata] Any metadata you want associated with the
+     * stored request. When requests are replayed you'll have access to this
+     * metadata object in case you need to modify the request beforehand.
+     * @param {number} [entry.timestamp] The timestamp (Epoch time in
+     * milliseconds) when the request was first added to the queue. This is
+     * used along with `maxRetentionTime` to remove outdated requests. In
+     * general you don't need to set this value, as it's automatically set
+     * for you (defaulting to `Date.now()`), but you can update it if you
+     * don't want particular requests to expire.
+     */
+
+
+    async pushRequest(entry) {
+      {
+        assert_mjs.assert.isType(entry, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'Queue',
+          funcName: 'pushRequest',
+          paramName: 'entry'
+        });
+        assert_mjs.assert.isInstance(entry.request, Request, {
+          moduleName: 'workbox-background-sync',
+          className: 'Queue',
+          funcName: 'pushRequest',
+          paramName: 'entry.request'
+        });
+      }
+
+      await this._addRequest(entry, 'push');
+    }
+    /**
+     * Stores the passed request in IndexedDB (with its timestamp and any
+     * metadata) at the beginning of the queue.
+     *
+     * @param {Object} entry
+     * @param {Request} entry.request The request to store in the queue.
+     * @param {Object} [entry.metadata] Any metadata you want associated with the
+     * stored request. When requests are replayed you'll have access to this
+     * metadata object in case you need to modify the request beforehand.
+     * @param {number} [entry.timestamp] The timestamp (Epoch time in
+     * milliseconds) when the request was first added to the queue. This is
+     * used along with `maxRetentionTime` to remove outdated requests. In
+     * general you don't need to set this value, as it's automatically set
+     * for you (defaulting to `Date.now()`), but you can update it if you
+     * don't want particular requests to expire.
+     */
+
+
+    async unshiftRequest(entry) {
+      {
+        assert_mjs.assert.isType(entry, 'object', {
+          moduleName: 'workbox-background-sync',
+          className: 'Queue',
+          funcName: 'unshiftRequest',
+          paramName: 'entry'
+        });
+        assert_mjs.assert.isInstance(entry.request, Request, {
+          moduleName: 'workbox-background-sync',
+          className: 'Queue',
+          funcName: 'unshiftRequest',
+          paramName: 'entry.request'
+        });
+      }
+
+      await this._addRequest(entry, 'unshift');
+    }
+    /**
+     * Removes and returns the last request in the queue (along with its
+     * timestamp and any metadata). The returned object takes the form:
+     * `{request, timestamp, metadata}`.
+     *
+     * @return {Promise<Object>}
+     */
+
+
+    async popRequest() {
+      return this._removeRequest('pop');
+    }
+    /**
+     * Removes and returns the first request in the queue (along with its
+     * timestamp and any metadata). The returned object takes the form:
+     * `{request, timestamp, metadata}`.
+     *
+     * @return {Promise<Object>}
+     */
+
+
+    async shiftRequest() {
+      return this._removeRequest('shift');
+    }
+    /**
+     * Adds the entry to the QueueStore and registers for a sync event.
+     *
+     * @param {Object} entry
+     * @param {Request} entry.request
+     * @param {Object} [entry.metadata]
+     * @param {number} [entry.timestamp=Date.now()]
+     * @param {string} operation ('push' or 'unshift')
+     */
+
+
+    async _addRequest({
+      request,
+      metadata,
+      timestamp = Date.now()
+    }, operation) {
+      const storableRequest = await StorableRequest.fromRequest(request.clone());
+      const entry = {
+        requestData: storableRequest.toObject(),
+        timestamp
+      }; // Only include metadata if it's present.
+
+      if (metadata) {
+        entry.metadata = metadata;
+      }
+
+      await this._queueStore[`${operation}Entry`](entry);
+
+      {
+        logger_mjs.logger.log(`Request for '${getFriendlyURL_mjs.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
+      } // Don't register for a sync if we're in the middle of a sync. Instead,
+      // we wait until the sync is complete and call register if
+      // `this._requestsAddedDuringSync` is true.
+
+
+      if (this._syncInProgress) {
+        this._requestsAddedDuringSync = true;
+      } else {
+        await this.registerSync();
+      }
+    }
+    /**
+     * Removes and returns the first or last (depending on `operation`) entry
+     * form the QueueStore that's not older than the `maxRetentionTime`.
+     *
+     * @param {string} operation ('pop' or 'shift')
+     * @return {Object|undefined}
+     */
+
+
+    async _removeRequest(operation) {
+      const now = Date.now();
+      const entry = await this._queueStore[`${operation}Entry`]();
+
+      if (entry) {
+        // Ignore requests older than maxRetentionTime. Call this function
+        // recursively until an unexpired request is found.
+        const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
+
+        if (now - entry.timestamp > maxRetentionTimeInMs) {
+          return this._removeRequest(operation);
+        }
+
+        entry.request = new StorableRequest(entry.requestData).toRequest();
+        delete entry.requestData;
+        return entry;
+      }
+    }
+    /**
+     * Loops through each request in the queue and attempts to re-fetch it.
+     * If any request fails to re-fetch, it's put back in the same position in
+     * the queue (which registers a retry for the next sync event).
+     */
+
+
+    async replayRequests() {
+      let entry;
+
+      while (entry = await this.shiftRequest()) {
+        try {
+          await fetch(entry.request);
+
+          {
+            logger_mjs.logger.log(`Request for '${getFriendlyURL_mjs.getFriendlyURL(entry.request.url)}'` + `has been replayed in queue '${this._name}'`);
+          }
+        } catch (error) {
+          await this.unshiftRequest(entry);
+
+          {
+            logger_mjs.logger.log(`Request for '${getFriendlyURL_mjs.getFriendlyURL(entry.request.url)}'` + `failed to replay, putting it back in queue '${this._name}'`);
+          }
+
+          throw new WorkboxError_mjs.WorkboxError('queue-replay-failed', {
+            name: this._name
+          });
+        }
+      }
+
+      {
+        logger_mjs.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
+      }
+    }
+    /**
+     * Registers a sync event with a tag unique to this instance.
+     */
+
+
+    async registerSync() {
+      if ('sync' in registration) {
+        try {
+          await registration.sync.register(`${TAG_PREFIX}:${this._name}`);
+        } catch (err) {
+          // This means the registration failed for some reason, possibly due to
+          // the user disabling it.
+          {
+            logger_mjs.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
+          }
+        }
+      }
+    }
+    /**
+     * In sync-supporting browsers, this adds a listener for the sync event.
+     * In non-sync-supporting browsers, this will retry the queue on service
+     * worker startup.
+     *
+     * @private
+     */
+
+
+    _addSyncListener() {
+      if ('sync' in registration) {
+        self.addEventListener('sync', event => {
+          if (event.tag === `${TAG_PREFIX}:${this._name}`) {
+            {
+              logger_mjs.logger.log(`Background sync for tag '${event.tag}'` + `has been received`);
+            }
+
+            const syncComplete = async () => {
+              this._syncInProgress = true;
+              let syncError;
+
+              try {
+                await this._onSync({
+                  queue: this
+                });
+              } catch (error) {
+                syncError = error; // Rethrow the error. Note: the logic in the finally clause
+                // will run before this gets rethrown.
+
+                throw syncError;
+              } finally {
+                // New items may have been added to the queue during the sync,
+                // so we need to register for a new sync if that's happened...
+                // Unless there was an error during the sync, in which
+                // case the browser will automatically retry later, as long
+                // as `event.lastChance` is not true.
+                if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
+                  await this.registerSync();
+                }
+
+                this._syncInProgress = false;
+                this._requestsAddedDuringSync = false;
+              }
+            };
+
+            event.waitUntil(syncComplete());
+          }
+        });
+      } else {
+        {
+          logger_mjs.logger.log(`Background sync replaying without background sync event`);
+        } // If the browser doesn't support background sync, retry
+        // every time the service worker starts up as a fallback.
+
+
+        this._onSync({
+          queue: this
+        });
+      }
+    }
+    /**
+     * Returns the set of queue names. This is primarily used to reset the list
+     * of queue names in tests.
+     *
+     * @return {Set}
+     *
+     * @private
+     */
+
+
+    static get _queueNames() {
+      return queueNames;
+    }
+
+  }
+
+  /*
+    Copyright 2018 Google LLC
+
+    Use of this source code is governed by an MIT-style
+    license that can be found in the LICENSE file or at
+    https://opensource.org/licenses/MIT.
+  */
+  /**
+   * A class implementing the `fetchDidFail` lifecycle callback. This makes it
+   * easier to add failed requests to a background sync Queue.
+   *
+   * @memberof workbox.backgroundSync
+   */
+
+  class Plugin {
+    /**
+     * @param {...*} queueArgs Args to forward to the composed Queue instance.
+     * See the [Queue]{@link workbox.backgroundSync.Queue} documentation for
+     * parameter details.
+     */
+    constructor(...queueArgs) {
+      this._queue = new Queue(...queueArgs);
+      this.fetchDidFail = this.fetchDidFail.bind(this);
+    }
+    /**
+     * @param {Object} options
+     * @param {Request} options.request
+     * @private
+     */
+
+
+    async fetchDidFail({
+      request
+    }) {
+      await this._queue.pushRequest({
+        request
+      });
+    }
+
+  }
+
+  /*
+    Copyright 2018 Google LLC
+
+    Use of this source code is governed by an MIT-style
+    license that can be found in the LICENSE file or at
+    https://opensource.org/licenses/MIT.
+  */
+
+  exports.Queue = Queue;
+  exports.Plugin = Plugin;
+
+  return exports;
+
+}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private));
+//# sourceMappingURL=workbox-background-sync.dev.js.map