appscms-tools-theme 2.9.5 → 2.9.6
- checksums.yaml +4 -4
- data/LICENSE.txt +21 -21
- data/README.md +50 -50
- data/_data/about/en/about.json +16 -16
- data/_data/blog/alertbar.yml +3 -3
- data/_data/blog/authors.yml +11 -11
- data/_data/blog/blog.yml +4 -4
- data/_data/blog/nav.json +13 -13
- data/_data/blog/share.yml +28 -28
- data/_data/calculators/en/biology-calc/bulb-spacing.json +78 -78
- data/_data/calculators/en/biology-calc/cat-chocolate-toxicity.json +52 -52
- data/_data/calculators/en/chemistry-calc/activation-energy.json +44 -44
- data/_data/calculators/en/finance-calc/401k.json +122 -122
- data/_data/contact/en/contact.json +38 -38
- data/_data/disclaimer/en/disclaimer.json +37 -37
- data/_data/download/en/download.json +35 -35
- data/_data/feature/en/3_shop_posters.json +351 -351
- data/_data/feature/en/allele-frequency.json +246 -246
- data/_data/feature/en/batch-conversion.json +77 -77
- data/_data/feature/en/compress-pdf.json +202 -202
- data/_data/feature/en/face-detection.json +52 -52
- data/_data/feature/en/split-pdf.json +115 -115
- data/_data/feature/en/theframe.json +56 -56
- data/_data/feature/hi/compress-pdf.json +81 -81
- data/_data/feature/tr/compress-pdf.json +80 -80
- data/_data/filecomparison/en/filecomparison.json +38 -38
- data/_data/fileinfo/en/fileinfo.json +15 -15
- data/_data/footer/en/data.json +164 -164
- data/_data/footer/hi/data.json +68 -68
- data/_data/footer/networksites.json +15 -15
- data/_data/header/en/data.json +173 -173
- data/_data/header/hi/data.json +144 -144
- data/_data/home/en/en.json +285 -285
- data/_data/home/en/photoeffects.json +23 -23
- data/_data/home/en/posters.json +323 -323
- data/_data/home/hi/hi.json +86 -86
- data/_data/home-1/en/en.json +229 -229
- data/_data/languagesupport/compress-pdf-langs.json +11 -11
- data/_data/languagesupport/index-langs.json +13 -13
- data/_data/photo-categories.json +104 -104
- data/_data/photoeffects.json +31 -31
- data/_data/privacy/en/privacyPolicy.json +265 -265
- data/_data/termAndCondition/en/termAndCondition.json +358 -358
- data/_includes/Rating/rating.html +92 -92
- data/_includes/Rating/structureddata.html +53 -53
- data/_includes/Usp/usp.html +95 -95
- data/_includes/adblocker.html +135 -135
- data/_includes/adsense/adsense.html +7 -7
- data/_includes/alternates/alternates.html +45 -45
- data/_includes/author_bio.html +16 -16
- data/_includes/authors/authors.html +67 -67
- data/_includes/batch-conversion.html +65 -65
- data/_includes/cssfile/links.html +11 -11
- data/_includes/custom-head.html +136 -136
- data/_includes/customblog/recentposts.html +67 -67
- data/_includes/customblog/relatedposts.html +203 -203
- data/_includes/disqus_comments.html +10 -10
- data/_includes/dropdown/langdropdown.html +22 -22
- data/_includes/feature.html +37 -37
- data/_includes/fileformat/Tabularcompariosn.html +158 -158
- data/_includes/fileformat/comparisonfiles.html +151 -151
- data/_includes/fileformat/fileformatdetail.html +101 -101
- data/_includes/fileformat/fileinfoformat.html +93 -93
- data/_includes/footer/index.html +534 -534
- data/_includes/google-analytics.html +22 -22
- data/_includes/head/index.html +235 -235
- data/_includes/header/blogHeader.html +34 -34
- data/_includes/header/index.html +188 -188
- data/_includes/languages/languages.html +60 -60
- data/_includes/monumetric/ads.html +57 -57
- data/_includes/monumetric/monumetric.html +57 -57
- data/_includes/monumetric/profitablecpmgate.html +51 -51
- data/_includes/nofiletransfer/nofiletransfer.html +38 -38
- data/_includes/paginationBlogPage.html +71 -71
- data/_includes/paginationPostPage.html +14 -14
- data/_includes/postauthorbio.html +17 -17
- data/_includes/postbox.html +31 -31
- data/_includes/script.html +58 -58
- data/_includes/section/alertbar.html +11 -11
- data/_includes/section/count.html +36 -36
- data/_includes/section/recent_posts.html +65 -65
- data/_includes/section/related_categories_post.html +198 -198
- data/_includes/share/socialshare.html +20 -20
- data/_includes/staticfooter.html +67 -67
- data/_layouts/aboutUs.html +34 -34
- data/_layouts/batch.html +102 -102
- data/_layouts/blog-1.html +83 -83
- data/_layouts/blog.html +94 -94
- data/_layouts/calculator.html +89 -89
- data/_layouts/categories.html +33 -33
- data/_layouts/contactUs.html +124 -124
- data/_layouts/default.html +1 -1
- data/_layouts/disclaimer.html +123 -123
- data/_layouts/download.html +299 -299
- data/_layouts/feature-1.html +320 -320
- data/_layouts/feature-download.html +308 -308
- data/_layouts/feature.html +258 -258
- data/_layouts/featureResultPage.html +65 -65
- data/_layouts/fileInfo.html +39 -39
- data/_layouts/filecomparison.html +85 -85
- data/_layouts/frame.html +222 -434
- data/_layouts/help.html +23 -23
- data/_layouts/home-1.html +229 -229
- data/_layouts/home.html +249 -249
- data/_layouts/homeResultPage.html +31 -31
- data/_layouts/imagekit.html +125 -125
- data/_layouts/page.html +5 -5
- data/_layouts/photo-effects-home.html +191 -193
- data/_layouts/post.html +177 -177
- data/_layouts/privacyPolicy.html +637 -637
- data/_layouts/termAndCondition.html +646 -646
- data/assets/cross.svg +4 -4
- data/assets/css/adblocker.css +187 -187
- data/assets/css/batch.css +437 -437
- data/assets/css/blog-1.css +65 -65
- data/assets/css/blog.css +491 -491
- data/assets/css/bootstrap.min.css +6 -6
- data/assets/css/calculators.css +47 -47
- data/assets/css/common.css +550 -550
- data/assets/css/feature-1.css +290 -290
- data/assets/css/filecomparison.css +25 -25
- data/assets/css/frame.css +780 -780
- data/assets/css/home-1.css +187 -187
- data/assets/css/imagekit.css +840 -840
- data/assets/css/tools.css +1278 -1278
- data/assets/facebook.svg +4 -4
- data/assets/images/abp.svg +41 -41
- data/assets/images/adblock.svg +266 -266
- data/assets/images/addimg.svg +2 -2
- data/assets/images/convert.svg +8 -8
- data/assets/images/safevideoconverter.svg +23 -23
- data/assets/images/uo.svg +41 -41
- data/assets/images/vectorpaint.svg +5 -5
- data/assets/instagram.svg +4 -4
- data/assets/js/TopScroll.js +8 -8
- data/assets/js/adBlocker.js +44 -44
- data/assets/js/ads.js +8 -8
- data/assets/js/append-div.js +10 -10
- data/assets/js/batch.js +229 -229
- data/assets/js/calculator-tooltip.js +3 -3
- data/assets/js/face-api.js +38064 -38064
- data/assets/js/face-detection.js +303 -303
- data/assets/js/featureResult.js +48 -48
- data/assets/js/frame.js +263 -263
- data/assets/js/googledrive.js +155 -155
- data/assets/js/homeResult.js +36 -36
- data/assets/js/manifest.json +16 -16
- data/assets/js/multiselect.js +157 -157
- data/assets/js/perspective.min.js +182 -182
- data/assets/js/photo-effects.json +84 -84
- data/assets/js/redirectResult.js +9 -9
- data/assets/js/testing-batch.js +39 -39
- data/assets/js/theme.js +44 -44
- data/assets/linkdin.svg +4 -4
- data/assets/noserverupload.svg +4 -4
- data/assets/pdf.svg +20 -20
- data/assets/secure.svg +43 -43
- data/assets/star.svg +4 -4
- data/assets/twitter.svg +4 -4
- data/assets/youtube.svg +4 -4
- metadata +7 -7
data/assets/js/face-detection.js
CHANGED
@@ -1,303 +1,303 @@
const getScript = document.currentScript
const pageTool = getScript.dataset.tool
const lang = getScript.dataset.lang
const inputBox = document.querySelector('#Inputbox')
const fileDropBox = document.querySelector('.custom-box')
const cropModal = document.querySelector('.crop-image-modal-container')
const exampleModal = document.querySelector('.example-images-modal-container')
const workspace = document.getElementById('workspace')
const canvasPanel = document.getElementById('canvas-panel')
const download = document.querySelector('#download-button')
const form = document.querySelector('#effect-form')
let files = []
let cropWidth = null
let cropHeight = null
let cropper = null
let cropInputWidth = null
let index = 0
let cropInputHeight = null
let image = null
const showLoader = () => {
  showLoading()
}
const closeLoader = () => {}
const clickInput = (e) => {
  console.log(`#file-${e.dataset.index}`)
  document.querySelector(`#file-${e.dataset.index}`).click()
}
let featureData = null

fetch('/assets/js/photo-effects.json')
  .then((response) => response.json())
  .then((data) => {
    featureData = data.find((i) => i.name === form.dataset.featureName)
    console.log(featureData)
  })
const fileOnChange = (e) => {
  index = Number(e.dataset.index)
  let reader = new FileReader()
  reader.onload = (event) => {
    cropModal.style.display = 'flex'
    if (cropper === null) {
      cropImage(event.target.result, e.id)
    } else {
      updateCropper(event.target.result, e.id)
    }
  }
  reader.readAsDataURL(e.files[0])
}
const closeModal = () => {
  cropModal.style.display = 'none'
}
const closeExamplesModal = () => {
  exampleModal.style.display = 'none'
}
form.addEventListener('submit', (e) => {
  e.preventDefault()
  drawImage()
})
async function face(imageSrc) {
  return new Promise(async (resolve, reject) => {
    await faceapi.nets.tinyFaceDetector.load('/assets/js/models'),
      await faceapi.nets.faceLandmark68Net.load('/assets/js/models'),
      await faceapi.nets.faceRecognitionNet.load('/assets/js/models'),
      await faceapi.nets.faceExpressionNet.load('/assets/js/models')

    const img = document.createElement('img')
    img.src = imageSrc
    img.onload = async () => {
      const detections = await faceapi.detectSingleFace(
        img,
        new faceapi.TinyFaceDetectorOptions()
      )
      let faceDescriptions = await faceapi.detectSingleFace(
        img,
        new faceapi.TinyFaceDetectorOptions()
      )
      const canvas = document.createElement('canvas')
      faceapi.matchDimensions(canvas, img)
      faceDescriptions = faceapi.resizeResults(faceDescriptions, img)
      faceapi.draw.drawDetections(canvas, faceDescriptions)
      let value = extractFaceFromBox(img, detections.box, canvas)
      resolve([value])
    }
  })
}
async function extractFaceFromBox(imageRef, box) {
  const regionsToExtract = [
    new faceapi.Rect(box.x, box.y, box.width, box.height),
  ]
  let faceImages = await faceapi.extractFaces(imageRef, regionsToExtract)
  if (faceImages.length === 0) {
    return 'no face found'
  } else {
    return faceImages[0].toDataURL()
  }
}
const drawInputImage = (ctx, item, indexValue) => {
  return new Promise((resolve, reject) => {
    let image = document.createElement('img')
    image.src = files[indexValue]
    image.onload = () => {
      ctx.save()
      image.width = Number(item.width)
      image.height = Number(item.height)
      if (item.filter) {
        ctx.filter = item.filter
      }
      if (item.rotate) {
        ctx.rotate((item.rotate * Math.PI) / 180)
      }
      let perspectiveKey = 'perspective' in item
      if (!perspectiveKey) {
        ctx.drawImage(
          image,
          Number(item.x),
          Number(item.y),
          image.width,
          image.height
        )
      }
      if (item.translate) {
        ctx.translate(item.translate.x, item.translate.y)
      }
      if (perspectiveKey) {
        let p = new Perspective(ctx, image)
        p.draw([
          [item.perspective.topLeft.x, item.perspective.topLeft.y],
          [item.perspective.topRight.x, item.perspective.topRight.y],
          [item.perspective.bottomRight.x, item.perspective.bottomRight.y],
          [item.perspective.bottomLeft.x, item.perspective.bottomLeft.y],
        ])
      }

      if (item.skew) {
        ctx.setTransform(1, item.skew.x, item.skew.y, 1, 0, 0)
      }

      ctx.restore()
      resolve()
    }
  })
}

const drawImage = () => {
  workspace.style.display = 'block'
  document.querySelector('#upper-panel').style.display = 'none'
  document.querySelector('#saving-data').style.display = 'flex'
  let img = new Image()
  img.src = featureData.effectImagePath
  var canvas = document.createElement('canvas')
  var ctx = canvas.getContext('2d')
  img.onload = () => {
    canvas.width = img.width
    canvas.height = img.height
    Promise.all(
      featureData.elements.map((item, indexValue) => {
        if (item.type === 'image') {
          return new Promise((resolve, reject) => {
            drawInputImage(ctx, item, indexValue, canvas).then(() => {
              resolve()
            })
          })
        }
      })
    ).then(() => {
      ctx.filter = 'none'
      ctx.drawImage(img, 0, 0, canvas.width, canvas.height)
      featureData.elements.map((item, indexValue) => {
        if (item.type === 'text') {
          let myFont = new FontFace(item.font, `url(${item.fontPath})`)
          myFont.load().then(function (font) {
            ctx.save()
            document.fonts.add(font)
            let textValue = document.querySelector(`#${item.id}`).value
            if (textValue.length > 10 && item.fontSizeOption1) {
              item.fontSize = item.fontSizeOption1
            }
            if (textValue.length > 12 && item.fontSizeOption2) {
              item.fontSize = item.fontSizeOption2
            }
            if (textValue.length > 15 && item.fontSizeOption3) {
              item.fontSize = item.fontSizeOption3
            }
            if (textValue.length > 20 && item.fontSizeOption4) {
              item.fontSize = item.fontSizeOption4
            }
            ctx.font = `${item.fontSize}px ${item.font}`
            if (item.shadowColor) {
              ctx.shadowColor = `${item.shadowColor}`
            }

            if (item.shadowOffsetX) {
              ctx.shadowOffsetX = 3
            }
            if (item.shadowOffsetY) {
              ctx.shadowOffsetY = 3
            }
            if (item.shadowBlur) {
              ctx.shadowBlur = 2
            }
            if (item.rotate) {
              ctx.rotate((item.rotate * Math.PI) / 180)
            }
            ctx.textAlign = 'center'
            ctx.fillStyle = `${item.color}`
            ctx.fillText(textValue, item.x, item.y)
            ctx.restore()
          })
        }
        if (item.type === 'rectangle') {
        }
      })
      canvasPanel.innerHTML = ''
      document.querySelector('#saving-data').style.display = 'none'
      canvasPanel.appendChild(canvas)
    })
  }
}
const cropImage = (result, id) => {
  let image = new Image()
  image.onload = () => {
    let img = document.createElement('img')
    img.src = result
    img.id = 'image'
    document.querySelector('.crop-image-modal-body').appendChild(img)
    cropper = new Cropper(img, {
      viewMode: 3,
      ready() {
        let find = featureData.elements.find((i) => i.id === id)
        cropper.setAspectRatio(Number(find.width) / Number(find.height))
        cropModal.style.display = 'flex'
        this.cropper.crop()
      },
      crop(event) {
        cropWidth = Math.round(event.detail.width)
        cropHeight = Math.round(event.detail.height)
      },
    })
  }
  image.src = result
}
const updateCropper = (result, id) => {
  cropper.destroy()
  document.querySelector('.crop-image-modal-body').innerHTML = ''
  cropImage(result, id)
}
document.querySelector('#crop').addEventListener('click', () => {
  let cropperImg = cropper
    .getCroppedCanvas({
      width: cropWidth,
      height: cropHeight,
    })
    .toDataURL()
  face(cropperImg).then(([value]) => {
    value.then((result) => {
      files[index - 1] = result
      document.querySelector(`#image-pre-${index}`).src = result
      document.querySelector(`#image-pre-${index}`).style.display = 'block'
      document.querySelector(`#cam-${index}`).style.display = 'none'
      cropModal.style.display = 'none'
    })
  })
})
const openExamplesModal = () => {
  exampleModal.style.display = 'flex'
}
let inputFile = ''
const handleFile = (file) => {
  cropModal.style.display = 'flex'
  document.querySelector('#file-loader').style.display = 'flex'
  document.querySelector('.file-input').style.display = 'none'
  inputFile = file
  if (file) {
    const reader = new FileReader()
    reader.onload = (e) => {
      if (e.target.result) {
        cropImage(e.target.result)
      }
    }
    reader.readAsDataURL(file)
  }
}
const showLoading = () => {
  document.querySelector('#file-loader').style.display = 'flex'
  document.querySelector('.file-input').style.display = 'none'
}
const stopLoading = () => {
  fileDropBox.style.display = 'none'
}
download.addEventListener('click', () => {
  let canvas = document.querySelector('canvas')
  let url = canvas.toDataURL(`image/png`)
  let a = document.createElement('a')
  a.href = url
  a.download = `safeimagekit-photo-effect-image.png`
  document.body.appendChild(a)
  a.click()
  if (lang === 'en') {
    window.location.href = `/download?tool=${pageTool}`
  } else {
    window.location.href = `/${lang}/download?tool=${pageTool}`
  }
})
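For reference, the script above configures itself from data attributes on its own script tag (document.currentScript.dataset.tool and dataset.lang), looks up the effect definition whose name matches the form's data-feature-name in /assets/js/photo-effects.json, and expects the page to provide the form, workspace, crop-modal and download elements it queries. The markup below is a minimal, illustrative sketch of that wiring, not copied from the theme's layouts: the inline onchange handler is inferred from fileOnChange(e), which reads e.dataset.index, e.id and e.files; the data-tool and data-feature-name values are placeholders; and the loader, file-input and example-images elements the script also touches are omitted. It assumes face-api.js, perspective.min.js and Cropper.js are already loaded on the page.

<!-- Illustrative sketch only; ids and classes taken from the querySelector calls in face-detection.js. -->
<form id="effect-form" data-feature-name="face-detection">
  <div class="custom-box" id="upper-panel">
    <input type="file" id="file-1" data-index="1" onchange="fileOnChange(this)" />
    <img id="image-pre-1" style="display: none" />
    <span id="cam-1">Upload a photo</span>
  </div>
  <button type="submit">Apply effect</button>
</form>
<div class="crop-image-modal-container" style="display: none">
  <div class="crop-image-modal-body"></div>
  <button id="crop" type="button">Crop</button>
</div>
<div id="workspace" style="display: none">
  <div id="saving-data" style="display: none">Saving…</div>
  <div id="canvas-panel"></div>
  <button id="download-button" type="button">Download</button>
</div>
<script src="/assets/js/face-detection.js" data-tool="face-detection" data-lang="en"></script>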