appscms-tools-theme 2.7.0 → 2.7.2

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,303 +1,303 @@
1
- const getScript = document.currentScript
2
- const pageTool = getScript.dataset.tool
3
- const lang = getScript.dataset.lang
4
- const inputBox = document.querySelector('#Inputbox')
5
- const fileDropBox = document.querySelector('.custom-box')
6
- const cropModal = document.querySelector('.crop-image-modal-container')
7
- const exampleModal = document.querySelector('.example-images-modal-container')
8
- const workspace = document.getElementById('workspace')
9
- const canvasPanel = document.getElementById('canvas-panel')
10
- const download = document.querySelector('#download-button')
11
- const form = document.querySelector('#effect-form')
12
- let files = []
13
- let cropWidth = null
14
- let cropHeight = null
15
- let cropper = null
16
- let cropInputWidth = null
17
- let index = 0
18
- let cropInputHeight = null
19
- let image = null
20
- const showLoader = () => {
21
- showLoading()
22
- }
23
- const closeLoader = () => {}
24
- const clickInput = (e) => {
25
- console.log(`#file-${e.dataset.index}`)
26
- document.querySelector(`#file-${e.dataset.index}`).click()
27
- }
28
- let featureData = null
29
-
30
- fetch('/assets/js/photo-effects.json')
31
- .then((response) => response.json())
32
- .then((data) => {
33
- featureData = data.find((i) => i.name === form.dataset.featureName)
34
- console.log(featureData)
35
- })
36
- const fileOnChange = (e) => {
37
- index = Number(e.dataset.index)
38
- let reader = new FileReader()
39
- reader.onload = (event) => {
40
- cropModal.style.display = 'flex'
41
- if (cropper === null) {
42
- cropImage(event.target.result, e.id)
43
- } else {
44
- updateCropper(event.target.result, e.id)
45
- }
46
- }
47
- reader.readAsDataURL(e.files[0])
48
- }
49
- const closeModal = () => {
50
- cropModal.style.display = 'none'
51
- }
52
- const closeExamplesModal = () => {
53
- exampleModal.style.display = 'none'
54
- }
55
- form.addEventListener('submit', (e) => {
56
- e.preventDefault()
57
- drawImage()
58
- })
59
- async function face(imageSrc) {
60
- return new Promise(async (resolve, reject) => {
61
- await faceapi.nets.tinyFaceDetector.load('/assets/js/models'),
62
- await faceapi.nets.faceLandmark68Net.load('/assets/js/models'),
63
- await faceapi.nets.faceRecognitionNet.load('/assets/js/models'),
64
- await faceapi.nets.faceExpressionNet.load('/assets/js/models')
65
-
66
- const img = document.createElement('img')
67
- img.src = imageSrc
68
- img.onload = async () => {
69
- const detections = await faceapi.detectSingleFace(
70
- img,
71
- new faceapi.TinyFaceDetectorOptions()
72
- )
73
- let faceDescriptions = await faceapi.detectSingleFace(
74
- img,
75
- new faceapi.TinyFaceDetectorOptions()
76
- )
77
- const canvas = document.createElement('canvas')
78
- faceapi.matchDimensions(canvas, img)
79
- faceDescriptions = faceapi.resizeResults(faceDescriptions, img)
80
- faceapi.draw.drawDetections(canvas, faceDescriptions)
81
- let value = extractFaceFromBox(img, detections.box, canvas)
82
- resolve([value])
83
- }
84
- })
85
- }
86
- async function extractFaceFromBox(imageRef, box) {
87
- const regionsToExtract = [
88
- new faceapi.Rect(box.x, box.y, box.width, box.height),
89
- ]
90
- let faceImages = await faceapi.extractFaces(imageRef, regionsToExtract)
91
- if (faceImages.length === 0) {
92
- return 'no face found'
93
- } else {
94
- return faceImages[0].toDataURL()
95
- }
96
- }
97
- const drawInputImage = (ctx, item, indexValue) => {
98
- return new Promise((resolve, reject) => {
99
- let image = document.createElement('img')
100
- image.src = files[indexValue]
101
- image.onload = () => {
102
- ctx.save()
103
- image.width = Number(item.width)
104
- image.height = Number(item.height)
105
- if (item.filter) {
106
- ctx.filter = item.filter
107
- }
108
- if (item.rotate) {
109
- ctx.rotate((item.rotate * Math.PI) / 180)
110
- }
111
- let perspectiveKey = 'perspective' in item
112
- if (!perspectiveKey) {
113
- ctx.drawImage(
114
- image,
115
- Number(item.x),
116
- Number(item.y),
117
- image.width,
118
- image.height
119
- )
120
- }
121
- if (item.translate) {
122
- ctx.translate(item.translate.x, item.translate.y)
123
- }
124
- if (perspectiveKey) {
125
- let p = new Perspective(ctx, image)
126
- p.draw([
127
- [item.perspective.topLeft.x, item.perspective.topLeft.y],
128
- [item.perspective.topRight.x, item.perspective.topRight.y],
129
- [item.perspective.bottomRight.x, item.perspective.bottomRight.y],
130
- [item.perspective.bottomLeft.x, item.perspective.bottomLeft.y],
131
- ])
132
- }
133
-
134
- if (item.skew) {
135
- ctx.setTransform(1, item.skew.x, item.skew.y, 1, 0, 0)
136
- }
137
-
138
- ctx.restore()
139
- resolve()
140
- }
141
- })
142
- }
143
-
144
- const drawImage = () => {
145
- workspace.style.display = 'block'
146
- document.querySelector('#upper-panel').style.display = 'none'
147
- document.querySelector('#saving-data').style.display = 'flex'
148
- let img = new Image()
149
- img.src = featureData.effectImagePath
150
- var canvas = document.createElement('canvas')
151
- var ctx = canvas.getContext('2d')
152
- img.onload = () => {
153
- canvas.width = img.width
154
- canvas.height = img.height
155
- Promise.all(
156
- featureData.elements.map((item, indexValue) => {
157
- if (item.type === 'image') {
158
- return new Promise((resolve, reject) => {
159
- drawInputImage(ctx, item, indexValue, canvas).then(() => {
160
- resolve()
161
- })
162
- })
163
- }
164
- })
165
- ).then(() => {
166
- ctx.filter = 'none'
167
- ctx.drawImage(img, 0, 0, canvas.width, canvas.height)
168
- featureData.elements.map((item, indexValue) => {
169
- if (item.type === 'text') {
170
- let myFont = new FontFace(item.font, `url(${item.fontPath})`)
171
- myFont.load().then(function (font) {
172
- ctx.save()
173
- document.fonts.add(font)
174
- let textValue = document.querySelector(`#${item.id}`).value
175
- if (textValue.length > 10 && item.fontSizeOption1) {
176
- item.fontSize = item.fontSizeOption1
177
- }
178
- if (textValue.length > 12 && item.fontSizeOption2) {
179
- item.fontSize = item.fontSizeOption2
180
- }
181
- if (textValue.length > 15 && item.fontSizeOption3) {
182
- item.fontSize = item.fontSizeOption3
183
- }
184
- if (textValue.length > 20 && item.fontSizeOption4) {
185
- item.fontSize = item.fontSizeOption4
186
- }
187
- ctx.font = `${item.fontSize}px ${item.font}`
188
- if (item.shadowColor) {
189
- ctx.shadowColor = `${item.shadowColor}`
190
- }
191
-
192
- if (item.shadowOffsetX) {
193
- ctx.shadowOffsetX = 3
194
- }
195
- if (item.shadowOffsetY) {
196
- ctx.shadowOffsetY = 3
197
- }
198
- if (item.shadowBlur) {
199
- ctx.shadowBlur = 2
200
- }
201
- if (item.rotate) {
202
- ctx.rotate((item.rotate * Math.PI) / 180)
203
- }
204
- ctx.textAlign = 'center'
205
- ctx.fillStyle = `${item.color}`
206
- ctx.fillText(textValue, item.x, item.y)
207
- ctx.restore()
208
- })
209
- }
210
- if (item.type === 'rectangle') {
211
- }
212
- })
213
- canvasPanel.innerHTML = ''
214
- document.querySelector('#saving-data').style.display = 'none'
215
- canvasPanel.appendChild(canvas)
216
- })
217
- }
218
- }
219
- const cropImage = (result, id) => {
220
- let image = new Image()
221
- image.onload = () => {
222
- let img = document.createElement('img')
223
- img.src = result
224
- img.id = 'image'
225
- document.querySelector('.crop-image-modal-body').appendChild(img)
226
- cropper = new Cropper(img, {
227
- viewMode: 3,
228
- ready() {
229
- let find = featureData.elements.find((i) => i.id === id)
230
- cropper.setAspectRatio(Number(find.width) / Number(find.height))
231
- cropModal.style.display = 'flex'
232
- this.cropper.crop()
233
- },
234
- crop(event) {
235
- cropWidth = Math.round(event.detail.width)
236
- cropHeight = Math.round(event.detail.height)
237
- },
238
- })
239
- }
240
- image.src = result
241
- }
242
- const updateCropper = (result, id) => {
243
- cropper.destroy()
244
- document.querySelector('.crop-image-modal-body').innerHTML = ''
245
- cropImage(result, id)
246
- }
247
- document.querySelector('#crop').addEventListener('click', () => {
248
- let cropperImg = cropper
249
- .getCroppedCanvas({
250
- width: cropWidth,
251
- height: cropHeight,
252
- })
253
- .toDataURL()
254
- face(cropperImg).then(([value]) => {
255
- value.then((result) => {
256
- files[index - 1] = result
257
- document.querySelector(`#image-pre-${index}`).src = result
258
- document.querySelector(`#image-pre-${index}`).style.display = 'block'
259
- document.querySelector(`#cam-${index}`).style.display = 'none'
260
- cropModal.style.display = 'none'
261
- })
262
- })
263
- })
264
- const openExamplesModal = () => {
265
- exampleModal.style.display = 'flex'
266
- }
267
- let inputFile = ''
268
- const handleFile = (file) => {
269
- cropModal.style.display = 'flex'
270
- document.querySelector('#file-loader').style.display = 'flex'
271
- document.querySelector('.file-input').style.display = 'none'
272
- inputFile = file
273
- if (file) {
274
- const reader = new FileReader()
275
- reader.onload = (e) => {
276
- if (e.target.result) {
277
- cropImage(e.target.result)
278
- }
279
- }
280
- reader.readAsDataURL(file)
281
- }
282
- }
283
- const showLoading = () => {
284
- document.querySelector('#file-loader').style.display = 'flex'
285
- document.querySelector('.file-input').style.display = 'none'
286
- }
287
- const stopLoading = () => {
288
- fileDropBox.style.display = 'none'
289
- }
290
- download.addEventListener('click', () => {
291
- let canvas = document.querySelector('canvas')
292
- let url = canvas.toDataURL(`image/png`)
293
- let a = document.createElement('a')
294
- a.href = url
295
- a.download = `safeimagekit-photo-effect-image.png`
296
- document.body.appendChild(a)
297
- a.click()
298
- if (lang === 'en') {
299
- window.location.href = `/download?tool=${pageTool}`
300
- } else {
301
- window.location.href = `/${lang}/download?tool=${pageTool}`
302
- }
303
- })
1
// Photo-effect tool bootstrap: reads tool/lang configuration from the
// <script> tag's data-* attributes and caches the DOM nodes the tool uses.
const getScript = document.currentScript
const pageTool = getScript.dataset.tool
const lang = getScript.dataset.lang
const inputBox = document.querySelector('#Inputbox')
const fileDropBox = document.querySelector('.custom-box')
const cropModal = document.querySelector('.crop-image-modal-container')
const exampleModal = document.querySelector('.example-images-modal-container')
const workspace = document.getElementById('workspace')
const canvasPanel = document.getElementById('canvas-panel')
const download = document.querySelector('#download-button')
const form = document.querySelector('#effect-form')
// Mutable tool state shared by the handlers below.
let files = [] // cropped face data-URLs, indexed by input slot (index - 1)
let cropWidth = null // current crop selection size in px (set by Cropper's crop event)
let cropHeight = null
let cropper = null // active Cropper.js instance, or null before the first crop
let cropInputWidth = null // NOTE(review): appears unused in this file — confirm before removing
let index = 0 // 1-based index of the file input currently being cropped
let cropInputHeight = null // NOTE(review): appears unused in this file
let image = null // NOTE(review): appears unused in this file
// Shows the file-loading spinner (thin alias kept for inline HTML handlers).
const showLoader = () => {
  showLoading()
}
const closeLoader = () => {}
// Forwards a click on a preview element to its hidden <input type="file">.
const clickInput = (e) => {
  console.log(`#file-${e.dataset.index}`)
  document.querySelector(`#file-${e.dataset.index}`).click()
}
// Effect definition (elements, overlay path, fonts) loaded from JSON below.
let featureData = null
29
+
30
// Loads the effect definitions and caches the entry matching this form's
// data-feature-name. featureData stays null until the fetch resolves, so
// handlers that read it (drawImage, cropImage) run after user interaction.
fetch('/assets/js/photo-effects.json')
  .then((response) => {
    if (!response.ok) {
      throw new Error(`photo-effects.json request failed: ${response.status}`)
    }
    return response.json()
  })
  .then((data) => {
    featureData = data.find((i) => i.name === form.dataset.featureName)
    if (!featureData) {
      console.warn(`No effect named "${form.dataset.featureName}" in photo-effects.json`)
    }
    console.log(featureData)
  })
  .catch((err) => {
    // Previously a network or parse failure was an unhandled rejection.
    console.error('Failed to load photo-effects.json', err)
  })
36
// change handler for the per-slot <input type="file"> elements.
// `e` is the input element itself (wired via inline HTML handlers).
const fileOnChange = (e) => {
  // Guard: the change event also fires when the picker is cancelled,
  // in which case e.files is empty and readAsDataURL(undefined) throws.
  if (!e.files || e.files.length === 0) return
  index = Number(e.dataset.index)
  const reader = new FileReader()
  reader.onload = (event) => {
    cropModal.style.display = 'flex'
    // First upload creates the cropper; later uploads rebuild it.
    if (cropper === null) {
      cropImage(event.target.result, e.id)
    } else {
      updateCropper(event.target.result, e.id)
    }
  }
  reader.readAsDataURL(e.files[0])
}
49
// Hides the crop dialog (called from inline HTML handlers).
const closeModal = () => {
  cropModal.style.display = 'none'
}

// Hides the example-images dialog (called from inline HTML handlers).
const closeExamplesModal = () => {
  exampleModal.style.display = 'none'
}

// Render the composed effect instead of actually submitting the form.
form.addEventListener('submit', (event) => {
  event.preventDefault()
  drawImage()
})
59
// Detects a single face in imageSrc (a data-URL) and resolves with a
// one-element array holding a promise for the extracted face's data-URL —
// the [promiseOfDataUrl] shape is relied on by the #crop click handler.
// Rejects when the image fails to load or no face is detected; the
// original left the promise forever pending in both of those cases.
function face(imageSrc) {
  return new Promise((resolve, reject) => {
    const modelPath = '/assets/js/models'
    // Load all required face-api models in parallel (idempotent).
    Promise.all([
      faceapi.nets.tinyFaceDetector.load(modelPath),
      faceapi.nets.faceLandmark68Net.load(modelPath),
      faceapi.nets.faceRecognitionNet.load(modelPath),
      faceapi.nets.faceExpressionNet.load(modelPath),
    ])
      .then(() => {
        const img = document.createElement('img')
        img.onerror = () => reject(new Error('Could not load image for face detection'))
        img.onload = async () => {
          try {
            const detections = await faceapi.detectSingleFace(
              img,
              new faceapi.TinyFaceDetectorOptions()
            )
            if (!detections) {
              reject(new Error('No face detected'))
              return
            }
            // Second detection pass drawn onto a staging canvas; the canvas
            // is never attached to the DOM (kept to preserve behavior).
            let faceDescriptions = await faceapi.detectSingleFace(
              img,
              new faceapi.TinyFaceDetectorOptions()
            )
            const canvas = document.createElement('canvas')
            faceapi.matchDimensions(canvas, img)
            faceDescriptions = faceapi.resizeResults(faceDescriptions, img)
            faceapi.draw.drawDetections(canvas, faceDescriptions)
            // Resolve with the (unawaited) extraction promise to keep the
            // array-of-promise contract the caller expects.
            resolve([extractFaceFromBox(img, detections.box)])
          } catch (err) {
            reject(err)
          }
        }
        img.src = imageSrc
      })
      .catch(reject)
  })
}
86
// Crops the region described by `box` out of imageRef via face-api and
// returns its data-URL. When extraction yields nothing it returns the
// sentinel string 'no face found' (callers currently assign the result
// to an <img>.src either way).
async function extractFaceFromBox(imageRef, box) {
  const region = new faceapi.Rect(box.x, box.y, box.width, box.height)
  const faceImages = await faceapi.extractFaces(imageRef, [region])
  if (faceImages.length === 0) {
    return 'no face found'
  }
  return faceImages[0].toDataURL()
}
97
// Draws the user-supplied image for element `item` (files[indexValue]) onto
// ctx, applying the element's optional filter/rotate/translate/skew and
// either a plain drawImage or a four-corner perspective warp.
// Resolves once drawn; rejects if the stored data-URL fails to load (the
// original promise hung forever in that case).
const drawInputImage = (ctx, item, indexValue) => {
  return new Promise((resolve, reject) => {
    const image = document.createElement('img')
    image.onerror = () =>
      reject(new Error(`Could not load input image for slot ${indexValue}`))
    image.onload = () => {
      ctx.save()
      image.width = Number(item.width)
      image.height = Number(item.height)
      if (item.filter) {
        ctx.filter = item.filter
      }
      if (item.rotate) {
        ctx.rotate((item.rotate * Math.PI) / 180)
      }
      const hasPerspective = 'perspective' in item
      if (!hasPerspective) {
        ctx.drawImage(
          image,
          Number(item.x),
          Number(item.y),
          image.width,
          image.height
        )
      }
      if (item.translate) {
        ctx.translate(item.translate.x, item.translate.y)
      }
      if (hasPerspective) {
        // Perspective (third-party helper) maps the image onto an
        // arbitrary quadrilateral given its four corners.
        const p = new Perspective(ctx, image)
        p.draw([
          [item.perspective.topLeft.x, item.perspective.topLeft.y],
          [item.perspective.topRight.x, item.perspective.topRight.y],
          [item.perspective.bottomRight.x, item.perspective.bottomRight.y],
          [item.perspective.bottomLeft.x, item.perspective.bottomLeft.y],
        ])
      }
      // NOTE(review): this setTransform runs after the draw calls above and
      // is immediately undone by ctx.restore(), so it has no visible effect —
      // confirm intent before removing.
      if (item.skew) {
        ctx.setTransform(1, item.skew.x, item.skew.y, 1, 0, 0)
      }
      ctx.restore()
      resolve()
    }
    image.src = files[indexValue]
  })
}
143
+
144
// Composes the final effect image: draws every user image element, then the
// effect overlay, then every text element, and swaps the result into
// #canvas-panel. Requires featureData (fetched at load) and files[] (filled
// by the crop flow) to be populated.
const drawImage = () => {
  workspace.style.display = 'block'
  document.querySelector('#upper-panel').style.display = 'none'
  document.querySelector('#saving-data').style.display = 'flex'
  const img = new Image()
  const canvas = document.createElement('canvas')
  const ctx = canvas.getContext('2d')
  img.onload = () => {
    canvas.width = img.width
    canvas.height = img.height
    // User images first; the overlay is painted on top of them.
    // (The original wrapped each drawInputImage call in a redundant
    // new Promise; pass the promises to Promise.all directly.)
    const imageDraws = featureData.elements
      .map((item, indexValue) =>
        item.type === 'image' ? drawInputImage(ctx, item, indexValue, canvas) : null
      )
      .filter(Boolean)
    Promise.all(imageDraws).then(() => {
      ctx.filter = 'none'
      ctx.drawImage(img, 0, 0, canvas.width, canvas.height)
      featureData.elements.forEach((item) => {
        if (item.type === 'text') {
          const myFont = new FontFace(item.font, `url(${item.fontPath})`)
          myFont.load().then(function (font) {
            ctx.save()
            document.fonts.add(font)
            const textValue = document.querySelector(`#${item.id}`).value
            // Step the font size down for progressively longer captions.
            if (textValue.length > 10 && item.fontSizeOption1) {
              item.fontSize = item.fontSizeOption1
            }
            if (textValue.length > 12 && item.fontSizeOption2) {
              item.fontSize = item.fontSizeOption2
            }
            if (textValue.length > 15 && item.fontSizeOption3) {
              item.fontSize = item.fontSizeOption3
            }
            if (textValue.length > 20 && item.fontSizeOption4) {
              item.fontSize = item.fontSizeOption4
            }
            ctx.font = `${item.fontSize}px ${item.font}`
            if (item.shadowColor) {
              ctx.shadowColor = `${item.shadowColor}`
            }
            if (item.shadowOffsetX) {
              ctx.shadowOffsetX = 3
            }
            if (item.shadowOffsetY) {
              ctx.shadowOffsetY = 3
            }
            if (item.shadowBlur) {
              ctx.shadowBlur = 2
            }
            if (item.rotate) {
              ctx.rotate((item.rotate * Math.PI) / 180)
            }
            ctx.textAlign = 'center'
            ctx.fillStyle = `${item.color}`
            ctx.fillText(textValue, item.x, item.y)
            ctx.restore()
          })
        }
        // 'rectangle' elements are recognized but not implemented yet.
      })
      canvasPanel.innerHTML = ''
      document.querySelector('#saving-data').style.display = 'none'
      canvasPanel.appendChild(canvas)
    })
  }
  // Assign src after wiring onload so the handler is guaranteed to be set.
  img.src = featureData.effectImagePath
}
219
// Opens `result` (a data-URL) in a new Cropper.js instance inside the crop
// modal. When `id` matches an effect element, the crop box is locked to that
// element's aspect ratio; otherwise (e.g. the handleFile path passes no id)
// the crop is left unconstrained — the original crashed on `find.width` in
// that case because Array.find returned undefined.
const cropImage = (result, id) => {
  // Probe image: used only to defer cropper setup until the data-URL decodes.
  const probe = new Image()
  probe.onload = () => {
    const img = document.createElement('img')
    img.src = result
    img.id = 'image'
    document.querySelector('.crop-image-modal-body').appendChild(img)
    cropper = new Cropper(img, {
      viewMode: 3,
      ready() {
        const element = featureData
          ? featureData.elements.find((i) => i.id === id)
          : undefined
        if (element) {
          cropper.setAspectRatio(Number(element.width) / Number(element.height))
        }
        cropModal.style.display = 'flex'
        this.cropper.crop()
      },
      crop(event) {
        cropWidth = Math.round(event.detail.width)
        cropHeight = Math.round(event.detail.height)
      },
    })
  }
  probe.src = result
}
242
// Tears down the current Cropper.js instance and restarts the crop flow
// with the freshly selected image.
const updateCropper = (result, id) => {
  cropper.destroy()
  const modalBody = document.querySelector('.crop-image-modal-body')
  modalBody.innerHTML = ''
  cropImage(result, id)
}
247
// "Crop" button: renders the current crop selection, runs face extraction
// on it, then installs the extracted face into the slot's preview image.
document.querySelector('#crop').addEventListener('click', () => {
  const cropperImg = cropper
    .getCroppedCanvas({
      width: cropWidth,
      height: cropHeight,
    })
    .toDataURL()
  // face() resolves with [promiseOfDataUrl]; unwrap both layers.
  face(cropperImg)
    .then(([value]) => value)
    .then((result) => {
      files[index - 1] = result
      const preview = document.querySelector(`#image-pre-${index}`)
      preview.src = result
      preview.style.display = 'block'
      document.querySelector(`#cam-${index}`).style.display = 'none'
      cropModal.style.display = 'none'
    })
    .catch((err) => {
      // Previously any detection failure was an unhandled rejection that
      // left the crop modal stuck open.
      console.error('Face extraction failed', err)
      cropModal.style.display = 'none'
    })
})
264
// Shows the example-images dialog (called from inline HTML handlers).
const openExamplesModal = () => {
  exampleModal.style.display = 'flex'
}
// Most recently handled File object (drag-and-drop upload path).
let inputFile = ''
268
// Entry point for the drag-and-drop upload path: shows the crop modal and
// loader, then feeds the file into the crop flow. Mirrors fileOnChange's
// cropper-reuse check, which the original omitted — it always appended a
// second <img> to the modal body on repeat drops.
const handleFile = (file) => {
  cropModal.style.display = 'flex'
  document.querySelector('#file-loader').style.display = 'flex'
  document.querySelector('.file-input').style.display = 'none'
  inputFile = file
  if (!file) return
  const reader = new FileReader()
  reader.onload = (e) => {
    if (e.target.result) {
      if (cropper === null) {
        cropImage(e.target.result)
      } else {
        updateCropper(e.target.result)
      }
    }
  }
  reader.readAsDataURL(file)
}
283
// Swaps the file-input UI for the loading spinner.
const showLoading = () => {
  document.querySelector('#file-loader').style.display = 'flex'
  document.querySelector('.file-input').style.display = 'none'
}
// Hides the drop target once a file has been accepted.
const stopLoading = () => {
  fileDropBox.style.display = 'none'
}
290
// "Download" button: saves the composed canvas as a PNG via a temporary
// anchor, then navigates to the (possibly language-prefixed) download page.
download.addEventListener('click', () => {
  const canvas = document.querySelector('canvas')
  const url = canvas.toDataURL('image/png')
  const a = document.createElement('a')
  a.href = url
  a.download = 'safeimagekit-photo-effect-image.png'
  document.body.appendChild(a)
  a.click()
  // The original left one anchor in the DOM per click.
  a.remove()
  window.location.href =
    lang === 'en' ? `/download?tool=${pageTool}` : `/${lang}/download?tool=${pageTool}`
})