keycloakify 7.2.4 → 7.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/download-builtin-keycloak-theme.js +9 -8
- package/bin/download-builtin-keycloak-theme.js.map +1 -1
- package/bin/keycloakify/generateFtl/ftl_object_to_js_code_declaring_an_object.ftl +2 -1
- package/bin/keycloakify/generateFtl/generateFtl.d.ts +1 -1
- package/bin/keycloakify/generateFtl/generateFtl.js +2 -1
- package/bin/keycloakify/generateFtl/generateFtl.js.map +1 -1
- package/bin/tools/downloadAndUnzip.d.ts +1 -3
- package/bin/tools/downloadAndUnzip.js +85 -371
- package/bin/tools/downloadAndUnzip.js.map +1 -1
- package/bin/tools/jar.js +8 -4
- package/bin/tools/jar.js.map +1 -1
- package/bin/tools/partitionPromiseSettledResults.d.ts +2 -0
- package/bin/tools/partitionPromiseSettledResults.js +41 -0
- package/bin/tools/partitionPromiseSettledResults.js.map +1 -0
- package/bin/tools/trimIndent.d.ts +9 -0
- package/bin/tools/trimIndent.js +98 -0
- package/bin/tools/trimIndent.js.map +1 -0
- package/bin/tools/unzip.d.ts +30 -0
- package/bin/tools/unzip.js +345 -0
- package/bin/tools/unzip.js.map +1 -0
- package/login/Fallback.js +3 -0
- package/login/Fallback.js.map +1 -1
- package/login/kcContext/KcContext.d.ts +7 -1
- package/login/kcContext/KcContext.js.map +1 -1
- package/login/kcContext/kcContextMocks.js +3 -0
- package/login/kcContext/kcContextMocks.js.map +1 -1
- package/login/pages/UpdateEmail.d.ts +7 -0
- package/login/pages/UpdateEmail.js +15 -0
- package/login/pages/UpdateEmail.js.map +1 -0
- package/package.json +17 -1
- package/src/bin/download-builtin-keycloak-theme.ts +17 -17
- package/src/bin/keycloakify/generateFtl/ftl_object_to_js_code_declaring_an_object.ftl +2 -1
- package/src/bin/keycloakify/generateFtl/generateFtl.ts +2 -1
- package/src/bin/tools/downloadAndUnzip.ts +52 -236
- package/src/bin/tools/jar.ts +15 -23
- package/src/bin/tools/partitionPromiseSettledResults.ts +11 -0
- package/src/bin/tools/trimIndent.ts +51 -0
- package/src/bin/tools/unzip.ts +184 -0
- package/src/login/Fallback.tsx +3 -0
- package/src/login/kcContext/KcContext.ts +9 -1
- package/src/login/kcContext/kcContextMocks.ts +7 -0
- package/src/login/pages/UpdateEmail.tsx +88 -0
package/src/bin/tools/jar.ts
CHANGED
@@ -3,12 +3,9 @@ import { dirname, relative, sep } from "path";
|
|
3
3
|
import { createWriteStream } from "fs";
|
4
4
|
|
5
5
|
import walk from "./walk";
|
6
|
-
import
|
7
|
-
import zip from "./zip";
|
6
|
+
import zip, { type ZipSource } from "./zip";
|
8
7
|
import { mkdir } from "fs/promises";
|
9
|
-
|
10
|
-
/** Trim leading whitespace from every line */
|
11
|
-
const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
|
8
|
+
import trimIndent from "./trimIndent";
|
12
9
|
|
13
10
|
type JarArgs = {
|
14
11
|
rootPath: string;
|
@@ -26,28 +23,23 @@ type JarArgs = {
|
|
26
23
|
export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
|
27
24
|
const manifest: ZipSource = {
|
28
25
|
path: "META-INF/MANIFEST.MF",
|
29
|
-
data: Buffer.from(
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
)
|
37
|
-
)
|
26
|
+
data: Buffer.from(trimIndent`
|
27
|
+
Manifest-Version: 1.0
|
28
|
+
Archiver-Version: Plexus Archiver
|
29
|
+
Created-By: Keycloakify
|
30
|
+
Built-By: unknown
|
31
|
+
Build-Jdk: 19.0.0
|
32
|
+
`)
|
38
33
|
};
|
39
34
|
|
40
35
|
const pomProps: ZipSource = {
|
41
36
|
path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
|
42
|
-
data: Buffer.from(
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
version=${version}`
|
49
|
-
)
|
50
|
-
)
|
37
|
+
data: Buffer.from(trimIndent`# Generated by keycloakify
|
38
|
+
# ${new Date()}
|
39
|
+
artifactId=${artifactId}
|
40
|
+
groupId=${groupId}
|
41
|
+
version=${version}
|
42
|
+
`)
|
51
43
|
};
|
52
44
|
|
53
45
|
/**
|
@@ -0,0 +1,11 @@
|
|
1
|
+
export type PromiseSettledAndPartitioned<T> = [T[], any[]];
|
2
|
+
|
3
|
+
export function partitionPromiseSettledResults<T>() {
|
4
|
+
return [
|
5
|
+
([successes, failures]: PromiseSettledAndPartitioned<T>, item: PromiseSettledResult<T>) =>
|
6
|
+
item.status === "rejected"
|
7
|
+
? ([successes, [item.reason, ...failures]] as PromiseSettledAndPartitioned<T>)
|
8
|
+
: ([[item.value, ...successes], failures] as PromiseSettledAndPartitioned<T>),
|
9
|
+
[[], []] as PromiseSettledAndPartitioned<T>
|
10
|
+
] as const;
|
11
|
+
}
|
@@ -0,0 +1,51 @@
|
|
1
|
+
/**
|
2
|
+
* Concatenate the string fragments and interpolated values
|
3
|
+
* to get a single string.
|
4
|
+
*/
|
5
|
+
function populateTemplate(strings: TemplateStringsArray, ...args: any[]) {
|
6
|
+
const chunks = [];
|
7
|
+
for (let i = 0; i < strings.length; i++) {
|
8
|
+
let lastStringLineLength = 0;
|
9
|
+
if (strings[i]) {
|
10
|
+
chunks.push(strings[i]);
|
11
|
+
// remember last indent of the string portion
|
12
|
+
lastStringLineLength = strings[i].split("\n").at(-1)?.length ?? 0;
|
13
|
+
}
|
14
|
+
if (args[i]) {
|
15
|
+
// if the interpolation value has newlines, indent the interpolation values
|
16
|
+
// using the last known string indent
|
17
|
+
chunks.push(args[i].replace(/([\r?\n])/g, "$1" + " ".repeat(lastStringLineLength)));
|
18
|
+
}
|
19
|
+
}
|
20
|
+
return chunks.join("");
|
21
|
+
}
|
22
|
+
|
23
|
+
function trimIndentPrivate(removeEmptyLeadingAndTrailingLines: boolean, strings: TemplateStringsArray, ...args: any[]) {
|
24
|
+
// Remove initial and final newlines
|
25
|
+
let string = populateTemplate(strings, ...args);
|
26
|
+
if (removeEmptyLeadingAndTrailingLines) string = string.replace(/^[\r\n]/, "").replace(/[^\S\r\n]*[\r\n]$/, "");
|
27
|
+
const dents = string.match(/^([ \t])+/gm)?.map(s => s.length) ?? [];
|
28
|
+
// No dents? no change required
|
29
|
+
if (!dents || dents.length == 0) return string;
|
30
|
+
const minDent = Math.min(...dents);
|
31
|
+
// The min indentation is 0, no change needed
|
32
|
+
if (!minDent) return string;
|
33
|
+
const dedented = string.replace(new RegExp(`^${" ".repeat(minDent)}`, "gm"), "");
|
34
|
+
return dedented;
|
35
|
+
}
|
36
|
+
|
37
|
+
/**
|
38
|
+
* Shift all lines left by the *smallest* indentation level,
|
39
|
+
* and remove initial newline and all trailing spaces.
|
40
|
+
*/
|
41
|
+
export default function trimIndent(strings: TemplateStringsArray, ...args: any[]) {
|
42
|
+
return trimIndentPrivate(true, strings, ...args);
|
43
|
+
}
|
44
|
+
|
45
|
+
/**
|
46
|
+
* Shift all lines left by the *smallest* indentation level,
|
47
|
+
* and _keep_ initial newline and all trailing spaces.
|
48
|
+
*/
|
49
|
+
trimIndent.keepLeadingAndTrailingNewlines = function (strings: TemplateStringsArray, ...args: any[]) {
|
50
|
+
return trimIndentPrivate(false, strings, ...args);
|
51
|
+
};
|
@@ -0,0 +1,184 @@
|
|
import { createReadStream, createWriteStream } from "fs";
import { mkdir, rmdir, stat, unlink } from "fs/promises";
import { dirname as pathDirname, join as pathJoin, relative as pathRelative } from "path";
import { type Readable } from "stream";
import { createInflateRaw } from "zlib";
import { partitionPromiseSettledResults } from "./partitionPromiseSettledResults";
7
|
+
|
8
|
+
export type MultiError = Error & { cause: Error[] };
|
9
|
+
|
10
|
+
/**
|
11
|
+
* Extract the archive `zipFile` into the directory `dir`. If `archiveDir` is given,
|
12
|
+
* only that directory will be extracted, stripping the given path components.
|
13
|
+
*
|
14
|
+
* If dir does not exist, it will be created.
|
15
|
+
*
|
16
|
+
* If any archive file exists, it will be overwritten.
|
17
|
+
*
|
18
|
+
* Will unzip using all available nodejs worker threads.
|
19
|
+
*
|
20
|
+
* Will try to clean up extracted files on failure.
|
21
|
+
*
|
22
|
+
* If unpacking fails, will either throw an regular error, or
|
23
|
+
* possibly an `MultiError`, which contains a `cause` field with
|
24
|
+
* a number of root cause errors.
|
25
|
+
*
|
26
|
+
* Warning this method is not optimized for continuous reading of the zip
|
27
|
+
* archive, but is a trade-off between simplicity and allowing extraction
|
28
|
+
* of a single directory from the archive.
|
29
|
+
*
|
30
|
+
* @param zipFilePath the file to unzip
|
31
|
+
* @param extractDirPath the target directory
|
32
|
+
* @param pathOfDirToExtractInArchive if given, unpack only files from this archive directory
|
33
|
+
* @throws {MultiError} error
|
34
|
+
* @returns Promise for a list of full file paths pointing to actually extracted files
|
35
|
+
*/
|
36
|
+
export async function unzip(zipFilePath: string, extractDirPath: string, pathOfDirToExtractInArchive?: string): Promise<string[]> {
|
37
|
+
const dirsCreated: (string | undefined)[] = [];
|
38
|
+
dirsCreated.push(await mkdir(extractDirPath, { recursive: true }));
|
39
|
+
const promises: Promise<string>[] = [];
|
40
|
+
|
41
|
+
// Iterate over all files in the zip, skip files which are not in archiveDir,
|
42
|
+
// if given.
|
43
|
+
for await (const record of iterateZipArchive(zipFilePath)) {
|
44
|
+
const { path: recordPath, createReadStream: createRecordReadStream } = record;
|
45
|
+
if (pathOfDirToExtractInArchive && !recordPath.startsWith(pathOfDirToExtractInArchive)) {
|
46
|
+
continue;
|
47
|
+
}
|
48
|
+
const relativePath = pathOfDirToExtractInArchive ? pathRelative(pathOfDirToExtractInArchive, recordPath) : recordPath;
|
49
|
+
const filePath = pathJoin(extractDirPath, relativePath);
|
50
|
+
const parent = pathDirname(filePath);
|
51
|
+
promises.push(
|
52
|
+
new Promise<string>(async (resolve, reject) => {
|
53
|
+
if (!dirsCreated.includes(parent)) dirsCreated.push(await mkdir(parent, { recursive: true }));
|
54
|
+
|
55
|
+
// Pull the file out of the archive, write it to the target directory
|
56
|
+
const output = createWriteStream(filePath);
|
57
|
+
output.on("error", e => reject(Object.assign(e, { filePath })));
|
58
|
+
output.on("finish", () => resolve(filePath));
|
59
|
+
createRecordReadStream().pipe(output);
|
60
|
+
})
|
61
|
+
);
|
62
|
+
}
|
63
|
+
|
64
|
+
// Wait until _all_ files are either extracted or failed
|
65
|
+
const [success, failure] = (await Promise.allSettled(promises)).reduce(...partitionPromiseSettledResults<string>());
|
66
|
+
|
67
|
+
// If any extraction failed, try to clean up, then throw a MultiError,
|
68
|
+
// which has a `cause` field, containing a list of root cause errors.
|
69
|
+
if (failure.length) {
|
70
|
+
await Promise.all([
|
71
|
+
...success.map(path => unlink(path).catch(_unused => undefined)),
|
72
|
+
...failure.map(e => e && e.path && unlink(e.path as string).catch(_unused => undefined))
|
73
|
+
]);
|
74
|
+
await Promise.all(dirsCreated.filter(Boolean).sort(sortByFolderDepth("desc")));
|
75
|
+
const e = new Error("Failed to extract: " + failure.map(e => e.message).join(";"));
|
76
|
+
(e as any).cause = failure;
|
77
|
+
throw e;
|
78
|
+
}
|
79
|
+
|
80
|
+
return success;
|
81
|
+
}
|
82
|
+
|
83
|
+
function depth(dir: string) {
|
84
|
+
return dir.match(/\//g)?.length ?? 0;
|
85
|
+
}
|
86
|
+
|
87
|
+
function sortByFolderDepth(order: "asc" | "desc") {
|
88
|
+
const ord = order === "asc" ? 1 : -1;
|
89
|
+
return (a: string | undefined, b: string | undefined) => ord * depth(a ?? "") + -ord * depth(b ?? "");
|
90
|
+
}
|
91
|
+
|
92
|
+
/**
|
93
|
+
*
|
94
|
+
* @param file file to read
|
95
|
+
* @param start first byte to read
|
96
|
+
* @param end last byte to read
|
97
|
+
* @returns Promise of a buffer of read bytes
|
98
|
+
*/
|
99
|
+
async function readFileChunk(file: string, start: number, end: number): Promise<Buffer> {
|
100
|
+
const chunks: Buffer[] = [];
|
101
|
+
return new Promise((resolve, reject) => {
|
102
|
+
const stream = createReadStream(file, { start, end });
|
103
|
+
stream.setMaxListeners(Infinity);
|
104
|
+
stream.on("error", e => reject(e));
|
105
|
+
stream.on("end", () => resolve(Buffer.concat(chunks)));
|
106
|
+
stream.on("data", chunk => chunks.push(chunk as Buffer));
|
107
|
+
});
|
108
|
+
}
|
109
|
+
|
110
|
+
type ZipRecord = {
|
111
|
+
path: string;
|
112
|
+
createReadStream: () => Readable;
|
113
|
+
compressionMethod: "deflate" | undefined;
|
114
|
+
};
|
115
|
+
|
116
|
+
type ZipRecordGenerator = AsyncGenerator<ZipRecord, void, unknown>;
|
117
|
+
|
118
|
+
/**
|
119
|
+
* Iterate over all records of a zipfile, and yield a ZipRecord.
|
120
|
+
* Use `record.createReadStream()` to actually read the file.
|
121
|
+
*
|
122
|
+
* Warning this method will only work with single-disk zip files.
|
123
|
+
* Warning this method may fail if the zip archive has an crazy amount
|
124
|
+
* of files and the central directory is not fully contained within the
|
125
|
+
* last 65k bytes of the zip file.
|
126
|
+
*
|
127
|
+
* @param zipFile
|
128
|
+
* @returns AsyncGenerator which will yield ZipRecords
|
129
|
+
*/
|
130
|
+
async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
|
131
|
+
// Need to know zip file size before we can do anything else
|
132
|
+
const { size } = await stat(zipFile);
|
133
|
+
const chunkSize = 65_535 + 22 + 1; // max comment size + end header size + wiggle
|
134
|
+
// Read last ~65k bytes. Zip files have an comment up to 65_535 bytes at the very end,
|
135
|
+
// before that comes the zip central directory end header.
|
136
|
+
let chunk = await readFileChunk(zipFile, size - chunkSize, size);
|
137
|
+
const unread = size - chunk.length;
|
138
|
+
let i = chunk.length - 4;
|
139
|
+
let found = false;
|
140
|
+
// Find central directory end header, reading backwards from the end
|
141
|
+
while (!found && i-- > 0) if (chunk[i] === 0x50 && chunk.readUInt32LE(i) === 0x06054b50) found = true;
|
142
|
+
if (!found) throw new Error("Not a zip file");
|
143
|
+
// This method will fail on a multi-disk zip, so bail early.
|
144
|
+
if (chunk.readUInt16LE(i + 4) !== 0) throw new Error("Multi-disk zip not supported");
|
145
|
+
let nFiles = chunk.readUint16LE(i + 10);
|
146
|
+
// Get the position of the central directory
|
147
|
+
const directorySize = chunk.readUint32LE(i + 12);
|
148
|
+
const directoryOffset = chunk.readUint32LE(i + 16);
|
149
|
+
if (directoryOffset === 0xffff_ffff) throw new Error("zip64 not supported");
|
150
|
+
if (directoryOffset > size) throw new Error(`Central directory offset ${directoryOffset} is outside file`);
|
151
|
+
i = directoryOffset - unread;
|
152
|
+
// If i < 0, it means that the central directory is not contained within `chunk`
|
153
|
+
if (i < 0) {
|
154
|
+
chunk = await readFileChunk(zipFile, directoryOffset, directoryOffset + directorySize);
|
155
|
+
i = 0;
|
156
|
+
}
|
157
|
+
// Now iterate the central directory records, yield an `ZipRecord` for every entry
|
158
|
+
while (nFiles-- > 0) {
|
159
|
+
// Check for marker bytes
|
160
|
+
if (chunk.readUInt32LE(i) !== 0x02014b50) throw new Error("No central directory record at position " + (unread + i));
|
161
|
+
const compressionMethod = ({ 8: "deflate" } as const)[chunk.readUint16LE(i + 10)];
|
162
|
+
const compressedFileSize = chunk.readUint32LE(i + 20);
|
163
|
+
const filenameLength = chunk.readUint16LE(i + 28);
|
164
|
+
const extraLength = chunk.readUint16LE(i + 30);
|
165
|
+
const commentLength = chunk.readUint16LE(i + 32);
|
166
|
+
// Start of the actual content byte stream is after the 'local' record header,
|
167
|
+
// which is 30 bytes long plus filename and extra field
|
168
|
+
const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
|
169
|
+
const end = start + compressedFileSize;
|
170
|
+
const filename = chunk.slice(i + 46, i + 46 + filenameLength).toString("utf-8");
|
171
|
+
const createRecordReadStream = () => {
|
172
|
+
const input = createReadStream(zipFile, { start, end });
|
173
|
+
if (compressionMethod === "deflate") {
|
174
|
+
const inflate = createInflateRaw();
|
175
|
+
input.pipe(inflate);
|
176
|
+
return inflate;
|
177
|
+
}
|
178
|
+
return input;
|
179
|
+
};
|
180
|
+
if (end > start) yield { path: filename, createReadStream: createRecordReadStream, compressionMethod };
|
181
|
+
// advance pointer to next central directory entry
|
182
|
+
i += 46 + filenameLength + extraLength + commentLength;
|
183
|
+
}
|
184
|
+
}
|
package/src/login/Fallback.tsx
CHANGED
@@ -25,6 +25,7 @@ const LoginConfigTotp = lazy(() => import("keycloakify/login/pages/LoginConfigTo
|
|
25
25
|
const LogoutConfirm = lazy(() => import("keycloakify/login/pages/LogoutConfirm"));
|
26
26
|
const UpdateUserProfile = lazy(() => import("keycloakify/login/pages/UpdateUserProfile"));
|
27
27
|
const IdpReviewUserProfile = lazy(() => import("keycloakify/login/pages/IdpReviewUserProfile"));
|
28
|
+
const UpdateEmail = lazy(() => import("keycloakify/login/pages/UpdateEmail"));
|
28
29
|
|
29
30
|
export default function Fallback(props: PageProps<KcContext, I18n>) {
|
30
31
|
const { kcContext, ...rest } = props;
|
@@ -75,6 +76,8 @@ export default function Fallback(props: PageProps<KcContext, I18n>) {
|
|
75
76
|
return <UpdateUserProfile kcContext={kcContext} {...rest} />;
|
76
77
|
case "idp-review-user-profile.ftl":
|
77
78
|
return <IdpReviewUserProfile kcContext={kcContext} {...rest} />;
|
79
|
+
case "update-email.ftl":
|
80
|
+
return <UpdateEmail kcContext={kcContext} {...rest} />;
|
78
81
|
}
|
79
82
|
assert<Equals<typeof kcContext, never>>(false);
|
80
83
|
})()}
|
@@ -30,7 +30,8 @@ export type KcContext =
|
|
30
30
|
| KcContext.LoginConfigTotp
|
31
31
|
| KcContext.LogoutConfirm
|
32
32
|
| KcContext.UpdateUserProfile
|
33
|
-
| KcContext.IdpReviewUserProfile
|
33
|
+
| KcContext.IdpReviewUserProfile
|
34
|
+
| KcContext.UpdateEmail;
|
34
35
|
|
35
36
|
export declare namespace KcContext {
|
36
37
|
export type Common = {
|
@@ -381,6 +382,13 @@ export declare namespace KcContext {
|
|
381
382
|
attributesByName: Record<string, Attribute>;
|
382
383
|
};
|
383
384
|
};
|
385
|
+
|
386
|
+
export type UpdateEmail = Common & {
|
387
|
+
pageId: "update-email.ftl";
|
388
|
+
email: {
|
389
|
+
value?: string;
|
390
|
+
};
|
391
|
+
};
|
384
392
|
}
|
385
393
|
|
386
394
|
export type Attribute = {
|
@@ -491,5 +491,12 @@ export const kcContextMocks: KcContext[] = [
|
|
491
491
|
attributes,
|
492
492
|
attributesByName
|
493
493
|
}
|
494
|
+
}),
|
495
|
+
id<KcContext.UpdateEmail>({
|
496
|
+
...kcContextCommonMock,
|
497
|
+
"pageId": "update-email.ftl",
|
498
|
+
"email": {
|
499
|
+
value: "email@example.com"
|
500
|
+
}
|
494
501
|
})
|
495
502
|
];
|
@@ -0,0 +1,88 @@
|
|
1
|
+
import { clsx } from "keycloakify/tools/clsx";
|
2
|
+
import type { PageProps } from "keycloakify/login/pages/PageProps";
|
3
|
+
import { useGetClassName } from "keycloakify/login/lib/useGetClassName";
|
4
|
+
import type { KcContext } from "../kcContext";
|
5
|
+
import type { I18n } from "../i18n";
|
6
|
+
|
7
|
+
export default function UpdateEmail(props: PageProps<Extract<KcContext, { pageId: "update-email.ftl" }>, I18n>) {
|
8
|
+
const { kcContext, i18n, doUseDefaultCss, Template, classes } = props;
|
9
|
+
|
10
|
+
const { getClassName } = useGetClassName({
|
11
|
+
doUseDefaultCss,
|
12
|
+
classes
|
13
|
+
});
|
14
|
+
|
15
|
+
const { msg, msgStr } = i18n;
|
16
|
+
|
17
|
+
const { url, messagesPerField, isAppInitiatedAction, email } = kcContext;
|
18
|
+
|
19
|
+
return (
|
20
|
+
<Template {...{ kcContext, i18n, doUseDefaultCss, classes }} headerNode={msg("updateEmailTitle")}>
|
21
|
+
<form id="kc-update-email-form" className={getClassName("kcFormClass")} action={url.loginAction} method="post">
|
22
|
+
<div
|
23
|
+
className={clsx(getClassName("kcFormGroupClass"), messagesPerField.printIfExists("email", getClassName("kcFormGroupErrorClass")))}
|
24
|
+
>
|
25
|
+
<div className={getClassName("kcLabelWrapperClass")}>
|
26
|
+
<label htmlFor="email" className={getClassName("kcLabelClass")}>
|
27
|
+
{msg("email")}
|
28
|
+
</label>
|
29
|
+
</div>
|
30
|
+
<div className={getClassName("kcInputWrapperClass")}>
|
31
|
+
<input
|
32
|
+
type="text"
|
33
|
+
id="email"
|
34
|
+
name="email"
|
35
|
+
defaultValue={email.value ?? ""}
|
36
|
+
className={getClassName("kcInputClass")}
|
37
|
+
aria-invalid={messagesPerField.existsError("email")}
|
38
|
+
/>
|
39
|
+
</div>
|
40
|
+
</div>
|
41
|
+
|
42
|
+
<div className={getClassName("kcFormGroupClass")}>
|
43
|
+
<div id="kc-form-options" className={getClassName("kcFormOptionsClass")}>
|
44
|
+
<div className={getClassName("kcFormOptionsWrapperClass")}></div>
|
45
|
+
</div>
|
46
|
+
<div id="kc-form-buttons" className={getClassName("kcFormButtonsClass")}>
|
47
|
+
{isAppInitiatedAction ? (
|
48
|
+
<>
|
49
|
+
<input
|
50
|
+
className={clsx(
|
51
|
+
getClassName("kcButtonClass"),
|
52
|
+
getClassName("kcButtonPrimaryClass"),
|
53
|
+
getClassName("kcButtonLargeClass")
|
54
|
+
)}
|
55
|
+
type="submit"
|
56
|
+
defaultValue={msgStr("doSubmit")}
|
57
|
+
/>
|
58
|
+
<button
|
59
|
+
className={clsx(
|
60
|
+
getClassName("kcButtonClass"),
|
61
|
+
getClassName("kcButtonDefaultClass"),
|
62
|
+
getClassName("kcButtonLargeClass")
|
63
|
+
)}
|
64
|
+
type="submit"
|
65
|
+
name="cancel-aia"
|
66
|
+
value="true"
|
67
|
+
>
|
68
|
+
{msg("doCancel")}
|
69
|
+
</button>
|
70
|
+
</>
|
71
|
+
) : (
|
72
|
+
<input
|
73
|
+
className={clsx(
|
74
|
+
getClassName("kcButtonClass"),
|
75
|
+
getClassName("kcButtonPrimaryClass"),
|
76
|
+
getClassName("kcButtonBlockClass"),
|
77
|
+
getClassName("kcButtonLargeClass")
|
78
|
+
)}
|
79
|
+
type="submit"
|
80
|
+
defaultValue={msgStr("doSubmit")}
|
81
|
+
/>
|
82
|
+
)}
|
83
|
+
</div>
|
84
|
+
</div>
|
85
|
+
</form>
|
86
|
+
</Template>
|
87
|
+
);
|
88
|
+
}
|