@wictorwilen/cocogen 1.0.17 → 1.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/CHANGELOG.md +14 -1
  2. package/README.md +3 -0
  3. package/dist/init/init.d.ts.map +1 -1
  4. package/dist/init/init.js +273 -34
  5. package/dist/init/init.js.map +1 -1
  6. package/dist/init/templates/dotnet/Core/ConnectorCore.cs.ejs +27 -4
  7. package/dist/init/templates/dotnet/Core/ItemId.cs.ejs +39 -0
  8. package/dist/init/templates/dotnet/Core/Validation.cs.ejs +108 -0
  9. package/dist/init/templates/dotnet/Datasource/CsvItemSource.cs.ejs +1 -1
  10. package/dist/init/templates/dotnet/Datasource/IItemSource.cs.ejs +1 -1
  11. package/dist/init/templates/dotnet/Generated/CsvParser.cs.ejs +0 -179
  12. package/dist/init/templates/dotnet/Generated/FromCsvRow.cs.ejs +0 -21
  13. package/dist/init/templates/dotnet/Generated/FromRow.cs.ejs +23 -0
  14. package/dist/init/templates/dotnet/Generated/ItemPayload.cs.ejs +2 -26
  15. package/dist/init/templates/dotnet/Generated/Model.cs.ejs +5 -1
  16. package/dist/init/templates/dotnet/Generated/PropertyTransformBase.cs.ejs +10 -6
  17. package/dist/init/templates/dotnet/Generated/RowParser.cs.ejs +184 -0
  18. package/dist/init/templates/dotnet/Program.commandline.cs.ejs +6 -3
  19. package/dist/init/templates/dotnet/PropertyTransform.cs.ejs +1 -1
  20. package/dist/init/templates/dotnet/README.md.ejs +2 -1
  21. package/dist/init/templates/ts/README.md.ejs +2 -1
  22. package/dist/init/templates/ts/src/cli.ts.ejs +5 -1
  23. package/dist/init/templates/ts/src/core/connectorCore.ts.ejs +21 -2
  24. package/dist/init/templates/ts/src/core/itemId.ts.ejs +34 -0
  25. package/dist/init/templates/ts/src/core/validation.ts.ejs +89 -0
  26. package/dist/init/templates/ts/src/datasource/csvItemSource.ts.ejs +2 -2
  27. package/dist/init/templates/ts/src/datasource/itemSource.ts.ejs +1 -1
  28. package/dist/init/templates/ts/src/generated/csv.ts.ejs +0 -53
  29. package/dist/init/templates/ts/src/generated/fromCsvRow.ts.ejs +0 -19
  30. package/dist/init/templates/ts/src/generated/fromRow.ts.ejs +20 -0
  31. package/dist/init/templates/ts/src/generated/index.ts.ejs +1 -1
  32. package/dist/init/templates/ts/src/generated/itemPayload.ts.ejs +3 -28
  33. package/dist/init/templates/ts/src/generated/model.ts.ejs +7 -1
  34. package/dist/init/templates/ts/src/generated/propertyTransformBase.ts.ejs +9 -3
  35. package/dist/init/templates/ts/src/generated/row.ts.ejs +54 -0
  36. package/dist/init/templates/ts/src/propertyTransform.ts.ejs +1 -1
  37. package/dist/ir.d.ts +12 -0
  38. package/dist/ir.d.ts.map +1 -1
  39. package/dist/tsp/loader.d.ts.map +1 -1
  40. package/dist/tsp/loader.js +63 -3
  41. package/dist/tsp/loader.js.map +1 -1
  42. package/package.json +1 -1

package/dist/init/templates/dotnet/Generated/FromCsvRow.cs.ejs
@@ -1,22 +1 @@
- // Map CSV rows into the schema model.
- using <%= namespaceName %>;
- using <%= namespaceName %>.Datasource;
 
- namespace <%= schemaNamespace %>;
-
- /// <summary>
- /// Maps CSV rows into the schema model using generated transforms.
- /// </summary>
- public static class FromCsvRow
- {
- /// <summary>
- /// Convert a CSV row dictionary into a schema model instance.
- /// </summary>
- public static <%= itemTypeName %> Parse(IReadOnlyDictionary<string, string?> row)
- {
- var transforms = new PropertyTransform();
- return new <%= itemTypeName %>(
- <%- constructorArgLines %>
- );
- }
- }

package/dist/init/templates/dotnet/Generated/FromRow.cs.ejs
@@ -0,0 +1,23 @@
+ // Map source rows into the schema model.
+ using System.Collections.Generic;
+ using <%= namespaceName %>;
+ using <%= namespaceName %>.Datasource;
+
+ namespace <%= schemaNamespace %>;
+
+ /// <summary>
+ /// Maps source rows into the schema model using generated transforms.
+ /// </summary>
+ public static class FromRow
+ {
+ /// <summary>
+ /// Convert a row dictionary into a schema model instance.
+ /// </summary>
+ public static <%= itemTypeName %> Parse(IReadOnlyDictionary<string, string?> row)
+ {
+ var transforms = new PropertyTransform();
+ return new <%= itemTypeName %>(
+ <%- constructorArgLines %>
+ );
+ }
+ }

package/dist/init/templates/dotnet/Generated/ItemPayload.cs.ejs
@@ -1,9 +1,7 @@
  <% const graphNs = graphApiVersion === "beta" ? "Microsoft.Graph.Beta" : "Microsoft.Graph"; -%>
  // Build ExternalItem payloads for Graph ingestion.
- using System.Security.Cryptography;
- using System.Text;
- using System.Text.RegularExpressions;
  using <%= graphNs %>.Models.ExternalConnectors;
+ using <%= namespaceName %>.Core;
 
  namespace <%= schemaNamespace %>;
 
@@ -25,29 +23,7 @@ public static class ItemPayload
 
  private static string EncodeId(string value)
  {
- if (string.IsNullOrEmpty(value)) return string.Empty;
- return IdEncoding switch
- {
- "base64" => ToBase64Url(Encoding.UTF8.GetBytes(value)),
- "hash" => ToBase64Url(SHA256.HashData(Encoding.UTF8.GetBytes(value))),
- _ => Slugify(value),
- };
- }
-
- private static string ToBase64Url(byte[] bytes)
- {
- return Convert.ToBase64String(bytes)
- .TrimEnd('=')
- .Replace('+', '-')
- .Replace('/', '_');
- }
-
- private static string Slugify(string value)
- {
- var lower = value.Trim().ToLowerInvariant();
- var slug = Regex.Replace(lower, "[^a-z0-9]+", "-");
- slug = Regex.Replace(slug, "^-+|-+$", "");
- return slug;
+ return ItemId.GetItemId(value, IdEncoding);
  }
 
  /// <summary>

package/dist/init/templates/dotnet/Generated/Model.cs.ejs
@@ -1,12 +1,16 @@
  // C# representation of the external item schema.
  namespace <%= schemaNamespace %>;
 
+ <% if (recordDocLines && recordDocLines.length) { -%>
+ <%- recordDocLines.join("\n") %>
+ <% } else { -%>
  /// <summary>
  /// Schema model generated from TypeSpec.
  /// </summary>
+ <% } -%>
  public sealed record <%= itemTypeName %>(
  <% for (let i = 0; i < properties.length; i++) { -%>
  <%= properties[i].csType %> <%= properties[i].csName %>,
  <% } -%>
- string CocoId = ""
+ string InternalId = ""
  );

package/dist/init/templates/dotnet/Generated/PropertyTransformBase.cs.ejs
@@ -1,16 +1,19 @@
  // Generated property transforms derived from TypeSpec.
  using System;
  using System.Collections.Generic;
- <% if (usesPersonEntity) { -%>
+ <% if (usesPersonEntity || usesLinq) { -%>
  using System.Linq;
+ <% } -%>
+ <% if (usesPersonEntity) { -%>
  using System.Text.Json;
  <% } -%>
+ using <%= namespaceName %>.Core;
  using <%= namespaceName %>.Datasource;
 
  namespace <%= schemaNamespace %>;
 
  /// <summary>
- /// Base class for CSV-to-model property transforms.
+ /// Base class for row-to-model property transforms.
  /// </summary>
  public abstract class PropertyTransformBase
  {
@@ -30,16 +33,17 @@ public abstract class PropertyTransformBase
 
  <% for (const prop of properties) { -%>
  /// <summary>
- /// Transform the <%= prop.name %> property from a CSV row.
+ /// Transform the <%= prop.name %> property from a source row.
  /// </summary>
  protected virtual <%= prop.csType %> Transform<%= prop.csName %>(IReadOnlyDictionary<string, string?> row)
  {
- <% if (prop.transformThrows) { -%>
+ <%_ if (prop.transformThrows) { -%>
  <%- prop.transformExpression %>;
- <% } else { -%>
+ <%_ } else { -%>
  return <%- prop.transformExpression %>;
- <% } -%>
+ <%_ } -%>
  }
 
  <% } -%>
+
  }

package/dist/init/templates/dotnet/Generated/RowParser.cs.ejs
@@ -0,0 +1,184 @@
+ // Row value parsing helpers used by generated transforms.
+ using System;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ namespace <%= namespaceName %>.Datasource;
+
+ /// <summary>
+ /// Helpers for parsing typed values from row dictionaries.
+ /// </summary>
+ public static class RowParser
+ {
+ /// <summary>
+ /// Read the first matching header value from the row.
+ /// </summary>
+ public static string ReadValue(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ if (headers.Length == 0) return "";
+ return row.TryGetValue(headers[0], out var v) ? (v ?? "") : "";
+ }
+
+ /// <summary>
+ /// Parse a nullable string value into a string.
+ /// </summary>
+ public static string ParseString(string? value)
+ {
+ return value ?? "";
+ }
+
+ /// <summary>
+ /// Parse a string from a row using the provided headers.
+ /// </summary>
+ public static string ParseString(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseString(ReadValue(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a boolean from a row using the provided headers.
+ /// </summary>
+ public static bool ParseBoolean(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseBoolean(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a boolean from a string value.
+ /// </summary>
+ public static bool ParseBoolean(string? value)
+ {
+ var v = ParseString(value);
+ return v.Equals("true", StringComparison.OrdinalIgnoreCase) || v.Equals("1");
+ }
+
+ /// <summary>
+ /// Parse an Int64 from a row using the provided headers.
+ /// </summary>
+ public static long ParseInt64(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseInt64(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse an Int64 from a string value.
+ /// </summary>
+ public static long ParseInt64(string? value)
+ {
+ var v = ParseString(value);
+ return long.TryParse(v, out var n) ? n : 0;
+ }
+
+ /// <summary>
+ /// Parse a double from a row using the provided headers.
+ /// </summary>
+ public static double ParseDouble(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseDouble(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a double from a string value.
+ /// </summary>
+ public static double ParseDouble(string? value)
+ {
+ var v = ParseString(value);
+ return double.TryParse(v, out var n) ? n : 0;
+ }
+
+ /// <summary>
+ /// Parse a DateTimeOffset from a row using the provided headers.
+ /// </summary>
+ public static DateTimeOffset ParseDateTime(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseDateTime(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a DateTimeOffset from a string value.
+ /// </summary>
+ public static DateTimeOffset ParseDateTime(string? value)
+ {
+ var v = ParseString(value);
+ return DateTimeOffset.TryParse(v, out var dt) ? dt : DateTimeOffset.MinValue;
+ }
+
+ /// <summary>
+ /// Parse a string collection from a row using the provided headers.
+ /// </summary>
+ public static List<string> ParseStringCollection(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseStringCollection(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a string collection from a string value.
+ /// </summary>
+ public static List<string> ParseStringCollection(string? value)
+ {
+ var v = ParseString(value);
+ return v.Length == 0
+ ? new List<string>()
+ : v.Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries).ToList();
+ }
+
+ /// <summary>
+ /// Parse an Int64 collection from a row using the provided headers.
+ /// </summary>
+ public static List<long> ParseInt64Collection(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseInt64Collection(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse an Int64 collection from a string value.
+ /// </summary>
+ public static List<long> ParseInt64Collection(string? value)
+ {
+ var v = ParseString(value);
+ if (v.Length == 0) return new List<long>();
+ return v.Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+ .Select((x) => long.TryParse(x, out var n) ? n : 0)
+ .ToList();
+ }
+
+ /// <summary>
+ /// Parse a double collection from a row using the provided headers.
+ /// </summary>
+ public static List<double> ParseDoubleCollection(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseDoubleCollection(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a double collection from a string value.
+ /// </summary>
+ public static List<double> ParseDoubleCollection(string? value)
+ {
+ var v = ParseString(value);
+ if (v.Length == 0) return new List<double>();
+ return v.Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+ .Select((x) => double.TryParse(x, out var n) ? n : 0)
+ .ToList();
+ }
+
+ /// <summary>
+ /// Parse a DateTimeOffset collection from a row using the provided headers.
+ /// </summary>
+ public static List<DateTimeOffset> ParseDateTimeCollection(IReadOnlyDictionary<string, string?> row, string[] headers)
+ {
+ return ParseDateTimeCollection(ParseString(row, headers));
+ }
+
+ /// <summary>
+ /// Parse a DateTimeOffset collection from a string value.
+ /// </summary>
+ public static List<DateTimeOffset> ParseDateTimeCollection(string? value)
+ {
+ var v = ParseString(value);
+ if (v.Length == 0) return new List<DateTimeOffset>();
+ return v.Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+ .Select((x) => DateTimeOffset.TryParse(x, out var dt) ? dt : DateTimeOffset.MinValue)
+ .ToList();
+ }
+ }

package/dist/init/templates/dotnet/Program.commandline.cs.ejs
@@ -167,7 +167,7 @@ async Task ProvisionAsync()
  /// <summary>
  /// Ingest items from CSV.
  /// </summary>
- async Task IngestAsync(string? csvPath, bool dryRun, int? limit, bool verbose)
+ async Task IngestAsync(string? csvPath, bool dryRun, int? limit, bool verbose, bool failFast)
  {
  GraphServiceClient? graph = null;
  TokenCredential? credential = null;
@@ -186,7 +186,7 @@ async Task IngestAsync(string? csvPath, bool dryRun, int? limit, bool verbose)
  IItemSource source = new CsvItemSource(path);
  var core = BuildConnectorCore(graph, credential, connectionId);
 
- await core.IngestAsync(source, dryRun, limit, verbose);
+ await core.IngestAsync(source, dryRun, limit, verbose, failFast);
  }
 
  /// <summary>
@@ -205,6 +205,7 @@ async Task DeleteConnectionAsync()
 
  var csvOption = new Option<string>("--csv", description: "CSV path");
  var dryRunOption = new Option<bool>("--dry-run", description: "Build payloads but do not send to Graph");
+ var failFastOption = new Option<bool>("--fail-fast", description: "Abort on the first item failure");
  var limitOption = new Option<int?>("--limit", description: "Limit number of items");
  var verboseOption = new Option<bool>("--verbose", description: "Print payloads sent to Graph");
 
@@ -216,12 +217,14 @@ provisionCommand.SetHandler(async () => await ProvisionAsync());
  var ingestCommand = new Command("ingest", "Ingest items from CSV");
  ingestCommand.AddOption(csvOption);
  ingestCommand.AddOption(dryRunOption);
+ ingestCommand.AddOption(failFastOption);
  ingestCommand.AddOption(limitOption);
  ingestCommand.AddOption(verboseOption);
  ingestCommand.SetHandler(
- async (string? csv, bool dryRun, int? limit, bool verbose) => await IngestAsync(csv, dryRun, limit, verbose),
+ async (string? csv, bool dryRun, bool failFast, int? limit, bool verbose) => await IngestAsync(csv, dryRun, limit, verbose, failFast),
  csvOption,
  dryRunOption,
+ failFastOption,
  limitOption,
  verboseOption
  );

package/dist/init/templates/dotnet/PropertyTransform.cs.ejs
@@ -1,4 +1,4 @@
- // Customize CSV-to-model property transforms.
+ // Customize row-to-model property transforms.
  namespace <%= schemaNamespace %>;
 
  /// <summary>

package/dist/init/templates/dotnet/README.md.ejs
@@ -53,6 +53,7 @@ Run `npm install` in this folder to fetch the TypeSpec library.
  ## Ingest debugging flags
  Use `dotnet run -- ingest` with:
  - `--dry-run` (build payloads without sending)
+ - `--fail-fast` (abort on the first item failure)
  - `--limit <n>` (ingest only N items)
  - `--verbose` (print the exact payload sent to Graph)
 
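For example, with the new flag a cautious test run of the generated .NET project could look like `dotnet run -- ingest --csv ./items.csv --fail-fast --limit 10 --verbose` (the CSV path is illustrative): it ingests at most ten items, prints each payload, and aborts on the first failure.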
@@ -60,7 +61,7 @@ Note: `--dry-run` does not require Azure AD or connection settings.
 
  ## Switching from CSV to another datasource
  1) Implement `IItemSource` in `Datasource/`.
- 2) If your source yields raw records, map them to `<%= itemTypeName %>` using `FromCsvRow`-style logic.
+ 2) If your source yields raw records, map them to `<%= itemTypeName %>` using `FromRow`-style logic.
  3) Update `Program.cs` to instantiate your new source instead of `CsvItemSource`.
 
  Tip: keep the `IAsyncEnumerable<<%= itemTypeName %>>` pattern for large datasets.

package/dist/init/templates/ts/README.md.ejs
@@ -42,6 +42,7 @@ Run `npm install` to fetch the TypeSpec library.
  ## Ingest debugging flags
  Use `npm run ingest --` with:
  - `--dry-run` (build payloads without sending)
+ - `--fail-fast` (abort on the first item failure)
  - `--limit <n>` (ingest only N items)
  - `--verbose` (print the exact payload sent to Graph)
 
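The TypeScript project accepts the same combination; for example `npm run ingest -- --csv ./items.csv --fail-fast --limit 10 --verbose` (illustrative path) ingests at most ten items, prints each payload, and aborts on the first failure.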
@@ -49,7 +50,7 @@ Note: `--dry-run` does not require CONNECTION_ID, but you still need it for real
 
  ## Switching from CSV to another datasource
  1) Implement `ItemSource` in `src/datasource`.
- 2) If your source yields raw records, map them to `<%= itemTypeName %>` using `fromCsvRow`-style logic.
+ 2) If your source yields raw records, map them to `<%= itemTypeName %>` using `fromRow`-style logic.
  3) Update `src/cli.ts` to instantiate your new source instead of `CsvItemSource`.
 
  Tip: keep the streaming `AsyncIterable<<%= itemTypeName %>>` pattern for large datasets.
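To make the switch described above concrete, here is a minimal TypeScript sketch of a non-CSV source. The item type name (`MyItem`), the endpoint, and the folder layout are hypothetical; `fromRow` is assumed to accept a plain `Record<string, unknown>`, as it does for parsed CSV rows in the generated `CsvItemSource`.

    // Hypothetical sketch of a custom ItemSource backed by an HTTP API (Node 18+ fetch).
    import type { ItemSource } from "./itemSource.js";
    import type { MyItem } from "../generated/model.js"; // generated model; the real name comes from the TypeSpec schema
    import { fromRow } from "../generated/fromRow.js";   // generated row mapper

    export class ApiItemSource implements ItemSource {
      constructor(private readonly endpoint: string) {}

      // Keep the streaming AsyncIterable pattern for large datasets.
      async *getItems(): AsyncIterable<MyItem> {
        const response = await fetch(this.endpoint);
        const rows = (await response.json()) as Array<Record<string, unknown>>;
        for (const row of rows) {
          yield fromRow(row); // map each raw record onto the schema model
        }
      }
    }

Per step 3, `src/cli.ts` would then instantiate `new ApiItemSource("https://example.com/api/items")` instead of `CsvItemSource`.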

package/dist/init/templates/ts/src/cli.ts.ejs
@@ -172,6 +172,7 @@ async function ingest(options: {
  dryRun?: boolean;
  limit?: number;
  verbose?: boolean;
+ failFast?: boolean;
  }): Promise<void> {
  const connectionId = options.dryRun ? "dry-run" : resolveConnectionId();
  // Swap this for any ItemSource implementation (API, DB, queue, etc.).
@@ -183,6 +184,7 @@ async function ingest(options: {
  dryRun: options.dryRun,
  limit: options.limit,
  verbose: options.verbose,
+ failFast: options.failFast,
  toExternalItem
  });
  }
@@ -200,12 +202,14 @@ program
  .description("Ingest items from CSV")
  .option("--csv <path>", "CSV path")
  .option("--dry-run", "Build payloads but do not send to Graph")
+ .option("--fail-fast", "Abort on the first item failure")
  .option("--limit <n>", "Limit number of items", (value) => Number(value))
  .option("--verbose", "Print payloads sent to Graph")
- .action((options: { csv?: string; dryRun?: boolean; limit?: number; verbose?: boolean }) =>
+ .action((options: { csv?: string; dryRun?: boolean; limit?: number; verbose?: boolean; failFast?: boolean }) =>
  ingest({
  csvPath: options.csv,
  dryRun: options.dryRun,
+ failFast: options.failFast,
  limit: options.limit,
  verbose: options.verbose,
  })

package/dist/init/templates/ts/src/core/connectorCore.ts.ejs
@@ -33,6 +33,7 @@ export type IngestOptions<Item> = {
  dryRun?: boolean;
  limit?: number;
  verbose?: boolean;
+ failFast?: boolean;
  toExternalItem: (item: Item) => unknown;
  };
 
@@ -178,6 +179,8 @@ export class ConnectorCore<Item> {
  */
  async ingest(options: IngestOptions<Item>): Promise<void> {
  let count = 0;
+ let successCount = 0;
+ const failures: Array<{ index: number; id: string; message: string }> = [];
  for await (const item of options.source.getItems()) {
  if (options.limit && count >= options.limit) break;
  const itemId = this.getItemId(item as Item);
@@ -186,9 +189,14 @@ export class ConnectorCore<Item> {
  try {
  await this.putItem(options.connectionId, item as Item, Boolean(options.verbose));
  console.log(`ok: ingested item ${count + 1} (id=${itemId})`);
+ successCount++;
  } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
  console.error(`error: failed item ${count + 1} (id=${itemId})`);
- throw error;
+ failures.push({ index: count + 1, id: itemId, message });
+ if (options.failFast) {
+ throw error;
+ }
  }
  } else if (options.verbose) {
  const payload = options.toExternalItem(item as Item) as any;
@@ -202,7 +210,18 @@ export class ConnectorCore<Item> {
  count++;
  }
 
- console.log("ok: ingested " + count + " item(s)");
+ if (!options.dryRun) {
+ console.log(`ok: ingested ${successCount} item(s)`);
+ } else {
+ console.log(`ok: inspected ${count} item(s)`);
+ }
+
+ if (failures.length > 0) {
+ console.warn(`warn: ${failures.length} item(s) failed`);
+ for (const failure of failures) {
+ console.warn(`warn: failed item ${failure.index} (id=${failure.id}) - ${failure.message}`);
+ }
+ }
  }
 
  private async graphRequest(method: string, url: string, body?: unknown): Promise<Response> {
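A usage sketch of the new failure handling, assuming `core` is a `ConnectorCore` instance built as in the generated `cli.ts`, that `IngestOptions` also carries the `connectionId` and `source` fields `ingest` reads, and that `toExternalItem` is the generated payload builder; the connection id and CSV path are illustrative:

    const source = new CsvItemSource("./items.csv"); // any ItemSource works here

    await core.ingest({
      connectionId: "my-connection",
      source,
      limit: 100,
      verbose: true,
      failFast: false, // record failures, warn at the end, keep going
      toExternalItem,
    });
    // With failFast: true, the first failed putItem call rethrows instead of being recorded.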

package/dist/init/templates/ts/src/core/itemId.ts.ejs
@@ -0,0 +1,34 @@
+ import { createHash } from "node:crypto";
+
+ type ItemRecord = Record<string, unknown>;
+
+ export type IdEncoding = "slug" | "base64" | "hash";
+
+ export function toBase64Url(value: string | Buffer): string {
+ const buffer = typeof value === "string" ? Buffer.from(value, "utf8") : value;
+ return buffer
+ .toString("base64")
+ .replace(/\+/g, "-")
+ .replace(/\//g, "_")
+ .replace(/=+$/g, "");
+ }
+
+ export function slugify(value: string): string {
+ const normalized = value.normalize("NFKD");
+ const slug = normalized
+ .toLowerCase()
+ .replace(/[^a-z0-9]+/g, "-")
+ .replace(/(^-+|-+$)/g, "");
+ return slug;
+ }
+
+ export function encodeId(value: string, encoding: IdEncoding): string {
+ if (encoding === "base64") return toBase64Url(value);
+ if (encoding === "hash") return toBase64Url(createHash("sha256").update(value, "utf8").digest());
+ return slugify(value);
+ }
+
+ export function getItemId(item: ItemRecord, idPropertyName: string, encoding: IdEncoding): string {
+ const raw = (item as any).internalId ?? (item as any)[idPropertyName] ?? "";
+ return encodeId(String(raw ?? ""), encoding);
+ }
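A quick sketch of how the new id helpers behave (input values are illustrative; the import path assumes the generated `src/core` folder):

    import { encodeId, getItemId } from "./itemId.js";

    encodeId("Contoso Order #42", "slug");   // "contoso-order-42"
    encodeId("Contoso Order #42", "base64"); // URL-safe base64 of the UTF-8 bytes
    encodeId("Contoso Order #42", "hash");   // URL-safe base64 of the SHA-256 digest

    // getItemId prefers an internalId field, then falls back to the named id property.
    getItemId({ internalId: "A 1", sku: "X-9" }, "sku", "slug"); // "a-1"
    getItemId({ sku: "X-9" }, "sku", "slug");                    // "x-9"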

package/dist/init/templates/ts/src/core/validation.ts.ejs
@@ -0,0 +1,89 @@
+ type StringConstraints = {
+ minLength?: number;
+ maxLength?: number;
+ pattern?: string;
+ format?: string;
+ };
+
+ type NumberConstraints = {
+ minValue?: number;
+ maxValue?: number;
+ };
+
+ function validateFormat(name: string, value: string, format?: string): void {
+ if (!format || !value) return;
+ const normalized = format.toLowerCase();
+ if (normalized === "email") {
+ if (!/^[^@\s]+@[^@\s]+\.[^@\s]+$/.test(value)) {
+ throw new Error(`Invalid ${name}: expected email format.`);
+ }
+ return;
+ }
+ if (normalized === "uri" || normalized === "url") {
+ try {
+ new URL(value);
+ } catch {
+ throw new Error(`Invalid ${name}: expected URI format.`);
+ }
+ return;
+ }
+ if (normalized === "uuid") {
+ if (!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(value)) {
+ throw new Error(`Invalid ${name}: expected UUID format.`);
+ }
+ return;
+ }
+ if (normalized === "date-time") {
+ const time = Date.parse(value);
+ if (Number.isNaN(time)) {
+ throw new Error(`Invalid ${name}: expected date-time format.`);
+ }
+ }
+ }
+
+ function validateString(name: string, value: string, constraints?: StringConstraints): string {
+ if (!constraints) return value;
+ const length = value.length;
+ if (constraints.minLength !== undefined && length < constraints.minLength) {
+ throw new Error(`Invalid ${name}: minimum length is ${constraints.minLength}.`);
+ }
+ if (constraints.maxLength !== undefined && length > constraints.maxLength) {
+ throw new Error(`Invalid ${name}: maximum length is ${constraints.maxLength}.`);
+ }
+ if (constraints.pattern) {
+ const regex = new RegExp(constraints.pattern);
+ if (value && !regex.test(value)) {
+ throw new Error(`Invalid ${name}: does not match required pattern.`);
+ }
+ }
+ validateFormat(name, value, constraints.format);
+ return value;
+ }
+
+ function validateNumber(name: string, value: number, constraints?: NumberConstraints): number {
+ if (!constraints) return value;
+ if (constraints.minValue !== undefined && value < constraints.minValue) {
+ throw new Error(`Invalid ${name}: minimum value is ${constraints.minValue}.`);
+ }
+ if (constraints.maxValue !== undefined && value > constraints.maxValue) {
+ throw new Error(`Invalid ${name}: maximum value is ${constraints.maxValue}.`);
+ }
+ return value;
+ }
+
+ function validateStringCollection(name: string, values: string[], constraints?: StringConstraints): string[] {
+ if (!constraints) return values;
+ return values.map((value) => validateString(name, value, constraints));
+ }
+
+ function validateNumberCollection(name: string, values: number[], constraints?: NumberConstraints): number[] {
+ if (!constraints) return values;
+ return values.map((value) => validateNumber(name, value, constraints));
+ }
+
+ export {
+ validateString,
+ validateNumber,
+ validateStringCollection,
+ validateNumberCollection
+ };
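A short sketch of how the exported validators behave (property names and constraint values are illustrative; the import path assumes the generated `src/core` folder):

    import { validateString, validateNumber } from "./validation.js";

    // Returns the value unchanged when every constraint passes.
    validateString("email", "user@example.com", { format: "email" });
    validateNumber("rating", 4, { minValue: 1, maxValue: 5 });

    // Throws a descriptive error when a constraint fails,
    // e.g. "Invalid title: maximum length is 10."
    validateString("title", "A surprisingly long product title", { maxLength: 10 });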

package/dist/init/templates/ts/src/datasource/csvItemSource.ts.ejs
@@ -6,7 +6,7 @@ import { parse } from "csv-parse";
 
  import type { <%= itemTypeName %> } from "../<%= schemaFolderName %>/model.js";
  import type { ItemSource } from "./itemSource.js";
- import { fromCsvRow } from "../<%= schemaFolderName %>/fromCsvRow.js";
+ import { fromRow } from "../<%= schemaFolderName %>/fromRow.js";
 
  /**
  * CSV-based datasource (default). Replace with your own ItemSource as needed.
@@ -27,7 +27,7 @@ export class CsvItemSource implements ItemSource {
  const stream = createReadStream(this.filePath, { encoding: "utf8" }).pipe(parser);
 
  for await (const row of stream as AsyncIterable<Record<string, unknown>>) {
- yield fromCsvRow(row);
+ yield fromRow(row);
  }
  }
  }

package/dist/init/templates/ts/src/datasource/itemSource.ts.ejs
@@ -5,7 +5,7 @@ import type { <%= itemTypeName %> } from "../<%= schemaFolderName %>/model.js";
 
  /**
  * Contract for any datasource that yields items for ingestion.
- * Implement this interface to swap CSV for an API, database, or other system.
+ * Implement this interface to swap the default datasource for an API, database, or other system.
  */
  export interface ItemSource {
  /**