@durable-streams/cli 0.1.3 → 0.1.5

package/README.md CHANGED
@@ -70,6 +70,11 @@ durable-stream-dev read my-stream
 
 - `STREAM_URL` - Base URL of the stream server (default: `http://localhost:4437`)
 
+### Write Options
+
+- `--content-type <type>` - Content-Type for the message (default: `application/octet-stream`)
+- `--json` - Shorthand for `--content-type application/json`
+
 ### Commands
 
 #### Create a stream
@@ -87,8 +92,26 @@ durable-stream-dev write <stream_id> "Hello, world!"
 # Pipe content from stdin
 echo "Hello from stdin" | durable-stream-dev write <stream_id>
 cat file.txt | durable-stream-dev write <stream_id>
+
+# Specify content type
+durable-stream-dev write <stream_id> '{"key": "value"}' --content-type application/json
+
+# Shorthand for JSON
+durable-stream-dev write <stream_id> '{"key": "value"}' --json
 ```
 
+##### JSON Mode Array Flattening
+
+In JSON mode (`--json` or `--content-type application/json`), top-level arrays are flattened into individual messages:
+
+| Input        | Messages stored        |
+| ------------ | ---------------------- |
+| `{}`         | 1 message: `{}`        |
+| `[{}, {}]`   | 2 messages: `{}`, `{}` |
+| `[[{}, {}]]` | 1 message: `[{}, {}]`  |
+
+This matches the protocol's batch semantics.
+
 #### Read from a stream
 
 ```bash
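
The flattening described in the new README section is implemented by the `flattenJsonForAppend` generator added to the bundles below, where it is applied when the new `--batch-json` flag is set. A minimal self-contained sketch of the same one-level behavior (a standalone reimplementation for illustration, not an import from the package):

```typescript
// Sketch mirroring the generator shipped in dist/: one level of array
// flattening; nested arrays are preserved as single messages.
function* flattenJsonForAppend(parsed: unknown): Generator<unknown> {
  if (Array.isArray(parsed)) {
    for (const item of parsed) yield item;
  } else {
    yield parsed;
  }
}

console.log([...flattenJsonForAppend({})].length);         // 1 message: {}
console.log([...flattenJsonForAppend([{}, {}])].length);   // 2 messages: {}, {}
console.log([...flattenJsonForAppend([[{}, {}]])].length); // 1 message: [{}, {}]
```
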
package/dist/index.cjs CHANGED
@@ -23,9 +23,73 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 }) : target, mod));
 
 //#endregion
+const node_path = __toESM(require("node:path"));
 const node_process = __toESM(require("node:process"));
+const node_url = __toESM(require("node:url"));
 const __durable_streams_client = __toESM(require("@durable-streams/client"));
 
+//#region src/jsonUtils.ts
+/**
+ * Check if content-type indicates JSON mode.
+ * Handles cases like "application/json; charset=utf-8".
+ */
+function isJsonContentType(contentType) {
+  return contentType.split(`;`)[0].trim().toLowerCase() === `application/json`;
+}
+/**
+ * One-level array flattening for JSON batch semantics.
+ * - Single object: `{}` → yields once
+ * - Array: `[{}, {}]` → yields each element (flattened)
+ * - Nested array: `[[{}, {}]]` → yields `[{}, {}]` (outer array flattened, inner preserved)
+ *
+ * This matches the protocol's batch semantics where servers flatten exactly one level.
+ */
+function* flattenJsonForAppend(parsed) {
+  if (Array.isArray(parsed)) for (const item of parsed) yield item;
+  else yield parsed;
+}
+
+//#endregion
+//#region src/parseWriteArgs.ts
+/**
+ * Parse write command arguments, extracting content-type flags and content.
+ * @param args - Arguments after the stream_id (starting from index 2)
+ * @returns Parsed content type and content string
+ * @throws Error if --content-type is missing its value or if unknown flags are provided
+ */
+function parseWriteArgs(args) {
+  let contentType = `application/octet-stream`;
+  let batchJson = false;
+  const contentParts = [];
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+    if (arg === `--json`) {
+      contentType = `application/json`;
+      continue;
+    }
+    if (arg === `--batch-json`) {
+      batchJson = true;
+      contentType = `application/json`;
+      continue;
+    }
+    if (arg === `--content-type`) {
+      const nextArg = args[i + 1];
+      if (!nextArg || nextArg.startsWith(`--`)) throw new Error(`--content-type requires a value`);
+      contentType = nextArg;
+      i++;
+      continue;
+    }
+    if (arg.startsWith(`--`)) throw new Error(`unknown flag: ${arg}`);
+    contentParts.push(arg);
+  }
+  return {
+    contentType,
+    content: contentParts.join(` `),
+    batchJson
+  };
+}
+
+//#endregion
 //#region src/index.ts
 const STREAM_URL = process.env.STREAM_URL || `http://localhost:4437`;
 function printUsage() {
@@ -37,6 +101,11 @@ Usage:
   durable-stream read <stream_id>       Follow a stream and write to stdout
   durable-stream delete <stream_id>     Delete a stream
 
+Write Options:
+  --content-type <type>   Content-Type for the message (default: application/octet-stream)
+  --json                  Write as JSON (input stored as single message)
+  --batch-json            Write as JSON array of messages (each array element stored separately)
+
 Environment Variables:
   STREAM_URL    Base URL of the stream server (default: http://localhost:4437)
 `);
@@ -54,14 +123,40 @@ async function createStream(streamId) {
     process.exit(1);
   }
 }
-async function writeStream(streamId, content) {
+/**
+ * Append JSON data to a stream with one-level array flattening.
+ */
+async function appendJson(stream, parsed) {
+  let count = 0;
+  for (const item of flattenJsonForAppend(parsed)) {
+    await stream.append(item);
+    count++;
+  }
+  return count;
+}
+async function writeStream(streamId, contentType, batchJson, content) {
   const url = `${STREAM_URL}/v1/stream/${streamId}`;
+  const isJson = isJsonContentType(contentType);
   try {
-    const stream = new __durable_streams_client.DurableStream({ url });
+    const stream = new __durable_streams_client.DurableStream({
+      url,
+      contentType
+    });
     if (content) {
       const processedContent = content.replace(/\\n/g, `\n`).replace(/\\t/g, `\t`).replace(/\\r/g, `\r`).replace(/\\\\/g, `\\`);
-      await stream.append(processedContent);
-      console.log(`Wrote ${processedContent.length} bytes to ${streamId}`);
+      if (isJson) {
+        const parsed = JSON.parse(processedContent);
+        if (batchJson) {
+          const count = await appendJson(stream, parsed);
+          console.log(`Wrote ${count} message(s) to ${streamId}`);
+        } else {
+          await stream.append(parsed);
+          console.log(`Wrote 1 message to ${streamId}`);
+        }
+      } else {
+        await stream.append(processedContent);
+        console.log(`Wrote ${processedContent.length} bytes to ${streamId}`);
      }
     } else {
       const chunks = [];
       node_process.stdin.on(`data`, (chunk) => {
@@ -72,8 +167,19 @@ async function writeStream(streamId, content) {
         node_process.stdin.on(`error`, reject);
       });
       const data = Buffer.concat(chunks);
-      await stream.append(data);
-      console.log(`Wrote ${data.length} bytes to ${streamId}`);
+      if (isJson) {
+        const parsed = JSON.parse(data.toString(`utf8`));
+        if (batchJson) {
+          const count = await appendJson(stream, parsed);
+          console.log(`Wrote ${count} message(s) to ${streamId}`);
+        } else {
+          await stream.append(parsed);
+          console.log(`Wrote 1 message to ${streamId}`);
+        }
+      } else {
+        await stream.append(data);
+        console.log(`Wrote ${data.length} bytes to ${streamId}`);
+      }
     }
   } catch (error) {
     if (error instanceof Error) node_process.stderr.write(`Error writing to stream: ${error.message}\n`);
@@ -126,9 +232,15 @@ async function main() {
       process.exit(1);
     }
     const streamId = args[1];
-    const content = args.slice(2).join(` `);
-    if (!node_process.stdin.isTTY) await writeStream(streamId);
-    else if (content) await writeStream(streamId, content);
+    let parsed;
+    try {
+      parsed = parseWriteArgs(args.slice(2));
+    } catch (error) {
+      if (error instanceof Error) node_process.stderr.write(`Error: ${error.message}\n`);
+      process.exit(1);
+    }
+    if (!node_process.stdin.isTTY) await writeStream(streamId, parsed.contentType, parsed.batchJson);
+    else if (parsed.content) await writeStream(streamId, parsed.contentType, parsed.batchJson, parsed.content);
     else {
       node_process.stderr.write(`Error: content required (provide as argument or pipe to stdin)\n`);
       printUsage();
@@ -160,9 +272,18 @@ async function main() {
     process.exit(1);
   }
 }
-main().catch((error) => {
+function isMainModule() {
+  if (!process.argv[1]) return false;
+  const scriptPath = (0, node_path.resolve)(process.argv[1]);
+  const modulePath = (0, node_url.fileURLToPath)(require("url").pathToFileURL(__filename).href);
+  return scriptPath === modulePath;
+}
+if (isMainModule()) main().catch((error) => {
   node_process.stderr.write(`Fatal error: ${error.message}\n`);
   process.exit(1);
 });
 
-//#endregion
+//#endregion
+exports.flattenJsonForAppend = flattenJsonForAppend
+exports.isJsonContentType = isJsonContentType
+exports.parseWriteArgs = parseWriteArgs
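
The CJS bundle now exports `isJsonContentType`, `flattenJsonForAppend`, and `parseWriteArgs`, and `vitest` plus a `test` script appear in `package.json` below, so the new flag parsing can be exercised without spawning the CLI. A hypothetical spec, assuming a test file that sits next to `dist/` (the import path is illustrative):

```typescript
import { describe, expect, it } from "vitest";
// Illustrative path: point this at the built CJS bundle in your checkout.
import { isJsonContentType, parseWriteArgs } from "../dist/index.cjs";

describe("parseWriteArgs", () => {
  it("defaults to application/octet-stream and joins positional args", () => {
    expect(parseWriteArgs(["hello", "world"])).toEqual({
      contentType: "application/octet-stream",
      content: "hello world",
      batchJson: false,
    });
  });

  it("treats --json as a content-type shorthand", () => {
    expect(parseWriteArgs(["--json", "{}"]).contentType).toBe("application/json");
  });

  it("rejects --content-type without a value", () => {
    expect(() => parseWriteArgs(["--content-type"])).toThrow("--content-type requires a value");
  });
});

describe("isJsonContentType", () => {
  it("ignores charset parameters", () => {
    expect(isJsonContentType("application/json; charset=utf-8")).toBe(true);
  });
});
```
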
package/dist/index.d.cts CHANGED
@@ -1 +1,33 @@
-export { };
+//#region src/jsonUtils.d.ts
+/**
+ * Check if content-type indicates JSON mode.
+ * Handles cases like "application/json; charset=utf-8".
+ */
+declare function isJsonContentType(contentType: string): boolean;
+/**
+ * One-level array flattening for JSON batch semantics.
+ * - Single object: `{}` → yields once
+ * - Array: `[{}, {}]` → yields each element (flattened)
+ * - Nested array: `[[{}, {}]]` → yields `[{}, {}]` (outer array flattened, inner preserved)
+ *
+ * This matches the protocol's batch semantics where servers flatten exactly one level.
+ */
+declare function flattenJsonForAppend(parsed: unknown): Generator<unknown>;
+
+//#endregion
+//#region src/parseWriteArgs.d.ts
+interface ParsedWriteArgs {
+  contentType: string;
+  content: string;
+  batchJson: boolean;
+}
+/**
+ * Parse write command arguments, extracting content-type flags and content.
+ * @param args - Arguments after the stream_id (starting from index 2)
+ * @returns Parsed content type and content string
+ * @throws Error if --content-type is missing its value or if unknown flags are provided
+ */
+declare function parseWriteArgs(args: Array<string>): ParsedWriteArgs;
+
+//#endregion
+export { ParsedWriteArgs, flattenJsonForAppend, isJsonContentType, parseWriteArgs };
package/dist/index.d.ts CHANGED
@@ -1 +1,33 @@
-export { };
+//#region src/jsonUtils.d.ts
+/**
+ * Check if content-type indicates JSON mode.
+ * Handles cases like "application/json; charset=utf-8".
+ */
+declare function isJsonContentType(contentType: string): boolean;
+/**
+ * One-level array flattening for JSON batch semantics.
+ * - Single object: `{}` → yields once
+ * - Array: `[{}, {}]` → yields each element (flattened)
+ * - Nested array: `[[{}, {}]]` → yields `[{}, {}]` (outer array flattened, inner preserved)
+ *
+ * This matches the protocol's batch semantics where servers flatten exactly one level.
+ */
+declare function flattenJsonForAppend(parsed: unknown): Generator<unknown>;
+
+//#endregion
+//#region src/parseWriteArgs.d.ts
+interface ParsedWriteArgs {
+  contentType: string;
+  content: string;
+  batchJson: boolean;
+}
+/**
+ * Parse write command arguments, extracting content-type flags and content.
+ * @param args - Arguments after the stream_id (starting from index 2)
+ * @returns Parsed content type and content string
+ * @throws Error if --content-type is missing its value or if unknown flags are provided
+ */
+declare function parseWriteArgs(args: Array<string>): ParsedWriteArgs;
+
+//#endregion
+export { ParsedWriteArgs, flattenJsonForAppend, isJsonContentType, parseWriteArgs };
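
Both declaration files (the `.d.cts` and `.d.ts` outputs are identical) expose `ParsedWriteArgs` and the three helpers to TypeScript consumers. A hypothetical consumer sketch; it assumes the package's `main`/`exports` fields resolve to these bundles, which the visible hunks do not show:

```typescript
import {
  flattenJsonForAppend,
  isJsonContentType,
  parseWriteArgs,
  type ParsedWriteArgs,
} from "@durable-streams/cli";

// Parse CLI-style arguments into the typed structure declared above.
const parsed: ParsedWriteArgs = parseWriteArgs(["--batch-json", '[{"a": 1}, {"a": 2}]']);

if (isJsonContentType(parsed.contentType)) {
  // One level of flattening: a two-element array becomes two messages.
  const messages = [...flattenJsonForAppend(JSON.parse(parsed.content))];
  console.log(messages.length); // 2
}
```
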
package/dist/index.js CHANGED
@@ -1,7 +1,71 @@
 #!/usr/bin/env node
+import { resolve } from "node:path";
 import { stderr, stdin, stdout } from "node:process";
+import { fileURLToPath } from "node:url";
 import { DurableStream } from "@durable-streams/client";
 
+//#region src/jsonUtils.ts
+/**
+ * Check if content-type indicates JSON mode.
+ * Handles cases like "application/json; charset=utf-8".
+ */
+function isJsonContentType(contentType) {
+  return contentType.split(`;`)[0].trim().toLowerCase() === `application/json`;
+}
+/**
+ * One-level array flattening for JSON batch semantics.
+ * - Single object: `{}` → yields once
+ * - Array: `[{}, {}]` → yields each element (flattened)
+ * - Nested array: `[[{}, {}]]` → yields `[{}, {}]` (outer array flattened, inner preserved)
+ *
+ * This matches the protocol's batch semantics where servers flatten exactly one level.
+ */
+function* flattenJsonForAppend(parsed) {
+  if (Array.isArray(parsed)) for (const item of parsed) yield item;
+  else yield parsed;
+}
+
+//#endregion
+//#region src/parseWriteArgs.ts
+/**
+ * Parse write command arguments, extracting content-type flags and content.
+ * @param args - Arguments after the stream_id (starting from index 2)
+ * @returns Parsed content type and content string
+ * @throws Error if --content-type is missing its value or if unknown flags are provided
+ */
+function parseWriteArgs(args) {
+  let contentType = `application/octet-stream`;
+  let batchJson = false;
+  const contentParts = [];
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+    if (arg === `--json`) {
+      contentType = `application/json`;
+      continue;
+    }
+    if (arg === `--batch-json`) {
+      batchJson = true;
+      contentType = `application/json`;
+      continue;
+    }
+    if (arg === `--content-type`) {
+      const nextArg = args[i + 1];
+      if (!nextArg || nextArg.startsWith(`--`)) throw new Error(`--content-type requires a value`);
+      contentType = nextArg;
+      i++;
+      continue;
+    }
+    if (arg.startsWith(`--`)) throw new Error(`unknown flag: ${arg}`);
+    contentParts.push(arg);
+  }
+  return {
+    contentType,
+    content: contentParts.join(` `),
+    batchJson
+  };
+}
+
+//#endregion
 //#region src/index.ts
 const STREAM_URL = process.env.STREAM_URL || `http://localhost:4437`;
 function printUsage() {
@@ -13,6 +77,11 @@ Usage:
   durable-stream read <stream_id>       Follow a stream and write to stdout
   durable-stream delete <stream_id>     Delete a stream
 
+Write Options:
+  --content-type <type>   Content-Type for the message (default: application/octet-stream)
+  --json                  Write as JSON (input stored as single message)
+  --batch-json            Write as JSON array of messages (each array element stored separately)
+
 Environment Variables:
   STREAM_URL    Base URL of the stream server (default: http://localhost:4437)
 `);
@@ -30,26 +99,63 @@ async function createStream(streamId) {
     process.exit(1);
   }
 }
-async function writeStream(streamId, content) {
+/**
+ * Append JSON data to a stream with one-level array flattening.
+ */
+async function appendJson(stream, parsed) {
+  let count = 0;
+  for (const item of flattenJsonForAppend(parsed)) {
+    await stream.append(item);
+    count++;
+  }
+  return count;
+}
+async function writeStream(streamId, contentType, batchJson, content) {
   const url = `${STREAM_URL}/v1/stream/${streamId}`;
+  const isJson = isJsonContentType(contentType);
   try {
-    const stream = new DurableStream({ url });
+    const stream = new DurableStream({
+      url,
+      contentType
+    });
     if (content) {
       const processedContent = content.replace(/\\n/g, `\n`).replace(/\\t/g, `\t`).replace(/\\r/g, `\r`).replace(/\\\\/g, `\\`);
-      await stream.append(processedContent);
-      console.log(`Wrote ${processedContent.length} bytes to ${streamId}`);
+      if (isJson) {
+        const parsed = JSON.parse(processedContent);
+        if (batchJson) {
+          const count = await appendJson(stream, parsed);
+          console.log(`Wrote ${count} message(s) to ${streamId}`);
+        } else {
+          await stream.append(parsed);
+          console.log(`Wrote 1 message to ${streamId}`);
+        }
+      } else {
+        await stream.append(processedContent);
+        console.log(`Wrote ${processedContent.length} bytes to ${streamId}`);
+      }
     } else {
       const chunks = [];
       stdin.on(`data`, (chunk) => {
        chunks.push(chunk);
      });
-      await new Promise((resolve, reject) => {
-        stdin.on(`end`, resolve);
+      await new Promise((resolve$1, reject) => {
+        stdin.on(`end`, resolve$1);
         stdin.on(`error`, reject);
       });
       const data = Buffer.concat(chunks);
-      await stream.append(data);
-      console.log(`Wrote ${data.length} bytes to ${streamId}`);
+      if (isJson) {
+        const parsed = JSON.parse(data.toString(`utf8`));
+        if (batchJson) {
+          const count = await appendJson(stream, parsed);
+          console.log(`Wrote ${count} message(s) to ${streamId}`);
+        } else {
+          await stream.append(parsed);
+          console.log(`Wrote 1 message to ${streamId}`);
+        }
+      } else {
+        await stream.append(data);
+        console.log(`Wrote ${data.length} bytes to ${streamId}`);
+      }
     }
   } catch (error) {
     if (error instanceof Error) stderr.write(`Error writing to stream: ${error.message}\n`);
@@ -102,9 +208,15 @@ async function main() {
       process.exit(1);
     }
     const streamId = args[1];
-    const content = args.slice(2).join(` `);
-    if (!stdin.isTTY) await writeStream(streamId);
-    else if (content) await writeStream(streamId, content);
+    let parsed;
+    try {
+      parsed = parseWriteArgs(args.slice(2));
+    } catch (error) {
+      if (error instanceof Error) stderr.write(`Error: ${error.message}\n`);
+      process.exit(1);
+    }
+    if (!stdin.isTTY) await writeStream(streamId, parsed.contentType, parsed.batchJson);
+    else if (parsed.content) await writeStream(streamId, parsed.contentType, parsed.batchJson, parsed.content);
     else {
       stderr.write(`Error: content required (provide as argument or pipe to stdin)\n`);
       printUsage();
@@ -136,9 +248,16 @@ async function main() {
     process.exit(1);
   }
 }
-main().catch((error) => {
+function isMainModule() {
+  if (!process.argv[1]) return false;
+  const scriptPath = resolve(process.argv[1]);
+  const modulePath = fileURLToPath(import.meta.url);
+  return scriptPath === modulePath;
+}
+if (isMainModule()) main().catch((error) => {
   stderr.write(`Fatal error: ${error.message}\n`);
   process.exit(1);
 });
 
-//#endregion
+//#endregion
+export { flattenJsonForAppend, isJsonContentType, parseWriteArgs };
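
The new `isMainModule()` guard is what lets the ESM bundle double as a library: `main()` only runs when `process.argv[1]` resolves to the bundle's own file, so importing it (for example from a test suite) has no side effects. A stripped-down sketch of the same pattern for any ESM entry point; the names below are illustrative rather than taken from the package:

```typescript
import { resolve } from "node:path";
import { fileURLToPath } from "node:url";

// Illustrative stand-in for the bundle's CLI logic.
async function main(): Promise<void> {
  console.log("running as a CLI");
}

function isMainModule(): boolean {
  // When this file is imported (e.g. by a test runner), argv[1] points at the
  // runner's entry script, the paths differ, and main() is skipped.
  if (!process.argv[1]) return false;
  return resolve(process.argv[1]) === fileURLToPath(import.meta.url);
}

if (isMainModule()) {
  main().catch((error: unknown) => {
    process.stderr.write(`Fatal error: ${(error as Error).message}\n`);
    process.exit(1);
  });
}
```
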
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@durable-streams/cli",
   "description": "CLI tool for working with Durable Streams",
-  "version": "0.1.3",
+  "version": "0.1.5",
   "author": "Durable Stream contributors",
   "bin": {
     "cli": "./dist/index.js",
@@ -12,14 +12,15 @@
     "url": "https://github.com/durable-streams/durable-streams/issues"
   },
   "dependencies": {
-    "@durable-streams/client": "0.1.2"
+    "@durable-streams/client": "0.1.4"
   },
   "devDependencies": {
     "@types/node": "^22.15.21",
     "tsdown": "^0.9.0",
     "tsx": "^4.19.2",
     "typescript": "^5.5.2",
-    "@durable-streams/server": "0.1.3"
+    "vitest": "^3.1.3",
+    "@durable-streams/server": "0.1.5"
   },
   "engines": {
     "node": ">=18.0.0"
@@ -62,6 +63,7 @@
     "build": "tsdown",
     "dev": "tsdown --watch",
     "link:dev": "pnpm link --global",
-    "start:dev": "tsx --watch example-server.ts"
+    "start:dev": "tsx --watch example-server.ts",
+    "test": "vitest run"
   }
 }