@ckbfs/api 1.0.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,49 @@
1
+ ---
2
+ description:
3
+ globs:
4
+ alwaysApply: true
5
+ ---
6
+
7
+ You are an expert in TypeScript, Node.js, Next.js App Router, React, Shadcn UI, Radix UI and Tailwind.
8
+
9
+ Code Style and Structure
10
+ - Write concise, technical TypeScript code with accurate examples.
11
+ - Use functional and declarative programming patterns; avoid classes.
12
+ - Prefer iteration and modularization over code duplication.
13
+ - Use descriptive variable names with auxiliary verbs (e.g., isLoading, hasError).
14
+ - Structure files: exported component, subcomponents, helpers, static content, types.
15
+
16
+ Naming Conventions
17
+ - Use lowercase with dashes for directories (e.g., components/auth-wizard).
18
+ - Favor named exports for components.
19
+
20
+ TypeScript Usage
21
+ - Use TypeScript for all code; prefer interfaces over types.
22
+ - Avoid enums; use maps instead.
23
+ - Use functional components with TypeScript interfaces.
24
+
25
+ Syntax and Formatting
26
+ - Use the "function" keyword for pure functions.
27
+ - Avoid unnecessary curly braces in conditionals; use concise syntax for simple statements.
28
+ - Use declarative JSX.
29
+
30
+ UI and Styling
31
+ - Use Shadcn UI, Radix, and Tailwind for components and styling.
32
+ - Implement responsive design with Tailwind CSS; use a mobile-first approach.
33
+
34
+ Performance Optimization
35
+ - Minimize 'use client', 'useEffect', and 'setState'; favor React Server Components (RSC).
36
+ - Wrap client components in Suspense with fallback.
37
+ - Use dynamic loading for non-critical components.
38
+ - Optimize images: use WebP format, include size data, implement lazy loading.
39
+
40
+ Key Conventions
41
+ - Use 'nuqs' for URL search parameter state management.
42
+ - Optimize Web Vitals (LCP, CLS, FID).
43
+ - Limit 'use client':
44
+ - Favor server components and Next.js SSR.
45
+ - Use only for Web API access in small components.
46
+ - Avoid for data fetching or state management.
47
+
48
+ Follow Next.js docs for Data Fetching, Rendering, and Routing.
49
+
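The rule file added above is a generic TypeScript/React style guide. As a purely illustrative aside (not taken from the package), a minimal sketch of a few of the listed conventions — interfaces over types, maps instead of enums, auxiliary-verb booleans, the `function` keyword for pure functions — could look like this, with hypothetical names throughout:

```ts
// Hypothetical sketch of the conventions listed above; not part of the package.
interface AuthState {
  isLoading: boolean;
  hasError: boolean;
}

// A const map instead of an enum for a fixed set of values.
const AUTH_STATUS = {
  signedIn: "signed-in",
  signedOut: "signed-out",
} as const;

type AuthStatus = (typeof AUTH_STATUS)[keyof typeof AUTH_STATUS];

// "function" keyword for a pure function with a descriptive, typed signature.
export function getAuthStatus(state: AuthState): AuthStatus {
  return state.isLoading || state.hasError ? AUTH_STATUS.signedOut : AUTH_STATUS.signedIn;
}
```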
package/README.md CHANGED
@@ -108,12 +108,18 @@ npm run example:publish
108
108
  npm run example:append -- --txhash=0x123456...
109
109
  # OR
110
110
  PUBLISH_TX_HASH=0x123456... npm run example:append
111
+
112
+ # Run the retrieve file example (download a file from the blockchain)
113
+ npm run example:retrieve -- --txhash=0x123456... --output=./downloaded-file.txt
114
+ # OR
115
+ CKBFS_TX_HASH=0x123456... npm run example:retrieve
111
116
  ```
112
117
 
113
118
  ### Example Files
114
119
 
115
120
  - `examples/publish.ts` - Shows how to publish a file to CKBFS
116
121
  - `examples/append.ts` - Shows how to append to a previously published file
122
+ - `examples/retrieve.ts` - Shows how to retrieve a complete file from the blockchain
117
123
 
118
124
  To run the examples, first set your CKB private key:
119
125
 
package/append.txt CHANGED
@@ -1 +1 @@
1
- This is content to append to the previously published file.
1
+ This is content to append to the previously published file.
@@ -62,7 +62,7 @@ export type BackLinkType = {
62
62
  checksum: number;
63
63
  };
64
64
  export type CKBFSDataType = {
65
- index?: number[];
65
+ index?: number[] | number;
66
66
  indexes?: number[];
67
67
  checksum: number;
68
68
  contentType: Uint8Array;
@@ -41,9 +41,18 @@ exports.CKBFSDataV2 = codec_1.molecule.table({
41
41
  filename: base_1.blockchain.Bytes,
42
42
  backLinks: exports.BackLinksV2,
43
43
  }, ["indexes", "checksum", "contentType", "filename", "backLinks"]);
44
- // Helper function to safely get either index or indexes
44
+ // Helper function to safely get either index or indexes as array
45
45
  function getIndexes(data) {
46
- return data.indexes || data.index || [];
46
+ if (Array.isArray(data.indexes)) {
47
+ return data.indexes;
48
+ }
49
+ if (Array.isArray(data.index)) {
50
+ return data.index;
51
+ }
52
+ if (typeof data.index === 'number') {
53
+ return [data.index];
54
+ }
55
+ return [];
47
56
  }
48
57
  // Helper function to safely get either index or indexes from BackLinkType
49
58
  function getBackLinkIndex(bl) {
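The reworked `getIndexes` helper above normalizes the two index representations the SDK now accepts (a V1-style single `index` number, an index array, or a V2-style `indexes` array). A standalone restatement of that normalization, with made-up inputs, behaves as follows:

```ts
// Minimal restatement of the getIndexes normalization shown in the hunk above.
type IndexedData = { index?: number | number[]; indexes?: number[] };

function getIndexes(data: IndexedData): number[] {
  if (Array.isArray(data.indexes)) return data.indexes;
  if (Array.isArray(data.index)) return data.index;
  if (typeof data.index === "number") return [data.index];
  return [];
}

// Illustrative inputs (not taken from the package):
console.log(getIndexes({ indexes: [1, 2] })); // [1, 2]
console.log(getIndexes({ index: 1 }));        // [1]
console.log(getIndexes({}));                  // []
```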
@@ -69,31 +78,78 @@ function getBackLinkIndexes(bl) {
69
78
  exports.CKBFSData = {
70
79
  pack: (data, version = constants_1.ProtocolVersion.V2) => {
71
80
  if (version === constants_1.ProtocolVersion.V1) {
72
- // V1 formatting
81
+ // For V1, we need a single number index
82
+ let indexValue = [];
83
+ // Handle the various ways index might be specified
84
+ if (typeof data.index === 'number') {
85
+ // Single number
86
+ indexValue = [data.index];
87
+ }
88
+ else if (Array.isArray(data.index) && data.index.length > 0) {
89
+ // Array of numbers, use as is
90
+ indexValue = data.index;
91
+ }
92
+ else if (Array.isArray(data.indexes) && data.indexes.length > 0) {
93
+ // Try using indexes field if index is not available
94
+ indexValue = data.indexes;
95
+ }
96
+ // Map backlinks to V1 format - ensure each one has a single index
97
+ const v1Backlinks = data.backLinks.map(bl => {
98
+ let singleIndex = 0;
99
+ if (typeof bl.index === 'number') {
100
+ singleIndex = bl.index;
101
+ }
102
+ else if (Array.isArray(bl.indexes) && bl.indexes.length > 0) {
103
+ singleIndex = bl.indexes[0];
104
+ }
105
+ return {
106
+ txHash: bl.txHash,
107
+ index: singleIndex,
108
+ checksum: bl.checksum
109
+ };
110
+ });
73
111
  return exports.CKBFSDataV1.pack({
74
- index: getIndexes(data),
112
+ index: indexValue,
75
113
  checksum: data.checksum,
76
114
  contentType: data.contentType,
77
115
  filename: data.filename,
78
- backLinks: data.backLinks.map(bl => ({
79
- txHash: bl.txHash,
80
- index: getBackLinkIndex(bl),
81
- checksum: bl.checksum,
82
- })),
116
+ backLinks: v1Backlinks
83
117
  });
84
118
  }
85
119
  else {
86
- // V2 formatting
120
+ // V2 format - use indexes as an array
121
+ let indexesArray = [];
122
+ // Handle different index specification formats
123
+ if (Array.isArray(data.indexes) && data.indexes.length > 0) {
124
+ indexesArray = data.indexes;
125
+ }
126
+ else if (Array.isArray(data.index) && data.index.length > 0) {
127
+ indexesArray = data.index;
128
+ }
129
+ else if (typeof data.index === 'number') {
130
+ indexesArray = [data.index];
131
+ }
132
+ // Map backlinks to V2 format - ensure each one has indexes array
133
+ const v2Backlinks = data.backLinks.map(bl => {
134
+ let indexesValue = [];
135
+ if (Array.isArray(bl.indexes) && bl.indexes.length > 0) {
136
+ indexesValue = bl.indexes;
137
+ }
138
+ else if (typeof bl.index === 'number') {
139
+ indexesValue = [bl.index];
140
+ }
141
+ return {
142
+ txHash: bl.txHash,
143
+ indexes: indexesValue,
144
+ checksum: bl.checksum
145
+ };
146
+ });
87
147
  return exports.CKBFSDataV2.pack({
88
- indexes: getIndexes(data),
148
+ indexes: indexesArray,
89
149
  checksum: data.checksum,
90
150
  contentType: data.contentType,
91
151
  filename: data.filename,
92
- backLinks: data.backLinks.map(bl => ({
93
- txHash: bl.txHash,
94
- indexes: getBackLinkIndexes(bl),
95
- checksum: bl.checksum,
96
- })),
152
+ backLinks: v2Backlinks
97
153
  });
98
154
  }
99
155
  },
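The `pack` branch above coerces whatever index form the caller supplied into the version-specific shape before handing it to `CKBFSDataV1.pack` or `CKBFSDataV2.pack`: V1 backlinks carry a single index number, V2 backlinks carry an indexes array. A sketch of the two normalized shapes, with invented values:

```ts
// Illustrative input and the two shapes the branch above normalizes it into.
// All values are invented; only the field shapes matter.
const input = {
  index: 1, // caller supplied a single number
  checksum: 0x12345678,
  backLinks: [{ txHash: "0x" + "11".repeat(32), indexes: [1], checksum: 0x0badcafe }],
};

// V1 path: the index is wrapped into an array, and each backlink keeps one index number.
const v1Fields = {
  index: [input.index],
  backLinks: input.backLinks.map(bl => ({
    txHash: bl.txHash,
    index: bl.indexes[0] ?? 0,
    checksum: bl.checksum,
  })),
};

// V2 path: everything becomes an indexes array, including the backlinks.
const v2Fields = {
  indexes: [input.index],
  backLinks: input.backLinks.map(bl => ({
    txHash: bl.txHash,
    indexes: bl.indexes,
    checksum: bl.checksum,
  })),
};
```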
@@ -46,7 +46,7 @@ function createCKBFSCell(options) {
46
46
  * @returns Promise resolving to the created transaction
47
47
  */
48
48
  async function createPublishTransaction(signer, options) {
49
- const { contentChunks, contentType, filename, lock, feeRate, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, useTypeID = false } = options;
49
+ const { contentChunks, contentType, filename, lock, capacity, feeRate, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, useTypeID = false } = options;
50
50
  // Calculate checksum for the combined content
51
51
  const textEncoder = new TextEncoder();
52
52
  const combinedContent = Buffer.concat(contentChunks);
@@ -84,6 +84,8 @@ async function createPublishTransaction(signer, options) {
84
84
  }
85
85
  // Get CKBFS script config
86
86
  const config = (0, constants_1.getCKBFSScriptConfig)(network, version, useTypeID);
87
+ const preCkbfsTypeScript = new core_1.Script(ensureHexPrefix(config.codeHash), config.hashType, "0x0000000000000000000000000000000000000000000000000000000000000000");
88
+ const ckbfsCellSize = BigInt(outputData.length + preCkbfsTypeScript.occupiedSize + lock.occupiedSize + 8) * 100000000n;
87
89
  // Create pre transaction without cell deps initially
88
90
  const preTx = core_1.Transaction.from({
89
91
  outputs: [
@@ -93,7 +95,8 @@ async function createPublishTransaction(signer, options) {
93
95
  lock,
94
96
  network,
95
97
  version,
96
- useTypeID
98
+ useTypeID,
99
+ capacity: ckbfsCellSize || capacity
97
100
  })
98
101
  ],
99
102
  witnesses: [
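The new `ckbfsCellSize` line above computes the CKBFS cell's capacity from its occupied size: packed output data + type script size + lock script size + the cell's own 8-byte capacity field, at 1 CKB (10^8 shannons) per occupied byte. A rough worked example with illustrative, not measured, sizes:

```ts
// Worked example of the minimum-capacity rule used above.
// Sizes are illustrative placeholders, not values taken from the package.
const outputDataLen = 150n;  // packed CKBFSData bytes
const typeScriptSize = 65n;  // code_hash (32) + hash_type (1) + 32-byte args
const lockScriptSize = 53n;  // typical secp256k1 lock: 32 + 1 + 20-byte args
const capacityField = 8n;    // the cell's own capacity field

const SHANNONS_PER_BYTE = 100_000_000n;
const minCapacity =
  (outputDataLen + typeScriptSize + lockScriptSize + capacityField) * SHANNONS_PER_BYTE;

console.log(minCapacity); // 27_600_000_000n shannons, i.e. 276 CKB
```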
@@ -154,7 +157,8 @@ async function createAppendTransaction(signer, options) {
154
157
  // Get CKBFS script config early to use version info
155
158
  const config = (0, constants_1.getCKBFSScriptConfig)(network, version);
156
159
  // Create CKBFS witnesses - this may vary between V1 and V2
157
- const ckbfsWitnesses = (0, witness_1.createChunkedCKBFSWitnesses)(contentChunks);
160
+ // Pass the version to ensure the correct witness format
161
+ const ckbfsWitnesses = (0, witness_1.createChunkedCKBFSWitnesses)(contentChunks, 0, version);
158
162
  // Combine the new content chunks
159
163
  const combinedContent = Buffer.concat(contentChunks);
160
164
  // Instead of calculating a new checksum from scratch, update the existing checksum
@@ -166,42 +170,81 @@ async function createAppendTransaction(signer, options) {
166
170
  let newBackLink;
167
171
  if (version === constants_1.ProtocolVersion.V1) {
168
172
  // V1 format: Use index field (single number)
173
+ // In V1, BackLink.index should be a single number (not an array)
174
+ let singleIndex = 0;
175
+ if (data.index) {
176
+ if (Array.isArray(data.index) && data.index.length > 0) {
177
+ singleIndex = data.index[0];
178
+ }
179
+ else if (typeof data.index === 'number') {
180
+ singleIndex = data.index;
181
+ }
182
+ }
169
183
  newBackLink = {
170
184
  txHash: outPoint.txHash,
171
- index: data.index && data.index.length > 0 ? data.index[0] : 0,
185
+ index: singleIndex,
172
186
  checksum: data.checksum,
173
187
  };
174
188
  }
175
189
  else {
176
190
  // V2 format: Use indexes field (array of numbers)
191
+ const indices = [];
192
+ if (data.indexes && Array.isArray(data.indexes)) {
193
+ indices.push(...data.indexes);
194
+ }
195
+ else if (data.index) {
196
+ if (Array.isArray(data.index)) {
197
+ indices.push(...data.index);
198
+ }
199
+ else if (typeof data.index === 'number') {
200
+ indices.push(data.index);
201
+ }
202
+ }
177
203
  newBackLink = {
178
204
  txHash: outPoint.txHash,
179
- indexes: data.indexes || data.index || [],
205
+ indexes: indices,
180
206
  checksum: data.checksum,
181
207
  };
182
208
  }
183
- // Update backlinks
209
+ // Update backlinks - preserve the existing backlinks
184
210
  const backLinks = [...(data.backLinks || []), newBackLink];
185
- // Define indices based on version
186
- let outputData;
187
211
  // Calculate the actual witness indices where our content is placed
188
212
  // Index 0 is reserved for the secp256k1 witness for signing
189
213
  // So our CKBFS data starts at index 1
190
214
  const contentStartIndex = 1;
191
215
  const witnessIndices = Array.from({ length: contentChunks.length }, (_, i) => contentStartIndex + i);
216
+ // Define indices based on version
217
+ let outputData;
192
218
  if (version === constants_1.ProtocolVersion.V1) {
193
- // In V1, use the first index where content is placed
194
- // (even if we have multiple witnesses, V1 only supports a single index)
219
+ // In V1, index should be a single number (not an array)
220
+ // Rule 13 for V1: Output CKBFS Cell's index cannot be null
195
221
  outputData = molecule_1.CKBFSData.pack({
196
- index: [contentStartIndex],
222
+ index: witnessIndices[0], // Use the first witness index as a single number
197
223
  checksum: contentChecksum,
198
224
  contentType: data.contentType,
199
225
  filename: data.filename,
200
- backLinks,
226
+ backLinks: backLinks.map(bl => {
227
+ // Ensure V1 format backlinks use a single index number
228
+ let singleIndex = 0;
229
+ // Handle existing index field
230
+ if (typeof bl.index === 'number') {
231
+ singleIndex = bl.index;
232
+ }
233
+ // Try to extract from indexes array if available
234
+ else if (bl.indexes && Array.isArray(bl.indexes) && bl.indexes.length > 0) {
235
+ singleIndex = bl.indexes[0];
236
+ }
237
+ return {
238
+ txHash: bl.txHash,
239
+ index: singleIndex,
240
+ checksum: bl.checksum
241
+ };
242
+ }),
201
243
  }, version);
202
244
  }
203
245
  else {
204
246
  // In V2, use all the indices where content is placed
247
+ // Rule 13 for V2: Output CKBFS Cell's indexes cannot be empty
205
248
  outputData = molecule_1.CKBFSData.pack({
206
249
  indexes: witnessIndices,
207
250
  checksum: contentChecksum,
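The append path above records the consumed CKBFS cell as a new backlink (its transaction hash, checksum, and witness index or indexes) and appends it to the existing backlink list, with the field shape depending on the protocol version. A compact sketch of that bookkeeping, with invented values:

```ts
// Sketch of the backlink bookkeeping performed by the branch above.
// Field names follow the diff; the concrete values are invented.
type BackLink =
  | { txHash: string; index: number; checksum: number }      // V1 shape
  | { txHash: string; indexes: number[]; checksum: number }; // V2 shape

const previousCell = {
  txHash: "0x" + "22".repeat(32),
  checksum: 0x11112222,
  indexes: [1],
};

// V1 keeps a single index number; V2 keeps the full indexes array.
const v1BackLink: BackLink = {
  txHash: previousCell.txHash,
  index: previousCell.indexes[0],
  checksum: previousCell.checksum,
};
const v2BackLink: BackLink = {
  txHash: previousCell.txHash,
  indexes: previousCell.indexes,
  checksum: previousCell.checksum,
};

// The new cell's backlink list is the old list plus the link to the consumed cell.
const existing: BackLink[] = [];
const backLinks = [...existing, v2BackLink];
```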
@@ -34,6 +34,7 @@ export declare function isCKBFSWitness(witness: Uint8Array): boolean;
34
34
  * Creates an array of witnesses for a CKBFS transaction from content chunks
35
35
  * @param contentChunks Array of content chunks
36
36
  * @param version Optional version byte (default is 0)
37
+ * @param protocolVersion Optional protocol version (default is V2)
37
38
  * @returns Array of Uint8Array witnesses
38
39
  */
39
- export declare function createChunkedCKBFSWitnesses(contentChunks: Uint8Array[], version?: number): Uint8Array[];
40
+ export declare function createChunkedCKBFSWitnesses(contentChunks: Uint8Array[], version?: number, protocolVersion?: string): Uint8Array[];
@@ -65,8 +65,9 @@ function isCKBFSWitness(witness) {
65
65
  * Creates an array of witnesses for a CKBFS transaction from content chunks
66
66
  * @param contentChunks Array of content chunks
67
67
  * @param version Optional version byte (default is 0)
68
+ * @param protocolVersion Optional protocol version (default is V2)
68
69
  * @returns Array of Uint8Array witnesses
69
70
  */
70
- function createChunkedCKBFSWitnesses(contentChunks, version = 0) {
71
+ function createChunkedCKBFSWitnesses(contentChunks, version = 0, protocolVersion) {
71
72
  return contentChunks.map(chunk => createCKBFSWitness(chunk, version));
72
73
  }
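Per `createCKBFSWitness` and the extraction logic added in `utils/file.ts` later in this diff, a CKBFS witness is a 5-byte "CKBFS" magic, one version byte (0x00), then the raw content chunk. A self-contained sketch of building and parsing that layout (independent of the package's helpers):

```ts
// Sketch of the witness layout implied by the code above: "CKBFS" + 0x00 + content.
function buildCkbfsWitness(content: Uint8Array): Uint8Array {
  const magic = new TextEncoder().encode("CKBFS"); // 5 bytes
  const witness = new Uint8Array(magic.length + 1 + content.length);
  witness.set(magic, 0);
  witness[magic.length] = 0x00; // version byte
  witness.set(content, magic.length + 1);
  return witness;
}

function extractCkbfsContent(witness: Uint8Array): Uint8Array | null {
  const isCkbfs =
    witness.length >= 6 &&
    new TextDecoder().decode(witness.slice(0, 5)) === "CKBFS";
  return isCkbfs ? witness.slice(6) : null; // skip magic + version byte
}
```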
package/example.txt CHANGED
@@ -1 +1 @@
1
- This is a sample file for testing CKBFS.
1
+ Hello, CKBFS! This is a sample file published using 20240906.ce6724722cf6 protocol.
@@ -1,5 +1,5 @@
1
1
  import { CKBFS, NetworkType, ProtocolVersion, CKBFSDataType, extractCKBFSWitnessContent, isCKBFSWitness, CKBFSData } from '../src/index';
2
- import { Script, ClientPublicTestnet, Transaction } from "@ckb-ccc/core";
2
+ import { Script, ClientPublicTestnet, Transaction, ccc } from "@ckb-ccc/core";
3
3
 
4
4
  // Replace with your actual private key
5
5
  const privateKey = process.env.CKB_PRIVATE_KEY || 'your-private-key-here';
@@ -77,25 +77,30 @@ async function getCellInfoFromTransaction(txHash: string): Promise<{
77
77
  // Parse the output data as CKBFS data
78
78
  // First remove 0x prefix if present
79
79
  const rawData = outputData.startsWith('0x')
80
- ? Buffer.from(outputData.slice(2), 'hex')
80
+ ? ccc.bytesFrom(outputData.slice(2), 'hex')
81
81
  : Buffer.from(outputData, 'hex');
82
-
83
- // For demonstration purposes, we'll manually create a CKBFSDataType object
84
- // In a real app, you would properly decode the rawData from molecule format
85
- const sampleDataFields = {
86
- index: [1],
87
- checksum: 12345,
88
- contentType: new TextEncoder().encode('text/plain'),
89
- filename: new TextEncoder().encode('example.txt'),
90
- backLinks: []
91
- };
92
82
 
93
- // Use this as our data
94
- const ckbfsData: CKBFSDataType = sampleDataFields;
83
+ // Actually unpack the raw data using CKBFSData.unpack
84
+ // Use the same protocol version as configured in the SDK
85
+ const version = ProtocolVersion.V2; // Use V2 as configured in SDK initialization
86
+ console.log(`Using protocol version ${version} for unpacking cell data`);
95
87
 
96
- console.log(`CKBFS data processed`);
97
- console.log(`Using filename: ${new TextDecoder().decode(ckbfsData.filename)}`);
98
- console.log(`Using content type: ${new TextDecoder().decode(ckbfsData.contentType)}`);
88
+ let ckbfsData: CKBFSDataType;
89
+ try {
90
+ ckbfsData = CKBFSData.unpack(rawData, version);
91
+
92
+ // Log the actual cell data for transparency
93
+ console.log('Successfully unpacked CKBFS cell data:');
94
+ console.log(`- Checksum: ${ckbfsData.checksum}`);
95
+ console.log(`- File: ${ckbfsData.filename}`);
96
+ console.log(`- Content Type: ${ckbfsData.contentType}`);
97
+ console.log(`- Index: ${ckbfsData.index}`);
98
+ console.log(`- Indexes: ${ckbfsData.indexes}`);
99
+ console.log(`- Backlinks count: ${ckbfsData.backLinks?.length || 0}`);
100
+ } catch (error) {
101
+ console.error('Error unpacking CKBFS data:', error);
102
+ throw new Error(`Failed to unpack CKBFS data: ${error}`);
103
+ }
99
104
 
100
105
  return {
101
106
  outPoint: {
@@ -110,42 +115,8 @@ async function getCellInfoFromTransaction(txHash: string): Promise<{
110
115
  } catch (error) {
111
116
  console.error('Error retrieving transaction data:', error);
112
117
 
113
- // Fallback to simulated data for demonstration purposes
114
- console.warn('FALLBACK: Using simulated data for demonstration');
115
-
116
- // Get lock script for this account
117
- const lock = await ckbfs.getLock();
118
-
119
- // Get CKBFS script config
120
- const config = ckbfs.getCKBFSConfig();
121
-
122
- // Create sample data for demonstration
123
- const sampleData: CKBFSDataType = {
124
- index: [1],
125
- checksum: 12345, // Sample checksum number
126
- contentType: new TextEncoder().encode('text/plain'),
127
- filename: new TextEncoder().encode('example.txt'),
128
- backLinks: []
129
- };
130
-
131
- // Create the type script
132
- const typeArgs = '0x3cc03661013140855e756c032ce83bc270a7ca3f1f3b76ec21a8ea0155ac3a7c';
133
- const typeScript = new Script(
134
- ensureHexPrefix(config.codeHash),
135
- config.hashType as any,
136
- typeArgs
137
- );
138
-
139
- return {
140
- outPoint: {
141
- txHash,
142
- index: 0
143
- },
144
- type: typeScript,
145
- lock,
146
- capacity: 200n * 100000000n,
147
- data: sampleData
148
- };
118
+ // If we can't get or parse the real data, we should fail - not use mock data
119
+ throw new Error(`Failed to retrieve or parse cell data: ${error}`);
149
120
  }
150
121
  }
151
122
 
@@ -0,0 +1,115 @@
1
+ import { CKBFS, NetworkType, ProtocolVersion, getFileContentFromChain, saveFileFromChain } from '../src/index';
2
+ import { ClientPublicTestnet } from "@ckb-ccc/core";
3
+
4
+ // Replace with your actual private key (or leave this default if just reading)
5
+ const privateKey = process.env.CKB_PRIVATE_KEY || 'your-private-key-here';
6
+
7
+ // Parse command line arguments for transaction hash
8
+ const txHashArg = process.argv.find(arg => arg.startsWith('--txhash='));
9
+ const outputArg = process.argv.find(arg => arg.startsWith('--output='));
10
+
11
+ const txHash = txHashArg ? txHashArg.split('=')[1] : process.env.CKBFS_TX_HASH || '';
12
+ const outputPath = outputArg ? outputArg.split('=')[1] : undefined;
13
+
14
+ if (!txHash) {
15
+ console.error('Please provide a transaction hash using --txhash=<tx_hash> or the CKBFS_TX_HASH environment variable');
16
+ process.exit(1);
17
+ }
18
+
19
+ // Initialize the SDK (read-only is fine for retrieval)
20
+ const ckbfs = new CKBFS(
21
+ privateKey,
22
+ NetworkType.Testnet,
23
+ {
24
+ version: ProtocolVersion.V2,
25
+ useTypeID: false
26
+ }
27
+ );
28
+
29
+ // Initialize CKB client for testnet
30
+ const client = new ClientPublicTestnet();
31
+
32
+ /**
33
+ * Example of retrieving a file from CKBFS
34
+ */
35
+ async function retrieveExample() {
36
+ try {
37
+ console.log(`Retrieving CKBFS file from transaction: ${txHash}`);
38
+
39
+ // Get transaction details
40
+ const txWithStatus = await client.getTransaction(txHash);
41
+ if (!txWithStatus || !txWithStatus.transaction) {
42
+ throw new Error(`Transaction ${txHash} not found`);
43
+ }
44
+
45
+ // Find index of the CKBFS cell in outputs (assuming it's the first one with a type script)
46
+ const tx = txWithStatus.transaction;
47
+ let ckbfsCellIndex = 0;
48
+
49
+ // Get cell data
50
+ const outputData = tx.outputsData[ckbfsCellIndex];
51
+ if (!outputData) {
52
+ throw new Error('Output data not found');
53
+ }
54
+
55
+ // Get cell info for retrieval
56
+ const outPoint = {
57
+ txHash,
58
+ index: ckbfsCellIndex
59
+ };
60
+
61
+ // Import necessary components from index
62
+ const { CKBFSData } = require('../src/index');
63
+
64
+ // Parse the output data
65
+ const rawData = outputData.startsWith('0x')
66
+ ? Buffer.from(outputData.slice(2), 'hex')
67
+ : Buffer.from(outputData, 'hex');
68
+
69
+ // Try with both V1 and V2 protocols
70
+ let ckbfsData;
71
+ try {
72
+ console.log('Trying to unpack with V2...');
73
+ ckbfsData = CKBFSData.unpack(rawData, ProtocolVersion.V2);
74
+ } catch (error) {
75
+ console.log('Failed with V2, trying V1...');
76
+ ckbfsData = CKBFSData.unpack(rawData, ProtocolVersion.V1);
77
+ }
78
+
79
+ // Retrieve full file content
80
+ console.log('Retrieving file content by following backlinks...');
81
+ const fileContent = await getFileContentFromChain(client, outPoint, ckbfsData);
82
+ console.log(`Retrieved file content: ${fileContent.length} bytes`);
83
+
84
+ // Save to file
85
+ const savedPath = saveFileFromChain(fileContent, ckbfsData, outputPath);
86
+ console.log(`File saved to: ${savedPath}`);
87
+
88
+ return savedPath;
89
+ } catch (error) {
90
+ console.error('Error retrieving file:', error);
91
+ throw error;
92
+ }
93
+ }
94
+
95
+ /**
96
+ * Main function to run the example
97
+ */
98
+ async function main() {
99
+ console.log('Running CKBFS file retrieval example...');
100
+ console.log('--------------------------------------');
101
+
102
+ try {
103
+ await retrieveExample();
104
+ console.log('Example completed successfully!');
105
+ process.exit(0);
106
+ } catch (error) {
107
+ console.error('Example failed:', error);
108
+ process.exit(1);
109
+ }
110
+ }
111
+
112
+ // Run the example if this file is executed directly
113
+ if (require.main === module) {
114
+ main().catch(console.error);
115
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ckbfs/api",
3
- "version": "1.0.2",
3
+ "version": "1.2.0",
4
4
  "description": "SDK for CKBFS protocol on CKB",
5
5
  "license": "MIT",
6
6
  "author": "Code Monad<code@lab-11.org>",
@@ -14,7 +14,8 @@
14
14
  "test": "jest",
15
15
  "example": "ts-node examples/index.ts",
16
16
  "example:publish": "ts-node examples/publish.ts",
17
- "example:append": "ts-node examples/append.ts"
17
+ "example:append": "ts-node examples/append.ts",
18
+ "example:retrieve": "ts-node examples/retrieve.ts"
18
19
  },
19
20
  "keywords": [
20
21
  "ckb",
@@ -23,17 +24,18 @@
23
24
  "sdk"
24
25
  ],
25
26
  "devDependencies": {
27
+ "@types/jest": "^29.5.12",
26
28
  "@types/node": "^22.7.9",
27
- "ts-node": "^10.9.2",
28
- "typescript": "^5.6.3",
29
29
  "jest": "^29.7.0",
30
30
  "ts-jest": "^29.1.2",
31
- "@types/jest": "^29.5.12"
31
+ "ts-node": "^10.9.2",
32
+ "typescript": "^5.6.3"
32
33
  },
33
34
  "dependencies": {
34
35
  "@ckb-ccc/core": "^0.1.0-alpha.4",
35
36
  "@ckb-lumos/base": "^0.23.0",
36
37
  "@ckb-lumos/codec": "^0.23.0",
38
+ "adler-32": "^1.3.1",
37
39
  "hash-wasm": "^4.11.0"
38
40
  }
39
41
  }
@@ -0,0 +1 @@
1
+ 0x3f61e43834d55fd3f3c8cf9d8a9c643db7790050ffb00734a1fb66da8e98478f
package/src/index.ts CHANGED
@@ -22,7 +22,9 @@ import {
22
22
  writeFile,
23
23
  getContentType,
24
24
  splitFileIntoChunks,
25
- combineChunksToFile
25
+ combineChunksToFile,
26
+ getFileContentFromChain,
27
+ saveFileFromChain
26
28
  } from './utils/file';
27
29
  import {
28
30
  createCKBFSWitness,
@@ -323,6 +325,8 @@ export {
323
325
  getContentType,
324
326
  splitFileIntoChunks,
325
327
  combineChunksToFile,
328
+ getFileContentFromChain,
329
+ saveFileFromChain,
326
330
 
327
331
  // Witness utilities
328
332
  createCKBFSWitness,
@@ -1,4 +1,5 @@
1
1
  import { adler32 } from 'hash-wasm';
2
+ import ADLER32 from 'adler-32';
2
3
 
3
4
  /**
4
5
  * Utility functions for Adler32 checksum generation and verification
@@ -16,24 +17,40 @@ export async function calculateChecksum(data: Uint8Array): Promise<number> {
16
17
  }
17
18
 
18
19
  /**
19
- * Updates an existing checksum with new data
20
+ * Updates an existing checksum with new data using proper rolling Adler-32 calculation
20
21
  * @param previousChecksum The existing checksum to update
21
22
  * @param newData The new data to add to the checksum
22
23
  * @returns Promise resolving to the updated checksum as a number
23
24
  */
24
25
  export async function updateChecksum(previousChecksum: number, newData: Uint8Array): Promise<number> {
25
- // In a real implementation, this would require the actual Adler32 state recovery
26
- // For now, we're simply concatenating the previousChecksum as a hex string with the new data
27
- // and calculating a new checksum
26
+ // Extract a and b values from the previous checksum
27
+ // In Adler-32, the checksum is composed of two 16-bit integers: a and b
28
+ // The final checksum is (b << 16) | a
29
+ const a = previousChecksum & 0xFFFF;
30
+ const b = (previousChecksum >>> 16) & 0xFFFF;
28
31
 
29
- const checksumBytes = Buffer.alloc(4);
30
- checksumBytes.writeUInt32BE(previousChecksum);
32
+ // Use the adler-32 package to calculate a proper rolling checksum
33
+ // The package doesn't have a "resume" function, so we need to work with the underlying algorithm
31
34
 
32
- // Concatenate the previous checksum bytes with the new data
33
- const combinedData = Buffer.concat([checksumBytes, Buffer.from(newData)]);
35
+ // Initialize with existing a and b values
36
+ let adlerA = a;
37
+ let adlerB = b;
38
+ const MOD_ADLER = 65521; // Adler-32 modulo value
34
39
 
35
- // Calculate the new checksum
36
- return calculateChecksum(combinedData);
40
+ // Process each byte of the new data
41
+ for (let i = 0; i < newData.length; i++) {
42
+ adlerA = (adlerA + newData[i]) % MOD_ADLER;
43
+ adlerB = (adlerB + adlerA) % MOD_ADLER;
44
+ }
45
+
46
+ // Combine a and b to get the final checksum
47
+ const updatedChecksum = (adlerB << 16) | adlerA;
48
+
49
+ // The result should match what you'd get from the adler-32 package
50
+ // You can verify this in testing by calculating a full checksum
51
+ // of original + new data and comparing with this rolling result
52
+
53
+ return updatedChecksum;
37
54
  }
38
55
 
39
56
  /**
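The rewritten `updateChecksum` continues the Adler-32 state (the low 16-bit `a` and high 16-bit `b`) instead of re-hashing from scratch. One way to sanity-check that behaviour is to compare the rolling result against a full checksum of the concatenated data using the `adler-32` package this release adds as a dependency; the `>>> 0` below is only an assumption about how to normalize JavaScript's signed bitwise results for comparison, not part of the package API:

```ts
import ADLER32 from "adler-32";

// Verification sketch: rolling continuation must equal the checksum of the full buffer.
const MOD_ADLER = 65521;

function rollChecksum(previous: number, newData: Uint8Array): number {
  let a = previous & 0xffff;
  let b = (previous >>> 16) & 0xffff;
  for (const byte of newData) {
    a = (a + byte) % MOD_ADLER;
    b = (b + a) % MOD_ADLER;
  }
  return ((b << 16) | a) >>> 0; // keep the result unsigned
}

const part1 = new TextEncoder().encode("Hello, CKBFS! ");
const part2 = new TextEncoder().encode("appended content");

const full = ADLER32.buf(Buffer.concat([part1, part2])) >>> 0;
const rolled = rollChecksum(ADLER32.buf(part1) >>> 0, part2);

console.log(full === rolled); // expected: true
```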
@@ -0,0 +1,24 @@
1
+ // Create CKBFS cell output data based on version
2
+ let outputData: Uint8Array;
3
+
4
+ if (version === ProtocolVersion.V1) {
5
+ // V1 format: Single index field (a single number, not an array)
6
+ // For V1, use the first index where content is placed
7
+ outputData = CKBFSData.pack({
8
+ index: contentStartIndex,
9
+ checksum,
10
+ contentType: textEncoder.encode(contentType),
11
+ filename: textEncoder.encode(filename),
12
+ backLinks: [],
13
+ }, version);
14
+ } else {
15
+ // V2 format: Multiple indexes (array of numbers)
16
+ // For V2, use all the indices where content is placed
17
+ outputData = CKBFSData.pack({
18
+ indexes: witnessIndices,
19
+ checksum,
20
+ contentType: textEncoder.encode(contentType),
21
+ filename: textEncoder.encode(filename),
22
+ backLinks: [],
23
+ }, version);
24
+ }
package/src/utils/file.ts CHANGED
@@ -106,4 +106,147 @@ export function splitFileIntoChunks(filePath: string, chunkSize: number): Uint8A
106
106
  export function combineChunksToFile(chunks: Uint8Array[], outputPath: string): void {
107
107
  const combinedBuffer = Buffer.concat(chunks.map(chunk => Buffer.from(chunk)));
108
108
  writeFile(outputPath, combinedBuffer);
109
+ }
110
+
111
+ /**
112
+ * Utility function to safely decode buffer to string
113
+ * @param buffer The buffer to decode
114
+ * @returns Decoded string or placeholder on error
115
+ */
116
+ function safelyDecode(buffer: any): string {
117
+ if (!buffer) return '[Unknown]';
118
+ try {
119
+ if (buffer instanceof Uint8Array) {
120
+ return new TextDecoder().decode(buffer);
121
+ } else if (typeof buffer === 'string') {
122
+ return buffer;
123
+ } else {
124
+ return `[Buffer: ${buffer.toString()}]`;
125
+ }
126
+ } catch (e) {
127
+ return '[Decode Error]';
128
+ }
129
+ }
130
+
131
+ /**
132
+ * Retrieves complete file content from the blockchain by following backlinks
133
+ * @param client The CKB client to use for blockchain queries
134
+ * @param outPoint The output point of the latest CKBFS cell
135
+ * @param ckbfsData The data from the latest CKBFS cell
136
+ * @returns Promise resolving to the complete file content
137
+ */
138
+ export async function getFileContentFromChain(
139
+ client: any,
140
+ outPoint: { txHash: string; index: number },
141
+ ckbfsData: any
142
+ ): Promise<Uint8Array> {
143
+ console.log(`Retrieving file: ${safelyDecode(ckbfsData.filename)}`);
144
+ console.log(`Content type: ${safelyDecode(ckbfsData.contentType)}`);
145
+
146
+ // Prepare to collect all content pieces
147
+ const contentPieces: Uint8Array[] = [];
148
+ let currentData = ckbfsData;
149
+ let currentOutPoint = outPoint;
150
+
151
+ // Process the current transaction first
152
+ const tx = await client.getTransaction(currentOutPoint.txHash);
153
+ if (!tx || !tx.transaction) {
154
+ throw new Error(`Transaction ${currentOutPoint.txHash} not found`);
155
+ }
156
+
157
+ // Get content from witnesses
158
+ const indexes = currentData.indexes || (currentData.index !== undefined ? [currentData.index] : []);
159
+ if (indexes.length > 0) {
160
+ // Get content from each witness index
161
+ for (const idx of indexes) {
162
+ if (idx >= tx.transaction.witnesses.length) {
163
+ console.warn(`Witness index ${idx} out of range`);
164
+ continue;
165
+ }
166
+
167
+ const witnessHex = tx.transaction.witnesses[idx];
168
+ const witness = Buffer.from(witnessHex.slice(2), 'hex'); // Remove 0x prefix
169
+
170
+ // Extract content (skip CKBFS header + version byte)
171
+ if (witness.length >= 6 && witness.slice(0, 5).toString() === 'CKBFS') {
172
+ const content = witness.slice(6);
173
+ contentPieces.unshift(content); // Add to beginning of array (we're going backwards)
174
+ } else {
175
+ console.warn(`Witness at index ${idx} is not a valid CKBFS witness`);
176
+ }
177
+ }
178
+ }
179
+
180
+ // Follow backlinks recursively
181
+ if (currentData.backLinks && currentData.backLinks.length > 0) {
182
+ // Process each backlink, from most recent to oldest
183
+ for (let i = currentData.backLinks.length - 1; i >= 0; i--) {
184
+ const backlink = currentData.backLinks[i];
185
+
186
+ // Get the transaction for this backlink
187
+ const backTx = await client.getTransaction(backlink.txHash);
188
+ if (!backTx || !backTx.transaction) {
189
+ console.warn(`Backlink transaction ${backlink.txHash} not found`);
190
+ continue;
191
+ }
192
+
193
+ // Get content from backlink witnesses
194
+ const backIndexes = backlink.indexes || (backlink.index !== undefined ? [backlink.index] : []);
195
+ if (backIndexes.length > 0) {
196
+ // Get content from each witness index
197
+ for (const idx of backIndexes) {
198
+ if (idx >= backTx.transaction.witnesses.length) {
199
+ console.warn(`Backlink witness index ${idx} out of range`);
200
+ continue;
201
+ }
202
+
203
+ const witnessHex = backTx.transaction.witnesses[idx];
204
+ const witness = Buffer.from(witnessHex.slice(2), 'hex'); // Remove 0x prefix
205
+
206
+ // Extract content (skip CKBFS header + version byte)
207
+ if (witness.length >= 6 && witness.slice(0, 5).toString() === 'CKBFS') {
208
+ const content = witness.slice(6);
209
+ contentPieces.unshift(content); // Add to beginning of array (we're going backwards)
210
+ } else {
211
+ console.warn(`Backlink witness at index ${idx} is not a valid CKBFS witness`);
212
+ }
213
+ }
214
+ }
215
+ }
216
+ }
217
+
218
+ // Combine all content pieces
219
+ return Buffer.concat(contentPieces);
220
+ }
221
+
222
+ /**
223
+ * Saves file content retrieved from blockchain to disk
224
+ * @param content The file content to save
225
+ * @param ckbfsData The CKBFS cell data containing file metadata
226
+ * @param outputPath Optional path to save the file (defaults to filename in current directory)
227
+ * @returns The path where the file was saved
228
+ */
229
+ export function saveFileFromChain(
230
+ content: Uint8Array,
231
+ ckbfsData: any,
232
+ outputPath?: string
233
+ ): string {
234
+ // Get filename from CKBFS data
235
+ const filename = safelyDecode(ckbfsData.filename);
236
+
237
+ // Determine output path
238
+ const filePath = outputPath || filename;
239
+
240
+ // Ensure directory exists
241
+ const directory = path.dirname(filePath);
242
+ if (!fs.existsSync(directory)) {
243
+ fs.mkdirSync(directory, { recursive: true });
244
+ }
245
+
246
+ // Write file
247
+ fs.writeFileSync(filePath, content);
248
+ console.log(`File saved to: ${filePath}`);
249
+ console.log(`Size: ${content.length} bytes`);
250
+
251
+ return filePath;
109
252
  }
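The new `getFileContentFromChain` and `saveFileFromChain` helpers are exercised by `examples/retrieve.ts` elsewhere in this diff; a condensed sketch of that usage follows, with the transaction hash, cell index, and output path as placeholders:

```ts
import { ClientPublicTestnet } from "@ckb-ccc/core";
import {
  CKBFSData,
  ProtocolVersion,
  getFileContentFromChain,
  saveFileFromChain,
} from "@ckbfs/api";

// Condensed from examples/retrieve.ts in this diff; values are placeholders.
async function download(txHash: string) {
  const client = new ClientPublicTestnet();
  const tx = await client.getTransaction(txHash);
  if (!tx?.transaction) throw new Error("transaction not found");

  // Assume the CKBFS cell is the first output, as the example does.
  const raw = Buffer.from(tx.transaction.outputsData[0].slice(2), "hex");
  const data = CKBFSData.unpack(raw, ProtocolVersion.V2);

  const content = await getFileContentFromChain(client, { txHash, index: 0 }, data);
  return saveFileFromChain(content, data, "./downloaded-file.txt");
}
```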
@@ -1,44 +1,43 @@
1
1
  import { molecule, number } from "@ckb-lumos/codec";
2
2
  import { blockchain } from "@ckb-lumos/base";
3
3
  import { ProtocolVersion } from "./constants";
4
+ import { ccc } from "@ckb-ccc/core";
4
5
 
5
6
  /**
6
7
  * Molecule definitions for CKBFS data structures.
7
8
  */
8
9
 
9
- // Define the Indexes vector
10
+ // Define the Indexes vector for V2
10
11
  export const Indexes = molecule.vector(number.Uint32);
11
12
 
12
- // Define the BackLink table structure for V1
13
+ // V1: BackLink has index as Uint32, and fields are ordered differently
13
14
  export const BackLinkV1 = molecule.table(
14
15
  {
15
- txHash: blockchain.Byte32,
16
16
  index: number.Uint32,
17
17
  checksum: number.Uint32,
18
+ txHash: blockchain.Byte32,
18
19
  },
19
- ["txHash", "index", "checksum"]
20
+ ["index", "checksum", "txHash"]
20
21
  );
21
22
 
22
- // Define the BackLink table structure for V2
23
+ // V2: BackLink has indexes as vector of Uint32
23
24
  export const BackLinkV2 = molecule.table(
24
25
  {
25
- txHash: blockchain.Byte32,
26
26
  indexes: Indexes,
27
27
  checksum: number.Uint32,
28
+ txHash: blockchain.Byte32,
28
29
  },
29
- ["txHash", "indexes", "checksum"]
30
+ ["indexes", "checksum", "txHash"]
30
31
  );
31
32
 
32
- // Define the BackLinks vector for V1
33
+ // Define the BackLinks vector for V1 and V2
33
34
  export const BackLinksV1 = molecule.vector(BackLinkV1);
34
-
35
- // Define the BackLinks vector for V2
36
35
  export const BackLinksV2 = molecule.vector(BackLinkV2);
37
36
 
38
- // Define the CKBFSData table structure for V1
37
+ // V1: CKBFSData has index as optional Uint32
39
38
  export const CKBFSDataV1 = molecule.table(
40
39
  {
41
- index: Indexes,
40
+ index: number.Uint32,
42
41
  checksum: number.Uint32,
43
42
  contentType: blockchain.Bytes,
44
43
  filename: blockchain.Bytes,
@@ -47,7 +46,7 @@ export const CKBFSDataV1 = molecule.table(
47
46
  ["index", "checksum", "contentType", "filename", "backLinks"]
48
47
  );
49
48
 
50
- // Define the CKBFSData table structure for V2
49
+ // V2: CKBFSData has indexes as vector of Uint32
51
50
  export const CKBFSDataV2 = molecule.table(
52
51
  {
53
52
  indexes: Indexes,
@@ -61,41 +60,50 @@ export const CKBFSDataV2 = molecule.table(
61
60
 
62
61
  // Type definitions for TypeScript
63
62
  export type BackLinkTypeV1 = {
64
- txHash: string;
65
63
  index: number;
66
64
  checksum: number;
65
+ txHash: string;
67
66
  };
68
67
 
69
68
  export type BackLinkTypeV2 = {
70
- txHash: string;
71
69
  indexes: number[];
72
70
  checksum: number;
71
+ txHash: string;
73
72
  };
74
73
 
75
74
  // Combined type that works with both versions
76
75
  export type BackLinkType = {
77
- txHash: string;
78
76
  index?: number;
79
77
  indexes?: number[];
80
78
  checksum: number;
79
+ txHash: string;
81
80
  };
82
81
 
83
82
  // Combined CKBFSData type that works with both versions
84
83
  export type CKBFSDataType = {
85
- index?: number[];
84
+ index?: number;
86
85
  indexes?: number[];
87
86
  checksum: number;
88
- contentType: Uint8Array;
89
- filename: Uint8Array;
87
+ contentType: string;
88
+ filename: string;
90
89
  backLinks: BackLinkType[];
91
90
  };
92
91
 
93
- // Helper function to safely get either index or indexes
92
+ // Helper function to get indexes array from data
94
93
  function getIndexes(data: CKBFSDataType): number[] {
95
- return data.indexes || data.index || [];
94
+ if (data.indexes) return data.indexes;
95
+ if (typeof data.index === 'number') return [data.index];
96
+ return [];
97
+ }
98
+
99
+ // Helper function to get single index from data
100
+ function getIndex(data: CKBFSDataType): number {
101
+ if (typeof data.index === 'number') return data.index;
102
+ if (data.indexes && data.indexes.length > 0) return data.indexes[0];
103
+ return 0;
96
104
  }
97
105
 
98
- // Helper function to safely get either index or indexes from BackLinkType
106
+ // Helper function to safely get either index or indexes from BackLinkType for V1
99
107
  function getBackLinkIndex(bl: BackLinkType): number {
100
108
  if (typeof bl.index === 'number') {
101
109
  return bl.index;
@@ -106,7 +114,7 @@ function getBackLinkIndex(bl: BackLinkType): number {
106
114
  return 0;
107
115
  }
108
116
 
109
- // Helper function to safely get indexes array from BackLinkType
117
+ // Helper function to safely get indexes array from BackLinkType for V2
110
118
  function getBackLinkIndexes(bl: BackLinkType): number[] {
111
119
  if (Array.isArray(bl.indexes)) {
112
120
  return bl.indexes;
@@ -121,30 +129,44 @@ function getBackLinkIndexes(bl: BackLinkType): number[] {
121
129
  export const CKBFSData = {
122
130
  pack: (data: CKBFSDataType, version: string = ProtocolVersion.V2): Uint8Array => {
123
131
  if (version === ProtocolVersion.V1) {
124
- // V1 formatting
132
+ // V1 formatting - uses single index
125
133
  return CKBFSDataV1.pack({
126
- index: getIndexes(data),
134
+ index: getIndex(data),
127
135
  checksum: data.checksum,
128
- contentType: data.contentType,
129
- filename: data.filename,
130
- backLinks: data.backLinks.map(bl => ({
131
- txHash: bl.txHash,
132
- index: getBackLinkIndex(bl),
133
- checksum: bl.checksum,
134
- })),
136
+ contentType: ccc.bytesFrom(data.contentType, 'utf8'),
137
+ filename: ccc.bytesFrom(data.filename, 'utf8'),
138
+ backLinks: data.backLinks.map(bl => {
139
+ // Ensure txHash is in proper format for molecule encoding
140
+ const txHash = typeof bl.txHash === 'string'
141
+ ? ccc.bytesFrom(bl.txHash)
142
+ : bl.txHash;
143
+
144
+ return {
145
+ index: getBackLinkIndex(bl),
146
+ checksum: bl.checksum,
147
+ txHash,
148
+ };
149
+ }),
135
150
  });
136
151
  } else {
137
- // V2 formatting
152
+ // V2 formatting - uses indexes array
138
153
  return CKBFSDataV2.pack({
139
154
  indexes: getIndexes(data),
140
155
  checksum: data.checksum,
141
- contentType: data.contentType,
142
- filename: data.filename,
143
- backLinks: data.backLinks.map(bl => ({
144
- txHash: bl.txHash,
145
- indexes: getBackLinkIndexes(bl),
146
- checksum: bl.checksum,
147
- })),
156
+ contentType: ccc.bytesFrom(data.contentType, 'utf8'),
157
+ filename: ccc.bytesFrom(data.filename, 'utf8'),
158
+ backLinks: data.backLinks.map(bl => {
159
+ // Ensure txHash is in proper format for molecule encoding
160
+ const txHash = typeof bl.txHash === 'string'
161
+ ? bl.txHash
162
+ : bl.txHash;
163
+
164
+ return {
165
+ indexes: getBackLinkIndexes(bl),
166
+ checksum: bl.checksum,
167
+ txHash,
168
+ };
169
+ }),
148
170
  });
149
171
  }
150
172
  },
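With the source-level changes above, `contentType` and `filename` are now plain strings (converted with `ccc.bytesFrom` internally) and backlinks follow the reordered (indexes, checksum, txHash) field layout. A hedged sketch of calling the updated `pack` from the package entry point, with invented values:

```ts
import { CKBFSData, ProtocolVersion } from "@ckbfs/api";

// Sketch of packing with the updated TypeScript API shown above.
// All values are illustrative.
const packedV2 = CKBFSData.pack(
  {
    indexes: [1],
    checksum: 0x12345678,
    contentType: "text/plain",
    filename: "example.txt",
    backLinks: [
      { indexes: [1], checksum: 0x0badcafe, txHash: "0x" + "11".repeat(32) },
    ],
  },
  ProtocolVersion.V2,
);

console.log(packedV2 instanceof Uint8Array); // true
```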
@@ -155,12 +177,12 @@ export const CKBFSData = {
155
177
  return {
156
178
  index: unpacked.index,
157
179
  checksum: unpacked.checksum,
158
- contentType: new Uint8Array(Buffer.from(unpacked.contentType)),
159
- filename: new Uint8Array(Buffer.from(unpacked.filename)),
180
+ contentType: ccc.bytesTo(unpacked.contentType, 'utf8'),
181
+ filename: ccc.bytesTo(unpacked.filename, 'utf8'),
160
182
  backLinks: unpacked.backLinks.map(bl => ({
161
- txHash: bl.txHash,
162
183
  index: bl.index,
163
184
  checksum: bl.checksum,
185
+ txHash: bl.txHash,
164
186
  })),
165
187
  };
166
188
  } else {
@@ -169,12 +191,12 @@ export const CKBFSData = {
169
191
  return {
170
192
  indexes: unpacked.indexes,
171
193
  checksum: unpacked.checksum,
172
- contentType: new Uint8Array(Buffer.from(unpacked.contentType)),
173
- filename: new Uint8Array(Buffer.from(unpacked.filename)),
194
+ contentType: ccc.bytesTo(unpacked.contentType, 'utf8'),
195
+ filename: ccc.bytesTo(unpacked.filename, 'utf8'),
174
196
  backLinks: unpacked.backLinks.map(bl => ({
175
- txHash: bl.txHash,
176
197
  indexes: bl.indexes,
177
198
  checksum: bl.checksum,
199
+ txHash: bl.txHash,
178
200
  })),
179
201
  };
180
202
  }
@@ -125,7 +125,9 @@ export async function createPublishTransaction(
125
125
  const combinedContent = Buffer.concat(contentChunks);
126
126
  const checksum = await calculateChecksum(combinedContent);
127
127
 
128
- // Create CKBFS witnesses
128
+ // Create CKBFS witnesses - each chunk already includes the CKBFS header
129
+ // Pass 0 as version byte - this is the protocol version byte in the witness header
130
+ // not to be confused with the Protocol Version (V1 vs V2)
129
131
  const ckbfsWitnesses = createChunkedCKBFSWitnesses(contentChunks);
130
132
 
131
133
  // Calculate the actual witness indices where our content is placed
@@ -141,23 +143,23 @@ export async function createPublishTransaction(
141
143
  let outputData: Uint8Array;
142
144
 
143
145
  if (version === ProtocolVersion.V1) {
144
- // V1 format: Single index field
146
+ // V1 format: Single index field (a single number, not an array)
145
147
  // For V1, use the first index where content is placed
146
148
  outputData = CKBFSData.pack({
147
- index: [contentStartIndex],
149
+ index: contentStartIndex,
148
150
  checksum,
149
- contentType: textEncoder.encode(contentType),
150
- filename: textEncoder.encode(filename),
151
+ contentType: contentType,
152
+ filename: filename,
151
153
  backLinks: [],
152
154
  }, version);
153
155
  } else {
154
- // V2 format: Multiple indexes
156
+ // V2 format: Multiple indexes (array of numbers)
155
157
  // For V2, use all the indices where content is placed
156
158
  outputData = CKBFSData.pack({
157
159
  indexes: witnessIndices,
158
160
  checksum,
159
- contentType: textEncoder.encode(contentType),
160
- filename: textEncoder.encode(filename),
161
+ contentType,
162
+ filename,
161
163
  backLinks: [],
162
164
  }, version);
163
165
  }
@@ -265,88 +267,98 @@ export async function createAppendTransaction(
265
267
  // Get CKBFS script config early to use version info
266
268
  const config = getCKBFSScriptConfig(network, version);
267
269
 
268
- // Create CKBFS witnesses - this may vary between V1 and V2
270
+ // Create CKBFS witnesses - each chunk already includes the CKBFS header
271
+ // Pass 0 as version byte - this is the protocol version byte in the witness header
272
+ // not to be confused with the Protocol Version (V1 vs V2)
269
273
  const ckbfsWitnesses = createChunkedCKBFSWitnesses(contentChunks);
270
274
 
271
- // Combine the new content chunks
275
+ // Combine the new content chunks for checksum calculation
272
276
  const combinedContent = Buffer.concat(contentChunks);
273
277
 
274
- // Instead of calculating a new checksum from scratch, update the existing checksum
275
- // with the new content - this is more efficient and matches the Adler32 algorithm's
276
- // cumulative nature
278
+ // Update the existing checksum with the new content - this matches Adler32's
279
+ // cumulative nature as required by Rule 11 in the RFC
277
280
  const contentChecksum = await updateChecksum(data.checksum, combinedContent);
278
281
  console.log(`Updated checksum from ${data.checksum} to ${contentChecksum} for appended content`);
279
282
 
283
+ // Calculate the actual witness indices where our content is placed
284
+ // Index 0 is reserved for the secp256k1 witness for signing
285
+ // So our CKBFS data starts at index 1
286
+ const contentStartIndex = 1;
287
+ const witnessIndices = Array.from(
288
+ { length: contentChunks.length },
289
+ (_, i) => contentStartIndex + i
290
+ );
291
+
280
292
  // Create backlink for the current state based on version
281
293
  let newBackLink: any;
282
294
 
283
295
  if (version === ProtocolVersion.V1) {
284
296
  // V1 format: Use index field (single number)
285
297
  newBackLink = {
286
- txHash: outPoint.txHash,
287
- index: data.index && data.index.length > 0 ? data.index[0] : 0,
298
+ // In V1, field order is index, checksum, txHash
299
+ // and index is a single number value, not an array
300
+ index: data.index || (data.indexes && data.indexes.length > 0 ? data.indexes[0] : 0),
288
301
  checksum: data.checksum,
302
+ txHash: outPoint.txHash,
289
303
  };
290
304
  } else {
291
305
  // V2 format: Use indexes field (array of numbers)
292
306
  newBackLink = {
293
- txHash: outPoint.txHash,
294
- indexes: data.indexes || data.index || [],
307
+ // In V2, field order is indexes, checksum, txHash
308
+ // and indexes is an array of numbers
309
+ indexes: data.indexes || (data.index ? [data.index] : []),
295
310
  checksum: data.checksum,
311
+ txHash: outPoint.txHash,
296
312
  };
297
313
  }
298
314
 
299
- // Update backlinks
315
+ // Update backlinks - add the new one to the existing backlinks array
300
316
  const backLinks = [...(data.backLinks || []), newBackLink];
301
317
 
302
- // Define indices based on version
318
+ // Define output data based on version
303
319
  let outputData: Uint8Array;
304
320
 
305
- // Calculate the actual witness indices where our content is placed
306
- // Index 0 is reserved for the secp256k1 witness for signing
307
- // So our CKBFS data starts at index 1
308
- const contentStartIndex = 1;
309
- const witnessIndices = Array.from(
310
- { length: contentChunks.length },
311
- (_, i) => contentStartIndex + i
312
- );
313
-
314
321
  if (version === ProtocolVersion.V1) {
315
- // In V1, use the first index where content is placed
316
- // (even if we have multiple witnesses, V1 only supports a single index)
322
+ // In V1, index is a single number, not an array
323
+ // The first witness index is used (V1 can only reference one witness)
317
324
  outputData = CKBFSData.pack({
318
- index: [contentStartIndex],
325
+ index: witnessIndices[0], // Use only the first index as a number
319
326
  checksum: contentChecksum,
320
327
  contentType: data.contentType,
321
328
  filename: data.filename,
322
329
  backLinks,
323
- }, version);
330
+ }, ProtocolVersion.V1); // Explicitly use V1 for packing
324
331
  } else {
325
- // In V2, use all the indices where content is placed
332
+ // In V2, indexes is an array of witness indices
326
333
  outputData = CKBFSData.pack({
327
334
  indexes: witnessIndices,
328
335
  checksum: contentChecksum,
329
336
  contentType: data.contentType,
330
337
  filename: data.filename,
331
338
  backLinks,
332
- }, version);
339
+ }, ProtocolVersion.V2); // Explicitly use V2 for packing
333
340
  }
334
341
 
335
342
  // Pack the original data to get its size - use the appropriate version
336
343
  const originalData = CKBFSData.pack(data, version);
337
344
  const originalDataSize = originalData.length;
338
345
 
339
- // Get sizes
346
+ // Get sizes and calculate capacity requirements
340
347
  const newDataSize = outputData.length;
341
- const dataSizeDiff = newDataSize - originalDataSize;
342
348
 
343
- // Calculate the additional capacity needed (in shannons)
344
- // CKB requires 1 shannon per byte of data
345
- const additionalCapacity = BigInt(Math.max(0, dataSizeDiff)) * 100000000n;
349
+ // Calculate the required capacity for the output cell
350
+ // This accounts for:
351
+ // 1. The output data size
352
+ // 2. The type script's occupied size
353
+ // 3. The lock script's occupied size
354
+ // 4. A constant of 8 bytes (for header overhead)
355
+ const ckbfsCellSize = BigInt(outputData.length + type.occupiedSize + lock.occupiedSize + 8) * 100000000n;
356
+
357
+ console.log(`Original capacity: ${capacity}, Calculated size: ${ckbfsCellSize}, Data size: ${outputData.length}`);
346
358
 
347
- // Add the additional capacity to the original cell capacity
348
- console.log(`Original capacity: ${capacity}, Additional needed: ${additionalCapacity}, Data size diff: ${dataSizeDiff}, Version: ${version}`);
349
- const outputCapacity = capacity + additionalCapacity;
359
+ // Use the maximum value between calculated size and original capacity
360
+ // to ensure we have enough capacity but don't decrease capacity unnecessarily
361
+ const outputCapacity = ckbfsCellSize > capacity ? ckbfsCellSize : capacity;
350
362
 
351
363
  // Create initial transaction with the CKBFS cell input
352
364
  const tx = Transaction.from({
@@ -388,7 +400,8 @@ export async function createAppendTransaction(
388
400
  const address = await signer.getRecommendedAddressObj();
389
401
 
390
402
  // If we need more capacity than the original cell had, add additional inputs
391
- if (additionalCapacity > 0n) {
403
+ if (outputCapacity > capacity) {
404
+ console.log(`Need additional capacity: ${outputCapacity - capacity} shannons`);
392
405
  // Add more inputs to cover the increased capacity
393
406
  await tx.completeInputsByCapacity(signer);
394
407
  }
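The append path above replaces the old "size diff" adjustment with a full recomputation of the minimum capacity for the new cell, keeps the larger of that value and the consumed cell's capacity, and only pulls in extra inputs when the cell actually needs to grow. A compact restatement of that policy under the same formula, with invented sizes:

```ts
// Restatement of the append-side capacity policy above. Values are invented.
const SHANNONS_PER_BYTE = 100_000_000n;

function requiredCapacity(dataLen: bigint, typeSize: bigint, lockSize: bigint): bigint {
  // occupied bytes: data + type script + lock script + 8-byte capacity field
  return (dataLen + typeSize + lockSize + 8n) * SHANNONS_PER_BYTE;
}

const inputCellCapacity = 250n * SHANNONS_PER_BYTE;  // capacity of the consumed CKBFS cell
const needed = requiredCapacity(180n, 65n, 53n);     // 306 CKB for the grown data

// Never shrink the cell; only top up when the new data needs more room.
const outputCapacity = needed > inputCellCapacity ? needed : inputCellCapacity;
const mustAddInputs = outputCapacity > inputCellCapacity; // triggers completeInputsByCapacity
```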
@@ -10,9 +10,10 @@ import { CKBFS_HEADER } from './molecule';
10
10
  * @param version Optional version byte (default is 0)
11
11
  * @returns Uint8Array containing the witness data
12
12
  */
13
- export function createCKBFSWitness(content: Uint8Array, version: number = 0): Uint8Array {
13
+ export function createCKBFSWitness(content: Uint8Array): Uint8Array {
14
14
  // Create witness with CKBFS header, version byte, and content
15
- const versionByte = new Uint8Array([version]);
15
+ // Version byte must always be 0x00 per protocol
16
+ const versionByte = new Uint8Array([0]);
16
17
  return Buffer.concat([CKBFS_HEADER, versionByte, content]);
17
18
  }
18
19
 
@@ -22,10 +23,10 @@ export function createCKBFSWitness(content: Uint8Array, version: number = 0): Ui
22
23
  * @param version Optional version byte (default is 0)
23
24
  * @returns Uint8Array containing the witness data
24
25
  */
25
- export function createTextCKBFSWitness(text: string, version: number = 0): Uint8Array {
26
+ export function createTextCKBFSWitness(text: string): Uint8Array {
26
27
  const textEncoder = new TextEncoder();
27
28
  const contentBytes = textEncoder.encode(text);
28
- return createCKBFSWitness(contentBytes, version);
29
+ return createCKBFSWitness(contentBytes);
29
30
  }
30
31
 
31
32
  /**
@@ -71,6 +72,6 @@ export function isCKBFSWitness(witness: Uint8Array): boolean {
71
72
  * @param version Optional version byte (default is 0)
72
73
  * @returns Array of Uint8Array witnesses
73
74
  */
74
- export function createChunkedCKBFSWitnesses(contentChunks: Uint8Array[], version: number = 0): Uint8Array[] {
75
- return contentChunks.map(chunk => createCKBFSWitness(chunk, version));
75
+ export function createChunkedCKBFSWitnesses(contentChunks: Uint8Array[]): Uint8Array[] {
76
+ return contentChunks.map(chunk => createCKBFSWitness(chunk));
76
77
  }
@@ -0,0 +1,2 @@
1
+ Hello, CKBFS! This is a sample file published using 20240906.ce6724722cf6 protocol.
2
+ This is content to append to the previously published file.