@sqlitecloud/drivers 1.0.354 → 1.0.406

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -320,7 +320,8 @@ function serializeCommand(data, zeroString = false) {
  const zs = i == 0 || zeroString;
  serializedData += serializeData(data[i], zs);
  }
- const header = `${exports.CMD_ARRAY}${serializedData.length} `;
+ const bytesTotal = buffer_1.Buffer.byteLength(serializedData, 'utf-8');
+ const header = `${exports.CMD_ARRAY}${bytesTotal} `;
  return header + serializedData;
  }
  function serializeData(data, zeroString = false) {
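
The change above replaces the JavaScript string length with Buffer.byteLength when building the SCSP '=' array header, so the declared LEN matches the number of UTF-8 bytes actually written to the socket. The two values differ whenever the query or a bound parameter contains multi-byte characters. A minimal sketch of the difference (illustration only, not part of the package; the payload value is hypothetical):

import { Buffer } from 'buffer'

const CMD_ARRAY = '='              // same marker exported by protocol.ts
const serializedData = '1 +3 né'   // hypothetical serialized body with a 2-byte UTF-8 character

// 1.0.354 behavior: counts UTF-16 code units -> '=7 '
const oldHeader = `${CMD_ARRAY}${serializedData.length} `

// 1.0.406 behavior: counts the UTF-8 bytes that go on the wire -> '=8 '
const newHeader = `${CMD_ARRAY}${Buffer.byteLength(serializedData, 'utf-8')} `
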
@@ -70,7 +70,7 @@ eval("\n//\n// database.ts - database driver api, implements and extends sqlite3
  /***/ ((__unused_webpack_module, exports, __webpack_require__) => {

  "use strict";
- eval("\n//\n// protocol.ts - low level protocol handling for SQLiteCloud transport\n//\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ROWSET_CHUNKS_END = exports.CMD_PUBSUB = exports.CMD_ARRAY = exports.CMD_COMMAND = exports.CMD_COMPRESSED = exports.CMD_BLOB = exports.CMD_NULL = exports.CMD_JSON = exports.CMD_ROWSET_CHUNK = exports.CMD_ROWSET = exports.CMD_FLOAT = exports.CMD_INT = exports.CMD_ERROR = exports.CMD_ZEROSTRING = exports.CMD_STRING = void 0;\nexports.hasCommandLength = hasCommandLength;\nexports.parseCommandLength = parseCommandLength;\nexports.decompressBuffer = decompressBuffer;\nexports.parseError = parseError;\nexports.parseArray = parseArray;\nexports.parseRowsetHeader = parseRowsetHeader;\nexports.bufferStartsWith = bufferStartsWith;\nexports.bufferEndsWith = bufferEndsWith;\nexports.parseRowsetChunks = parseRowsetChunks;\nexports.popData = popData;\nexports.formatCommand = formatCommand;\nconst types_1 = __webpack_require__(/*! ./types */ \"./lib/drivers/types.js\");\nconst rowset_1 = __webpack_require__(/*! ./rowset */ \"./lib/drivers/rowset.js\");\n// explicitly importing buffer library to allow cross-platform support by replacing it\nconst buffer_1 = __webpack_require__(/*! buffer */ \"./node_modules/buffer/index.js\");\n// https://www.npmjs.com/package/lz4js\nconst lz4 = __webpack_require__(/*! lz4js */ \"./node_modules/lz4js/lz4.js\");\n// The server communicates with clients via commands defined in\n// SQLiteCloud Server Protocol (SCSP), see more at:\n// https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md\nexports.CMD_STRING = '+';\nexports.CMD_ZEROSTRING = '!';\nexports.CMD_ERROR = '-';\nexports.CMD_INT = ':';\nexports.CMD_FLOAT = ',';\nexports.CMD_ROWSET = '*';\nexports.CMD_ROWSET_CHUNK = '/';\nexports.CMD_JSON = '#';\nexports.CMD_NULL = '_';\nexports.CMD_BLOB = '$';\nexports.CMD_COMPRESSED = '%';\nexports.CMD_COMMAND = '^';\nexports.CMD_ARRAY = '=';\n// const CMD_RAWJSON = '{'\nexports.CMD_PUBSUB = '|';\n// const CMD_RECONNECT = '@'\n// To mark the end of the Rowset, the special string /LEN 0 0 0 is sent (LEN is always 6 in this case)\n// https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-rowset-chunk\nexports.ROWSET_CHUNKS_END = '/6 0 0 0 ';\n//\n// utility functions\n//\n/** Analyze first character to check if corresponding data type has LEN */\nfunction hasCommandLength(firstCharacter) {\n return firstCharacter == exports.CMD_INT || firstCharacter == exports.CMD_FLOAT || firstCharacter == exports.CMD_NULL ? 
false : true;\n}\n/** Analyze a command with explict LEN and extract it */\nfunction parseCommandLength(data) {\n return parseInt(data.subarray(1, data.indexOf(' ')).toString('utf8'));\n}\n/** Receive a compressed buffer, decompress with lz4, return buffer and datatype */\nfunction decompressBuffer(buffer) {\n // https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-compression\n // jest test/database.test.ts -t \"select large result set\"\n // starts with %<commandLength> <compressed> <uncompressed>\n const spaceIndex = buffer.indexOf(' ');\n const commandLength = parseInt(buffer.subarray(1, spaceIndex).toString('utf8'));\n let commandBuffer = buffer.subarray(spaceIndex + 1, spaceIndex + 1 + commandLength);\n const remainingBuffer = buffer.subarray(spaceIndex + 1 + commandLength);\n // extract compressed + decompressed size\n const compressedSize = parseInt(commandBuffer.subarray(0, commandBuffer.indexOf(' ') + 1).toString('utf8'));\n commandBuffer = commandBuffer.subarray(commandBuffer.indexOf(' ') + 1);\n const decompressedSize = parseInt(commandBuffer.subarray(0, commandBuffer.indexOf(' ') + 1).toString('utf8'));\n commandBuffer = commandBuffer.subarray(commandBuffer.indexOf(' ') + 1);\n // extract compressed dataType\n const dataType = commandBuffer.subarray(0, 1).toString('utf8');\n let decompressedBuffer = buffer_1.Buffer.alloc(decompressedSize);\n const compressedBuffer = commandBuffer.subarray(commandBuffer.length - compressedSize);\n // lz4js library is javascript and doesn't have types so we silence the type check\n const decompressionResult = lz4.decompressBlock(compressedBuffer, decompressedBuffer, 0, compressedSize, 0);\n // the entire command is composed of the header (which is not compressed) + the decompressed block\n decompressedBuffer = buffer_1.Buffer.concat([commandBuffer.subarray(0, commandBuffer.length - compressedSize), decompressedBuffer]);\n if (decompressionResult <= 0 || decompressionResult !== decompressedSize) {\n throw new Error(`lz4 decompression error at offset ${decompressionResult}`);\n }\n return { buffer: decompressedBuffer, dataType, remainingBuffer };\n}\n/** Parse error message or extended error message */\nfunction parseError(buffer, spaceIndex) {\n const errorBuffer = buffer.subarray(spaceIndex + 1);\n const errorString = errorBuffer.toString('utf8');\n const parts = errorString.split(' ');\n let errorCodeStr = parts.shift() || '0'; // Default errorCode is '0' if not present\n let extErrCodeStr = '0'; // Default extended error code\n let offsetCodeStr = '-1'; // Default offset code\n // Split the errorCode by ':' to check for extended error codes\n const errorCodeParts = errorCodeStr.split(':');\n errorCodeStr = errorCodeParts[0];\n if (errorCodeParts.length > 1) {\n extErrCodeStr = errorCodeParts[1];\n if (errorCodeParts.length > 2) {\n offsetCodeStr = errorCodeParts[2];\n }\n }\n // Rest of the error string is the error message\n const errorMessage = parts.join(' ');\n // Parse error codes to integers safely, defaulting to 0 if NaN\n const errorCode = parseInt(errorCodeStr);\n const extErrCode = parseInt(extErrCodeStr);\n const offsetCode = parseInt(offsetCodeStr);\n // create an Error object and add the custom properties\n throw new types_1.SQLiteCloudError(errorMessage, {\n errorCode: errorCode.toString(),\n externalErrorCode: extErrCode.toString(),\n offsetCode\n });\n}\n/** Parse an array of items (each of which will be parsed by type separately) */\nfunction parseArray(buffer, spaceIndex) {\n const parsedData = [];\n const array = 
buffer.subarray(spaceIndex + 1, buffer.length);\n const numberOfItems = parseInt(array.subarray(0, spaceIndex - 2).toString('utf8'));\n let arrayItems = array.subarray(array.indexOf(' ') + 1, array.length);\n for (let i = 0; i < numberOfItems; i++) {\n const { data, fwdBuffer: buffer } = popData(arrayItems);\n parsedData.push(data);\n arrayItems = buffer;\n }\n return parsedData;\n}\n/** Parse header in a rowset or chunk of a chunked rowset */\nfunction parseRowsetHeader(buffer) {\n const index = parseInt(buffer.subarray(0, buffer.indexOf(':') + 1).toString());\n buffer = buffer.subarray(buffer.indexOf(':') + 1);\n // extract rowset header\n const { data, fwdBuffer } = popIntegers(buffer, 3);\n const result = {\n index,\n metadata: {\n version: data[0],\n numberOfRows: data[1],\n numberOfColumns: data[2],\n columns: []\n },\n fwdBuffer\n };\n // console.debug(`parseRowsetHeader`, result)\n return result;\n}\n/** Extract column names and, optionally, more metadata out of a rowset's header */\nfunction parseRowsetColumnsMetadata(buffer, metadata) {\n function popForward() {\n const { data, fwdBuffer: fwdBuffer } = popData(buffer); // buffer in parent scope\n buffer = fwdBuffer;\n return data;\n }\n for (let i = 0; i < metadata.numberOfColumns; i++) {\n metadata.columns.push({ name: popForward() });\n }\n // extract additional metadata if rowset has version 2\n if (metadata.version == 2) {\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].type = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].database = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].table = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].column = popForward(); // original column name\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].notNull = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].primaryKey = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].autoIncrement = popForward();\n }\n return buffer;\n}\n/** Parse a regular rowset (no chunks) */\nfunction parseRowset(buffer, spaceIndex) {\n buffer = buffer.subarray(spaceIndex + 1, buffer.length);\n const { metadata, fwdBuffer } = parseRowsetHeader(buffer);\n buffer = parseRowsetColumnsMetadata(fwdBuffer, metadata);\n // decode each rowset item\n const data = [];\n for (let j = 0; j < metadata.numberOfRows * metadata.numberOfColumns; j++) {\n const { data: rowData, fwdBuffer } = popData(buffer);\n data.push(rowData);\n buffer = fwdBuffer;\n }\n console.assert(data && data.length === metadata.numberOfRows * metadata.numberOfColumns, 'SQLiteCloudConnection.parseRowset - invalid rowset data');\n return new rowset_1.SQLiteCloudRowset(metadata, data);\n}\nfunction bufferStartsWith(buffer, prefix) {\n return buffer.length >= prefix.length && buffer.subarray(0, prefix.length).toString('utf8') === prefix;\n}\nfunction bufferEndsWith(buffer, suffix) {\n return buffer.length >= suffix.length && buffer.subarray(buffer.length - suffix.length, buffer.length).toString('utf8') === suffix;\n}\n/**\n * Parse a chunk of a chunked rowset command, eg:\n * *LEN 0:VERS NROWS NCOLS DATA\n * @see https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-rowset-chunk\n */\nfunction parseRowsetChunks(buffers) {\n let buffer = buffer_1.Buffer.concat(buffers);\n if (!bufferStartsWith(buffer, exports.CMD_ROWSET_CHUNK) || !bufferEndsWith(buffer, 
exports.ROWSET_CHUNKS_END)) {\n throw new Error('SQLiteCloudConnection.parseRowsetChunks - invalid chunks buffer');\n }\n let metadata = { version: 1, numberOfColumns: 0, numberOfRows: 0, columns: [] };\n const data = [];\n // validate and skip data type\n const dataType = buffer.subarray(0, 1).toString();\n if (dataType !== exports.CMD_ROWSET_CHUNK)\n throw new Error(`parseRowsetChunks - dataType: ${dataType} should be CMD_ROWSET_CHUNK`);\n buffer = buffer.subarray(buffer.indexOf(' ') + 1);\n while (buffer.length > 0 && !bufferStartsWith(buffer, exports.ROWSET_CHUNKS_END)) {\n // chunk header, eg: 0:VERS NROWS NCOLS\n const { index: chunkIndex, metadata: chunkMetadata, fwdBuffer } = parseRowsetHeader(buffer);\n buffer = fwdBuffer;\n // first chunk? extract columns metadata\n if (chunkIndex === 1) {\n metadata = chunkMetadata;\n buffer = parseRowsetColumnsMetadata(buffer, metadata);\n }\n else {\n metadata.numberOfRows += chunkMetadata.numberOfRows;\n }\n // extract single rowset row\n for (let k = 0; k < chunkMetadata.numberOfRows * metadata.numberOfColumns; k++) {\n const { data: itemData, fwdBuffer } = popData(buffer);\n data.push(itemData);\n buffer = fwdBuffer;\n }\n }\n console.assert(data && data.length === metadata.numberOfRows * metadata.numberOfColumns, 'parseRowsetChunks - invalid rowset data');\n const rowset = new rowset_1.SQLiteCloudRowset(metadata, data);\n // console.debug(`parseRowsetChunks - ${rowset.numberOfRows} rows, ${rowset.numberOfColumns} columns`)\n return rowset;\n}\n/** Pop one or more space separated integers from beginning of buffer, move buffer forward */\nfunction popIntegers(buffer, numberOfIntegers = 1) {\n const data = [];\n for (let i = 0; i < numberOfIntegers; i++) {\n const spaceIndex = buffer.indexOf(' ');\n data[i] = parseInt(buffer.subarray(0, spaceIndex).toString());\n buffer = buffer.subarray(spaceIndex + 1);\n }\n return { data, fwdBuffer: buffer };\n}\n/** Parse command, extract its data, return the data and the buffer moved to the first byte after the command */\nfunction popData(buffer) {\n function popResults(data) {\n const fwdBuffer = buffer.subarray(commandEnd);\n return { data, fwdBuffer };\n }\n // first character is the data type\n console.assert(buffer && buffer instanceof buffer_1.Buffer);\n let dataType = buffer.subarray(0, 1).toString('utf8');\n if (dataType == exports.CMD_COMPRESSED)\n throw new Error('Compressed data should be decompressed before parsing');\n if (dataType == exports.CMD_ROWSET_CHUNK)\n throw new Error('Chunked data should be parsed by parseRowsetChunks');\n let spaceIndex = buffer.indexOf(' ');\n if (spaceIndex === -1) {\n spaceIndex = buffer.length - 1;\n }\n let commandEnd = -1, commandLength = -1;\n if (dataType === exports.CMD_INT || dataType === exports.CMD_FLOAT || dataType === exports.CMD_NULL) {\n commandEnd = spaceIndex + 1;\n }\n else {\n commandLength = parseInt(buffer.subarray(1, spaceIndex).toString());\n commandEnd = spaceIndex + 1 + commandLength;\n }\n // console.debug(`popData - dataType: ${dataType}, spaceIndex: ${spaceIndex}, commandLength: ${commandLength}, commandEnd: ${commandEnd}`)\n switch (dataType) {\n case exports.CMD_INT:\n return popResults(parseInt(buffer.subarray(1, spaceIndex).toString()));\n case exports.CMD_FLOAT:\n return popResults(parseFloat(buffer.subarray(1, spaceIndex).toString()));\n case exports.CMD_NULL:\n return popResults(null);\n case exports.CMD_STRING:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_ZEROSTRING:\n 
return popResults(buffer.subarray(spaceIndex + 1, commandEnd - 1).toString('utf8'));\n case exports.CMD_COMMAND:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_PUBSUB:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_JSON:\n return popResults(JSON.parse(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8')));\n case exports.CMD_BLOB:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd));\n case exports.CMD_ARRAY:\n return popResults(parseArray(buffer, spaceIndex));\n case exports.CMD_ROWSET:\n return popResults(parseRowset(buffer, spaceIndex));\n case exports.CMD_ERROR:\n parseError(buffer, spaceIndex); // throws custom error\n break;\n }\n const msg = `popData - Data type: ${Number(dataType)} '${dataType}' is not defined in SCSP, spaceIndex: ${spaceIndex}, commandLength: ${commandLength}, commandEnd: ${commandEnd}`;\n console.error(msg);\n throw new TypeError(msg);\n}\n/** Format a command to be sent via SCSP protocol */\nfunction formatCommand(command) {\n // core returns null if there's a space after the semi column\n // we want to maintain a compatibility with the standard sqlite3 driver\n command.query = command.query.trim();\n if (command.parameters && command.parameters.length > 0) {\n // by SCSP the string paramenters in the array are zero-terminated\n return serializeCommand([command.query, ...(command.parameters || [])], true);\n }\n return serializeData(command.query, false);\n}\nfunction serializeCommand(data, zeroString = false) {\n const n = data.length;\n let serializedData = `${n} `;\n for (let i = 0; i < n; i++) {\n // the first string is the sql and it must be zero-terminated\n const zs = i == 0 || zeroString;\n serializedData += serializeData(data[i], zs);\n }\n const header = `${exports.CMD_ARRAY}${serializedData.length} `;\n return header + serializedData;\n}\nfunction serializeData(data, zeroString = false) {\n if (typeof data === 'string') {\n let cmd = exports.CMD_STRING;\n if (zeroString) {\n cmd = exports.CMD_ZEROSTRING;\n data += '\\0';\n }\n const header = `${cmd}${buffer_1.Buffer.byteLength(data, 'utf-8')} `;\n return header + data;\n }\n if (typeof data === 'number') {\n if (Number.isInteger(data)) {\n return `${exports.CMD_INT}${data} `;\n }\n else {\n return `${exports.CMD_FLOAT}${data} `;\n }\n }\n if (buffer_1.Buffer.isBuffer(data)) {\n const header = `${exports.CMD_BLOB}${data.length} `;\n return header + data.toString('utf-8');\n }\n if (data === null || data === undefined) {\n return `${exports.CMD_NULL} `;\n }\n if (Array.isArray(data)) {\n return serializeCommand(data, zeroString);\n }\n throw new Error(`Unsupported data type for serialization: ${typeof data}`);\n}\n\n\n//# sourceURL=webpack://sqlitecloud/./lib/drivers/protocol.js?");
+ eval("\n//\n// protocol.ts - low level protocol handling for SQLiteCloud transport\n//\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ROWSET_CHUNKS_END = exports.CMD_PUBSUB = exports.CMD_ARRAY = exports.CMD_COMMAND = exports.CMD_COMPRESSED = exports.CMD_BLOB = exports.CMD_NULL = exports.CMD_JSON = exports.CMD_ROWSET_CHUNK = exports.CMD_ROWSET = exports.CMD_FLOAT = exports.CMD_INT = exports.CMD_ERROR = exports.CMD_ZEROSTRING = exports.CMD_STRING = void 0;\nexports.hasCommandLength = hasCommandLength;\nexports.parseCommandLength = parseCommandLength;\nexports.decompressBuffer = decompressBuffer;\nexports.parseError = parseError;\nexports.parseArray = parseArray;\nexports.parseRowsetHeader = parseRowsetHeader;\nexports.bufferStartsWith = bufferStartsWith;\nexports.bufferEndsWith = bufferEndsWith;\nexports.parseRowsetChunks = parseRowsetChunks;\nexports.popData = popData;\nexports.formatCommand = formatCommand;\nconst types_1 = __webpack_require__(/*! ./types */ \"./lib/drivers/types.js\");\nconst rowset_1 = __webpack_require__(/*! ./rowset */ \"./lib/drivers/rowset.js\");\n// explicitly importing buffer library to allow cross-platform support by replacing it\nconst buffer_1 = __webpack_require__(/*! buffer */ \"./node_modules/buffer/index.js\");\n// https://www.npmjs.com/package/lz4js\nconst lz4 = __webpack_require__(/*! lz4js */ \"./node_modules/lz4js/lz4.js\");\n// The server communicates with clients via commands defined in\n// SQLiteCloud Server Protocol (SCSP), see more at:\n// https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md\nexports.CMD_STRING = '+';\nexports.CMD_ZEROSTRING = '!';\nexports.CMD_ERROR = '-';\nexports.CMD_INT = ':';\nexports.CMD_FLOAT = ',';\nexports.CMD_ROWSET = '*';\nexports.CMD_ROWSET_CHUNK = '/';\nexports.CMD_JSON = '#';\nexports.CMD_NULL = '_';\nexports.CMD_BLOB = '$';\nexports.CMD_COMPRESSED = '%';\nexports.CMD_COMMAND = '^';\nexports.CMD_ARRAY = '=';\n// const CMD_RAWJSON = '{'\nexports.CMD_PUBSUB = '|';\n// const CMD_RECONNECT = '@'\n// To mark the end of the Rowset, the special string /LEN 0 0 0 is sent (LEN is always 6 in this case)\n// https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-rowset-chunk\nexports.ROWSET_CHUNKS_END = '/6 0 0 0 ';\n//\n// utility functions\n//\n/** Analyze first character to check if corresponding data type has LEN */\nfunction hasCommandLength(firstCharacter) {\n return firstCharacter == exports.CMD_INT || firstCharacter == exports.CMD_FLOAT || firstCharacter == exports.CMD_NULL ? 
false : true;\n}\n/** Analyze a command with explict LEN and extract it */\nfunction parseCommandLength(data) {\n return parseInt(data.subarray(1, data.indexOf(' ')).toString('utf8'));\n}\n/** Receive a compressed buffer, decompress with lz4, return buffer and datatype */\nfunction decompressBuffer(buffer) {\n // https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-compression\n // jest test/database.test.ts -t \"select large result set\"\n // starts with %<commandLength> <compressed> <uncompressed>\n const spaceIndex = buffer.indexOf(' ');\n const commandLength = parseInt(buffer.subarray(1, spaceIndex).toString('utf8'));\n let commandBuffer = buffer.subarray(spaceIndex + 1, spaceIndex + 1 + commandLength);\n const remainingBuffer = buffer.subarray(spaceIndex + 1 + commandLength);\n // extract compressed + decompressed size\n const compressedSize = parseInt(commandBuffer.subarray(0, commandBuffer.indexOf(' ') + 1).toString('utf8'));\n commandBuffer = commandBuffer.subarray(commandBuffer.indexOf(' ') + 1);\n const decompressedSize = parseInt(commandBuffer.subarray(0, commandBuffer.indexOf(' ') + 1).toString('utf8'));\n commandBuffer = commandBuffer.subarray(commandBuffer.indexOf(' ') + 1);\n // extract compressed dataType\n const dataType = commandBuffer.subarray(0, 1).toString('utf8');\n let decompressedBuffer = buffer_1.Buffer.alloc(decompressedSize);\n const compressedBuffer = commandBuffer.subarray(commandBuffer.length - compressedSize);\n // lz4js library is javascript and doesn't have types so we silence the type check\n const decompressionResult = lz4.decompressBlock(compressedBuffer, decompressedBuffer, 0, compressedSize, 0);\n // the entire command is composed of the header (which is not compressed) + the decompressed block\n decompressedBuffer = buffer_1.Buffer.concat([commandBuffer.subarray(0, commandBuffer.length - compressedSize), decompressedBuffer]);\n if (decompressionResult <= 0 || decompressionResult !== decompressedSize) {\n throw new Error(`lz4 decompression error at offset ${decompressionResult}`);\n }\n return { buffer: decompressedBuffer, dataType, remainingBuffer };\n}\n/** Parse error message or extended error message */\nfunction parseError(buffer, spaceIndex) {\n const errorBuffer = buffer.subarray(spaceIndex + 1);\n const errorString = errorBuffer.toString('utf8');\n const parts = errorString.split(' ');\n let errorCodeStr = parts.shift() || '0'; // Default errorCode is '0' if not present\n let extErrCodeStr = '0'; // Default extended error code\n let offsetCodeStr = '-1'; // Default offset code\n // Split the errorCode by ':' to check for extended error codes\n const errorCodeParts = errorCodeStr.split(':');\n errorCodeStr = errorCodeParts[0];\n if (errorCodeParts.length > 1) {\n extErrCodeStr = errorCodeParts[1];\n if (errorCodeParts.length > 2) {\n offsetCodeStr = errorCodeParts[2];\n }\n }\n // Rest of the error string is the error message\n const errorMessage = parts.join(' ');\n // Parse error codes to integers safely, defaulting to 0 if NaN\n const errorCode = parseInt(errorCodeStr);\n const extErrCode = parseInt(extErrCodeStr);\n const offsetCode = parseInt(offsetCodeStr);\n // create an Error object and add the custom properties\n throw new types_1.SQLiteCloudError(errorMessage, {\n errorCode: errorCode.toString(),\n externalErrorCode: extErrCode.toString(),\n offsetCode\n });\n}\n/** Parse an array of items (each of which will be parsed by type separately) */\nfunction parseArray(buffer, spaceIndex) {\n const parsedData = [];\n const array = 
buffer.subarray(spaceIndex + 1, buffer.length);\n const numberOfItems = parseInt(array.subarray(0, spaceIndex - 2).toString('utf8'));\n let arrayItems = array.subarray(array.indexOf(' ') + 1, array.length);\n for (let i = 0; i < numberOfItems; i++) {\n const { data, fwdBuffer: buffer } = popData(arrayItems);\n parsedData.push(data);\n arrayItems = buffer;\n }\n return parsedData;\n}\n/** Parse header in a rowset or chunk of a chunked rowset */\nfunction parseRowsetHeader(buffer) {\n const index = parseInt(buffer.subarray(0, buffer.indexOf(':') + 1).toString());\n buffer = buffer.subarray(buffer.indexOf(':') + 1);\n // extract rowset header\n const { data, fwdBuffer } = popIntegers(buffer, 3);\n const result = {\n index,\n metadata: {\n version: data[0],\n numberOfRows: data[1],\n numberOfColumns: data[2],\n columns: []\n },\n fwdBuffer\n };\n // console.debug(`parseRowsetHeader`, result)\n return result;\n}\n/** Extract column names and, optionally, more metadata out of a rowset's header */\nfunction parseRowsetColumnsMetadata(buffer, metadata) {\n function popForward() {\n const { data, fwdBuffer: fwdBuffer } = popData(buffer); // buffer in parent scope\n buffer = fwdBuffer;\n return data;\n }\n for (let i = 0; i < metadata.numberOfColumns; i++) {\n metadata.columns.push({ name: popForward() });\n }\n // extract additional metadata if rowset has version 2\n if (metadata.version == 2) {\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].type = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].database = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].table = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].column = popForward(); // original column name\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].notNull = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].primaryKey = popForward();\n for (let i = 0; i < metadata.numberOfColumns; i++)\n metadata.columns[i].autoIncrement = popForward();\n }\n return buffer;\n}\n/** Parse a regular rowset (no chunks) */\nfunction parseRowset(buffer, spaceIndex) {\n buffer = buffer.subarray(spaceIndex + 1, buffer.length);\n const { metadata, fwdBuffer } = parseRowsetHeader(buffer);\n buffer = parseRowsetColumnsMetadata(fwdBuffer, metadata);\n // decode each rowset item\n const data = [];\n for (let j = 0; j < metadata.numberOfRows * metadata.numberOfColumns; j++) {\n const { data: rowData, fwdBuffer } = popData(buffer);\n data.push(rowData);\n buffer = fwdBuffer;\n }\n console.assert(data && data.length === metadata.numberOfRows * metadata.numberOfColumns, 'SQLiteCloudConnection.parseRowset - invalid rowset data');\n return new rowset_1.SQLiteCloudRowset(metadata, data);\n}\nfunction bufferStartsWith(buffer, prefix) {\n return buffer.length >= prefix.length && buffer.subarray(0, prefix.length).toString('utf8') === prefix;\n}\nfunction bufferEndsWith(buffer, suffix) {\n return buffer.length >= suffix.length && buffer.subarray(buffer.length - suffix.length, buffer.length).toString('utf8') === suffix;\n}\n/**\n * Parse a chunk of a chunked rowset command, eg:\n * *LEN 0:VERS NROWS NCOLS DATA\n * @see https://github.com/sqlitecloud/sdk/blob/master/PROTOCOL.md#scsp-rowset-chunk\n */\nfunction parseRowsetChunks(buffers) {\n let buffer = buffer_1.Buffer.concat(buffers);\n if (!bufferStartsWith(buffer, exports.CMD_ROWSET_CHUNK) || !bufferEndsWith(buffer, 
exports.ROWSET_CHUNKS_END)) {\n throw new Error('SQLiteCloudConnection.parseRowsetChunks - invalid chunks buffer');\n }\n let metadata = { version: 1, numberOfColumns: 0, numberOfRows: 0, columns: [] };\n const data = [];\n // validate and skip data type\n const dataType = buffer.subarray(0, 1).toString();\n if (dataType !== exports.CMD_ROWSET_CHUNK)\n throw new Error(`parseRowsetChunks - dataType: ${dataType} should be CMD_ROWSET_CHUNK`);\n buffer = buffer.subarray(buffer.indexOf(' ') + 1);\n while (buffer.length > 0 && !bufferStartsWith(buffer, exports.ROWSET_CHUNKS_END)) {\n // chunk header, eg: 0:VERS NROWS NCOLS\n const { index: chunkIndex, metadata: chunkMetadata, fwdBuffer } = parseRowsetHeader(buffer);\n buffer = fwdBuffer;\n // first chunk? extract columns metadata\n if (chunkIndex === 1) {\n metadata = chunkMetadata;\n buffer = parseRowsetColumnsMetadata(buffer, metadata);\n }\n else {\n metadata.numberOfRows += chunkMetadata.numberOfRows;\n }\n // extract single rowset row\n for (let k = 0; k < chunkMetadata.numberOfRows * metadata.numberOfColumns; k++) {\n const { data: itemData, fwdBuffer } = popData(buffer);\n data.push(itemData);\n buffer = fwdBuffer;\n }\n }\n console.assert(data && data.length === metadata.numberOfRows * metadata.numberOfColumns, 'parseRowsetChunks - invalid rowset data');\n const rowset = new rowset_1.SQLiteCloudRowset(metadata, data);\n // console.debug(`parseRowsetChunks - ${rowset.numberOfRows} rows, ${rowset.numberOfColumns} columns`)\n return rowset;\n}\n/** Pop one or more space separated integers from beginning of buffer, move buffer forward */\nfunction popIntegers(buffer, numberOfIntegers = 1) {\n const data = [];\n for (let i = 0; i < numberOfIntegers; i++) {\n const spaceIndex = buffer.indexOf(' ');\n data[i] = parseInt(buffer.subarray(0, spaceIndex).toString());\n buffer = buffer.subarray(spaceIndex + 1);\n }\n return { data, fwdBuffer: buffer };\n}\n/** Parse command, extract its data, return the data and the buffer moved to the first byte after the command */\nfunction popData(buffer) {\n function popResults(data) {\n const fwdBuffer = buffer.subarray(commandEnd);\n return { data, fwdBuffer };\n }\n // first character is the data type\n console.assert(buffer && buffer instanceof buffer_1.Buffer);\n let dataType = buffer.subarray(0, 1).toString('utf8');\n if (dataType == exports.CMD_COMPRESSED)\n throw new Error('Compressed data should be decompressed before parsing');\n if (dataType == exports.CMD_ROWSET_CHUNK)\n throw new Error('Chunked data should be parsed by parseRowsetChunks');\n let spaceIndex = buffer.indexOf(' ');\n if (spaceIndex === -1) {\n spaceIndex = buffer.length - 1;\n }\n let commandEnd = -1, commandLength = -1;\n if (dataType === exports.CMD_INT || dataType === exports.CMD_FLOAT || dataType === exports.CMD_NULL) {\n commandEnd = spaceIndex + 1;\n }\n else {\n commandLength = parseInt(buffer.subarray(1, spaceIndex).toString());\n commandEnd = spaceIndex + 1 + commandLength;\n }\n // console.debug(`popData - dataType: ${dataType}, spaceIndex: ${spaceIndex}, commandLength: ${commandLength}, commandEnd: ${commandEnd}`)\n switch (dataType) {\n case exports.CMD_INT:\n return popResults(parseInt(buffer.subarray(1, spaceIndex).toString()));\n case exports.CMD_FLOAT:\n return popResults(parseFloat(buffer.subarray(1, spaceIndex).toString()));\n case exports.CMD_NULL:\n return popResults(null);\n case exports.CMD_STRING:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_ZEROSTRING:\n 
return popResults(buffer.subarray(spaceIndex + 1, commandEnd - 1).toString('utf8'));\n case exports.CMD_COMMAND:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_PUBSUB:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8'));\n case exports.CMD_JSON:\n return popResults(JSON.parse(buffer.subarray(spaceIndex + 1, commandEnd).toString('utf8')));\n case exports.CMD_BLOB:\n return popResults(buffer.subarray(spaceIndex + 1, commandEnd));\n case exports.CMD_ARRAY:\n return popResults(parseArray(buffer, spaceIndex));\n case exports.CMD_ROWSET:\n return popResults(parseRowset(buffer, spaceIndex));\n case exports.CMD_ERROR:\n parseError(buffer, spaceIndex); // throws custom error\n break;\n }\n const msg = `popData - Data type: ${Number(dataType)} '${dataType}' is not defined in SCSP, spaceIndex: ${spaceIndex}, commandLength: ${commandLength}, commandEnd: ${commandEnd}`;\n console.error(msg);\n throw new TypeError(msg);\n}\n/** Format a command to be sent via SCSP protocol */\nfunction formatCommand(command) {\n // core returns null if there's a space after the semi column\n // we want to maintain a compatibility with the standard sqlite3 driver\n command.query = command.query.trim();\n if (command.parameters && command.parameters.length > 0) {\n // by SCSP the string paramenters in the array are zero-terminated\n return serializeCommand([command.query, ...(command.parameters || [])], true);\n }\n return serializeData(command.query, false);\n}\nfunction serializeCommand(data, zeroString = false) {\n const n = data.length;\n let serializedData = `${n} `;\n for (let i = 0; i < n; i++) {\n // the first string is the sql and it must be zero-terminated\n const zs = i == 0 || zeroString;\n serializedData += serializeData(data[i], zs);\n }\n const bytesTotal = buffer_1.Buffer.byteLength(serializedData, 'utf-8');\n const header = `${exports.CMD_ARRAY}${bytesTotal} `;\n return header + serializedData;\n}\nfunction serializeData(data, zeroString = false) {\n if (typeof data === 'string') {\n let cmd = exports.CMD_STRING;\n if (zeroString) {\n cmd = exports.CMD_ZEROSTRING;\n data += '\\0';\n }\n const header = `${cmd}${buffer_1.Buffer.byteLength(data, 'utf-8')} `;\n return header + data;\n }\n if (typeof data === 'number') {\n if (Number.isInteger(data)) {\n return `${exports.CMD_INT}${data} `;\n }\n else {\n return `${exports.CMD_FLOAT}${data} `;\n }\n }\n if (buffer_1.Buffer.isBuffer(data)) {\n const header = `${exports.CMD_BLOB}${data.length} `;\n return header + data.toString('utf-8');\n }\n if (data === null || data === undefined) {\n return `${exports.CMD_NULL} `;\n }\n if (Array.isArray(data)) {\n return serializeCommand(data, zeroString);\n }\n throw new Error(`Unsupported data type for serialization: ${typeof data}`);\n}\n\n\n//# sourceURL=webpack://sqlitecloud/./lib/drivers/protocol.js?");

  /***/ }),

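
The second hunk is the same fix as it appears in the bundled copy of lib/drivers/protocol.js inside the webpack build. The byte count matters because the receiving side reads LEN from the header and then slices exactly LEN bytes from the buffer (see parseCommandLength and popData above), so a character-based LEN under-reports the payload size for multi-byte strings and cuts the command short. A rough sketch of that effect, using a simplified reader that mirrors the LEN-then-slice behavior (readCommand is illustrative, not an API of the driver):

import { Buffer } from 'buffer'

// Simplified reader: parse LEN after the data-type byte, then slice LEN bytes.
function readCommand(raw: Buffer): string {
  const spaceIndex = raw.indexOf(' ')
  const len = parseInt(raw.subarray(1, spaceIndex).toString('utf8'))
  return raw.subarray(spaceIndex + 1, spaceIndex + 1 + len).toString('utf8')
}

const body = '1 +3 né'
// LEN from string length (7) drops the second byte of 'é' and truncates the payload
console.log(readCommand(Buffer.from(`=${body.length} ${body}`, 'utf8')))
// LEN from Buffer.byteLength (8) round-trips the payload intact
console.log(readCommand(Buffer.from(`=${Buffer.byteLength(body, 'utf8')} ${body}`, 'utf8')))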