@loaders.gl/csv 4.3.1 → 4.3.2
This diff shows the content changes between publicly available versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/csv-loader.js +1 -1
- package/dist/dist.min.js +1 -1
- package/dist/index.cjs +1 -1
- package/dist/index.cjs.map +1 -1
- package/package.json +4 -4
package/dist/csv-loader.js
CHANGED
@@ -6,7 +6,7 @@ import Papa from "./papaparse/papaparse.js";
import AsyncIteratorStreamer from "./papaparse/async-iterator-streamer.js";
// __VERSION__ is injected by babel-plugin-version-inline
// @ts-ignore TS2304: Cannot find name '__VERSION__'.
-const VERSION = typeof "4.3.0" !== 'undefined' ? "4.3.0" : 'latest';
+const VERSION = typeof "4.3.1" !== 'undefined' ? "4.3.1" : 'latest';
const DEFAULT_CSV_SHAPE = 'object-row-table';
export const CSVLoader = {
dataType: null,
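
The only substantive change in dist/csv-loader.js is the version string injected by babel-plugin-version-inline. A minimal usage sketch (not part of the diff; it assumes the CSVLoader API visible in the source above, i.e. parseText, the options.csv defaults, and the version field):

import {CSVLoader} from '@loaders.gl/csv';

// header auto-detection and dynamicTyping are on by default (see options.csv in the source)
const table = await CSVLoader.parseText('a,b\n1,2\n3,4', {csv: {shape: 'object-row-table'}});
// expected: table.data -> [{a: 1, b: 2}, {a: 3, b: 4}]
console.log(CSVLoader.version); // the injected string, "4.3.1" in this build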
package/dist/dist.min.js
CHANGED
@@ -13,7 +13,7 @@
`&&r!=="\r"&&r!==`\r
`&&(r=`
`);var i=0,d=!1;this.parse=function(a,m,g){if(typeof a!="string")throw new Error("Input must be a string");var y=a.length,h=e.length,f=r.length,w=s.length,T=F(n);i=0;var p=[],S=[],b=[],v=0;if(!a)return R();if(c||c!==!1&&a.indexOf(u)===-1){for(var O=a.split(r),E=0;E<O.length;E++){let A=O[E];if(i+=A.length,E!==O.length-1)i+=r.length;else if(g)return R();if(!(s&&A.substr(0,w)===s)){if(T){if(p=[],Z(A.split(e)),U(),d)return R()}else Z(A.split(e));if(o&&E>=o)return p=p.slice(0,o),R(!0)}}return R()}for(var j=a.indexOf(e,i),C=a.indexOf(r,i),k=new RegExp(V(l)+V(u),"g"),_;;){if(a[i]===u){for(_=i,i++;;){if(_=a.indexOf(u,_+1),_===-1)return g||S.push({type:"Quotes",code:"MissingQuotes",message:"Quoted field unterminated",row:p.length,index:i}),ie();if(_===y-1){var B=a.substring(i,_).replace(k,u);return ie(B)}if(u===l&&a[_+1]===l){_++;continue}if(!(u!==l&&_!==0&&a[_-1]===l)){var oe=C===-1?j:Math.min(j,C),be=ve(oe);if(a[_+1+be]===e){if(b.push(a.substring(i,_).replace(k,u)),i=_+1+be+h,j=a.indexOf(e,i),C=a.indexOf(r,i),T&&(U(),d))return R();if(o&&p.length>=o)return R(!0);break}var _e=ve(C);if(a.substr(_+1+_e,f)===r){if(b.push(a.substring(i,_).replace(k,u)),Te(_+1+_e+f),j=a.indexOf(e,i),T&&(U(),d))return R();if(o&&p.length>=o)return R(!0);break}S.push({type:"Quotes",code:"InvalidQuotes",message:"Trailing quote on quoted field is malformed",row:p.length,index:i}),_++}}if(T&&(U(),d))return R();if(o&&p.length>=o)return R(!0);continue}if(s&&b.length===0&&a.substr(i,w)===s){if(C===-1)return R();i=C+f,C=a.indexOf(r,i),j=a.indexOf(e,i);continue}if(j!==-1&&(j<C||C===-1)){b.push(a.substring(i,j)),i=j+h,j=a.indexOf(e,i);continue}if(C!==-1){if(b.push(a.substring(i,C)),Te(C+f),T&&(U(),d))return R();if(o&&p.length>=o)return R(!0);continue}break}return ie();function Z(A){p.push(A),v=i}function ve(A){var Q=0;if(A!==-1){var q=a.substring(_+1,A);q&&q.trim()===""&&(Q=q.length)}return Q}function ie(A){return g||(typeof A>"u"&&(A=a.substr(i)),b.push(A),i=y,Z(b),T&&U()),R()}function Te(A){i=A,Z(b),b=[],C=a.indexOf(r,i)}function R(A,Q){var q=Q||!1;return{data:q?p[0]:p,errors:S,meta:{delimiter:e,linebreak:r,aborted:d,truncated:!!A,cursor:v+(m||0)}}}function U(){n(R(void 0,!0)),p=[],S=[]}},this.abort=function(){d=!0},this.getCharIndex=function(){return i}}function Se(t){if(typeof t!="object"||t===null)return t;var e=Array.isArray(t)?[]:{};for(var r in t)e[r]=Se(t[r]);return e}function F(t){return typeof t=="function"}var D={parse:Ge,unparse:et,RECORD_SEP:String.fromCharCode(30),UNIT_SEP:String.fromCharCode(31),BYTE_ORDER_MARK:Fe,BAD_DELIMITERS:["\r",`
-
`,'"',Fe],WORKERS_SUPPORTED:!1,NODE_STREAM_INPUT:1,LocalChunkSize:1024*1024*10,RemoteChunkSize:1024*1024*5,DefaultDelimiter:",",Parser:fe,ParserHandle:ae,ChunkStreamer:se},I=D;var{ChunkStreamer:at}=I,W=class extends at{textDecoder=new TextDecoder(this._config.encoding);constructor(e={}){super(e)}async stream(e){this._input=e;try{for await(let r of e)this.parseChunk(this.getStringChunk(r));this._finished=!0,this.parseChunk("")}catch(r){this._sendError(r)}}_nextChunk(){}getStringChunk(e){return typeof e=="string"?e:this.textDecoder.decode(e,{stream:!0})}};var nt="4.3.
+
`,'"',Fe],WORKERS_SUPPORTED:!1,NODE_STREAM_INPUT:1,LocalChunkSize:1024*1024*10,RemoteChunkSize:1024*1024*5,DefaultDelimiter:",",Parser:fe,ParserHandle:ae,ChunkStreamer:se},I=D;var{ChunkStreamer:at}=I,W=class extends at{textDecoder=new TextDecoder(this._config.encoding);constructor(e={}){super(e)}async stream(e){this._input=e;try{for await(let r of e)this.parseChunk(this.getStringChunk(r));this._finished=!0,this.parseChunk("")}catch(r){this._sendError(r)}}_nextChunk(){}getStringChunk(e){return typeof e=="string"?e:this.textDecoder.decode(e,{stream:!0})}};var nt="4.3.1",de="object-row-table",ne={dataType:null,batchType:null,id:"csv",module:"csv",name:"CSV",version:nt,extensions:["csv","tsv","dsv"],mimeTypes:["text/csv","text/tab-separated-values","text/dsv"],category:"table",parse:async(t,e)=>De(new TextDecoder().decode(t),e),parseText:(t,e)=>De(t,e),parseInBatches:ot,options:{csv:{shape:de,optimizeMemoryUsage:!1,header:"auto",columnPrefix:"column",quoteChar:'"',escapeChar:'"',dynamicTyping:!0,comments:!1,skipEmptyLines:!0,delimitersToGuess:[","," ","|",";"]}}};async function De(t,e){let r={...ne.options.csv,...e?.csv},s=it(t),o=r.header==="auto"?ke(s):Boolean(r.header),c={...r,header:o,download:!1,transformHeader:o?Be():void 0,error:a=>{throw new Error(a)}},u=I.parse(t,c),l=u.data,i=u.meta.fields||Ne(r.columnPrefix,s.length),d=r.shape||de;switch(d){case"object-row-table":return{shape:"object-row-table",data:l.map(a=>Array.isArray(a)?H(a,i):a)};case"array-row-table":return{shape:"array-row-table",data:l.map(a=>Array.isArray(a)?a:z(a,i))};default:throw new Error(d)}}function ot(t,e){e={...e},e.batchSize==="auto"&&(e.batchSize=4e3);let r={...ne.options.csv,...e?.csv},s=new M,n=!0,o=null,c=null,u=null,l={...r,header:!1,download:!1,chunkSize:1024*1024*5,skipEmptyLines:!1,step(i){let d=i.data;if(r.skipEmptyLines&&d.flat().join("").trim()==="")return;let a=i.meta.cursor;if(n&&!o&&(r.header==="auto"?ke(d):Boolean(r.header))){o=d.map(Be());return}n&&(n=!1,o||(o=Ne(r.columnPrefix,d.length)),u=ht(d,o)),r.optimizeMemoryUsage&&(d=JSON.parse(JSON.stringify(d)));let m=r.shape||de;c=c||new L(u,{shape:m,...e});try{c.addRow(d);let g=c&&c.getFullBatch({bytesUsed:a});g&&s.enqueue(g)}catch(g){s.enqueue(g)}},complete(i){try{let d=i.meta.cursor,a=c&&c.getFinalBatch({bytesUsed:d});a&&s.enqueue(a)}catch(d){s.enqueue(d)}s.close()}};return I.parse(t,l,W),s}function ke(t){return t&&t.every(e=>typeof e=="string")}function it(t){return I.parse(t,{dynamicTyping:!0,preview:1}).data[0]}function Be(){let t=new Set;return e=>{let r=e,s=1;for(;t.has(r);)r=`${e}.${s}`,s++;return t.add(r),r}}function Ne(t,e=0){let r=[];for(let s=0;s<e;s++)r.push(`${t}${s+1}`);return r}function ht(t,e){let r=e?{}:[];for(let s=0;s<t.length;s++){let n=e&&e[s]||s;switch(typeof t[s]){case"number":case"boolean":r[n]={name:String(n),index:s,type:Float32Array};break;case"string":default:r[n]={name:String(n),index:s,type:Array}}}return r}var Ue={},me={},pe=34,J=10,ge=13;function Me(t){return new Function("d","return {"+t.map(function(e,r){return JSON.stringify(e)+": d["+r+'] || ""'}).join(",")+"}")}function ct(t,e){var r=Me(t);return function(s,n){return e(r(s),n,t)}}function Le(t){var e=Object.create(null),r=[];return t.forEach(function(s){for(var n in s)n in e||r.push(e[n]=n)}),r}function x(t,e){var r=t+"",s=r.length;return s<e?new Array(e-s+1).join(0)+r:r}function lt(t){return t<0?"-"+x(-t,6):t>9999?"+"+x(t,6):x(t,4)}function ut(t){var e=t.getUTCHours(),r=t.getUTCMinutes(),s=t.getUTCSeconds(),n=t.getUTCMilliseconds();return isNaN(t)?"Invalid 
Date":lt(t.getUTCFullYear(),4)+"-"+x(t.getUTCMonth()+1,2)+"-"+x(t.getUTCDate(),2)+(n?"T"+x(e,2)+":"+x(r,2)+":"+x(s,2)+"."+x(n,3)+"Z":s?"T"+x(e,2)+":"+x(r,2)+":"+x(s,2)+"Z":r||e?"T"+x(e,2)+":"+x(r,2)+"Z":"")}function Ie(t){var e=new RegExp('["'+t+`
\r]`),r=t.charCodeAt(0);function s(a,m){var g,y,h=n(a,function(f,w){if(g)return g(f,w-1);y=f,g=m?ct(f,m):Me(f)});return h.columns=y||[],h}function n(a,m){var g=[],y=a.length,h=0,f=0,w,T=y<=0,p=!1;a.charCodeAt(y-1)===J&&--y,a.charCodeAt(y-1)===ge&&--y;function S(){if(T)return me;if(p)return p=!1,Ue;var v,O=h,E;if(a.charCodeAt(O)===pe){for(;h++<y&&a.charCodeAt(h)!==pe||a.charCodeAt(++h)===pe;);return(v=h)>=y?T=!0:(E=a.charCodeAt(h++))===J?p=!0:E===ge&&(p=!0,a.charCodeAt(h)===J&&++h),a.slice(O+1,v-1).replace(/""/g,'"')}for(;h<y;){if((E=a.charCodeAt(v=h++))===J)p=!0;else if(E===ge)p=!0,a.charCodeAt(h)===J&&++h;else if(E!==r)continue;return a.slice(O,v)}return T=!0,a.slice(O,y)}for(;(w=S())!==me;){for(var b=[];w!==Ue&&w!==me;)b.push(w),w=S();m&&(b=m(b,f++))==null||g.push(b)}return g}function o(a,m){return a.map(function(g){return m.map(function(y){return d(g[y])}).join(t)})}function c(a,m){return m==null&&(m=Le(a)),[m.map(d).join(t)].concat(o(a,m)).join(`
`)}function u(a,m){return m==null&&(m=Le(a)),o(a,m).join(`
`)}function l(a){return a.map(i).join(`
package/dist/index.cjs
CHANGED
@@ -832,7 +832,7 @@ var AsyncIteratorStreamer = class extends ChunkStreamer2 {
};

// dist/csv-loader.js
-var VERSION = true ? "4.3.0" : "latest";
+var VERSION = true ? "4.3.1" : "latest";
var DEFAULT_CSV_SHAPE = "object-row-table";
var CSVLoader = {
dataType: null,
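
The CJS entry carries the same version bump. Its `true ? "4.3.1" : "latest"` form is just the ESM guard `typeof "4.3.1" !== 'undefined'` constant-folded by the bundler, since typeof a string literal is always "string". A quick check from Node (a sketch, assuming the package's exports map resolves require() to dist/index.cjs):

// assumes require('@loaders.gl/csv') resolves to dist/index.cjs
const {CSVLoader} = require('@loaders.gl/csv');
console.log(CSVLoader.version); // prints the injected version string shown in this diff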
package/dist/index.cjs.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["index.js", "csv-loader.js", "papaparse/papaparse.js", "papaparse/async-iterator-streamer.js", "lib/encoders/encode-csv.js", "csv-writer.js"],
-
"sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport { CSVLoader } from \"./csv-loader.js\";\nexport { CSVWriter } from \"./csv-writer.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { AsyncQueue, TableBatchBuilder, convertToArrayRow, convertToObjectRow } from '@loaders.gl/schema';\nimport Papa from \"./papaparse/papaparse.js\";\nimport AsyncIteratorStreamer from \"./papaparse/async-iterator-streamer.js\";\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.0\" !== 'undefined' ? \"4.3.0\" : 'latest';\nconst DEFAULT_CSV_SHAPE = 'object-row-table';\nexport const CSVLoader = {\n dataType: null,\n batchType: null,\n id: 'csv',\n module: 'csv',\n name: 'CSV',\n version: VERSION,\n extensions: ['csv', 'tsv', 'dsv'],\n mimeTypes: ['text/csv', 'text/tab-separated-values', 'text/dsv'],\n category: 'table',\n parse: async (arrayBuffer, options) => parseCSV(new TextDecoder().decode(arrayBuffer), options),\n parseText: (text, options) => parseCSV(text, options),\n parseInBatches: parseCSVInBatches,\n // @ts-ignore\n // testText: null,\n options: {\n csv: {\n shape: DEFAULT_CSV_SHAPE, // 'object-row-table'\n optimizeMemoryUsage: false,\n // CSV options\n header: 'auto',\n columnPrefix: 'column',\n // delimiter: auto\n // newline: auto\n quoteChar: '\"',\n escapeChar: '\"',\n dynamicTyping: true,\n comments: false,\n skipEmptyLines: true,\n // transform: null?\n delimitersToGuess: [',', '\\t', '|', ';']\n // fastMode: auto\n }\n }\n};\nasync function parseCSV(csvText, options) {\n // Apps can call the parse method directly, we so apply default options here\n const csvOptions = { ...CSVLoader.options.csv, ...options?.csv };\n const firstRow = readFirstRow(csvText);\n const header = csvOptions.header === 'auto' ? isHeaderRow(firstRow) : Boolean(csvOptions.header);\n const parseWithHeader = header;\n const papaparseConfig = {\n // dynamicTyping: true,\n ...csvOptions,\n header: parseWithHeader,\n download: false, // We handle loading, no need for papaparse to do it for us\n transformHeader: parseWithHeader ? duplicateColumnTransformer() : undefined,\n error: (e) => {\n throw new Error(e);\n }\n };\n const result = Papa.parse(csvText, papaparseConfig);\n const rows = result.data;\n const headerRow = result.meta.fields || generateHeader(csvOptions.columnPrefix, firstRow.length);\n const shape = csvOptions.shape || DEFAULT_CSV_SHAPE;\n switch (shape) {\n case 'object-row-table':\n return {\n shape: 'object-row-table',\n data: rows.map((row) => (Array.isArray(row) ? convertToObjectRow(row, headerRow) : row))\n };\n case 'array-row-table':\n return {\n shape: 'array-row-table',\n data: rows.map((row) => (Array.isArray(row) ? 
row : convertToArrayRow(row, headerRow)))\n };\n default:\n throw new Error(shape);\n }\n}\n// TODO - support batch size 0 = no batching/single batch?\nfunction parseCSVInBatches(asyncIterator, options) {\n // Papaparse does not support standard batch size handling\n // TODO - investigate papaparse chunks mode\n options = { ...options };\n if (options.batchSize === 'auto') {\n options.batchSize = 4000;\n }\n // Apps can call the parse method directly, we so apply default options here\n const csvOptions = { ...CSVLoader.options.csv, ...options?.csv };\n const asyncQueue = new AsyncQueue();\n let isFirstRow = true;\n let headerRow = null;\n let tableBatchBuilder = null;\n let schema = null;\n const config = {\n // dynamicTyping: true, // Convert numbers and boolean values in rows from strings,\n ...csvOptions,\n header: false, // Unfortunately, header detection is not automatic and does not infer shapes\n download: false, // We handle loading, no need for papaparse to do it for us\n // chunkSize is set to 5MB explicitly (same as Papaparse default) due to a bug where the\n // streaming parser gets stuck if skipEmptyLines and a step callback are both supplied.\n // See https://github.com/mholt/PapaParse/issues/465\n chunkSize: 1024 * 1024 * 5,\n // skipEmptyLines is set to a boolean value if supplied. Greedy is set to true\n // skipEmptyLines is handled manually given two bugs where the streaming parser gets stuck if\n // both of the skipEmptyLines and step callback options are provided:\n // - true doesn't work unless chunkSize is set: https://github.com/mholt/PapaParse/issues/465\n // - greedy doesn't work: https://github.com/mholt/PapaParse/issues/825\n skipEmptyLines: false,\n // step is called on every row\n // eslint-disable-next-line complexity, max-statements\n step(results) {\n let row = results.data;\n if (csvOptions.skipEmptyLines) {\n // Manually reject lines that are empty\n const collapsedRow = row.flat().join('').trim();\n if (collapsedRow === '') {\n return;\n }\n }\n const bytesUsed = results.meta.cursor;\n // Check if we need to save a header row\n if (isFirstRow && !headerRow) {\n // Auto detects or can be forced with csvOptions.header\n const header = csvOptions.header === 'auto' ? 
isHeaderRow(row) : Boolean(csvOptions.header);\n if (header) {\n headerRow = row.map(duplicateColumnTransformer());\n return;\n }\n }\n // If first data row, we can deduce the schema\n if (isFirstRow) {\n isFirstRow = false;\n if (!headerRow) {\n headerRow = generateHeader(csvOptions.columnPrefix, row.length);\n }\n schema = deduceSchema(row, headerRow);\n }\n if (csvOptions.optimizeMemoryUsage) {\n // A workaround to allocate new strings and don't retain pointers to original strings.\n // https://bugs.chromium.org/p/v8/issues/detail?id=2869\n row = JSON.parse(JSON.stringify(row));\n }\n const shape = csvOptions.shape || DEFAULT_CSV_SHAPE;\n // Add the row\n tableBatchBuilder =\n tableBatchBuilder ||\n new TableBatchBuilder(\n // @ts-expect-error TODO this is not a proper schema\n schema, {\n shape,\n ...options\n });\n try {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder && tableBatchBuilder.getFullBatch({ bytesUsed });\n if (batch) {\n asyncQueue.enqueue(batch);\n }\n }\n catch (error) {\n asyncQueue.enqueue(error);\n }\n },\n // complete is called when all rows have been read\n complete(results) {\n try {\n const bytesUsed = results.meta.cursor;\n // Ensure any final (partial) batch gets emitted\n const batch = tableBatchBuilder && tableBatchBuilder.getFinalBatch({ bytesUsed });\n if (batch) {\n asyncQueue.enqueue(batch);\n }\n }\n catch (error) {\n asyncQueue.enqueue(error);\n }\n asyncQueue.close();\n }\n };\n Papa.parse(asyncIterator, config, AsyncIteratorStreamer);\n // TODO - Does it matter if we return asyncIterable or asyncIterator\n // return asyncQueue[Symbol.asyncIterator]();\n return asyncQueue;\n}\n/**\n * Checks if a certain row is a header row\n * @param row the row to check\n * @returns true if the row looks like a header\n */\nfunction isHeaderRow(row) {\n return row && row.every((value) => typeof value === 'string');\n}\n/**\n * Reads, parses, and returns the first row of a CSV text\n * @param csvText the csv text to parse\n * @returns the first row\n */\nfunction readFirstRow(csvText) {\n const result = Papa.parse(csvText, {\n dynamicTyping: true,\n preview: 1\n });\n return result.data[0];\n}\n/**\n * Creates a transformer that renames duplicate columns. This is needed as Papaparse doesn't handle\n * duplicate header columns and would use the latest occurrence by default.\n * See the header option in https://www.papaparse.com/docs#config\n * @returns a transform function that returns sanitized names for duplicate fields\n */\nfunction duplicateColumnTransformer() {\n const observedColumns = new Set();\n return (col) => {\n let colName = col;\n let counter = 1;\n while (observedColumns.has(colName)) {\n colName = `${col}.${counter}`;\n counter++;\n }\n observedColumns.add(colName);\n return colName;\n };\n}\n/**\n * Generates the header of a CSV given a prefix and a column count\n * @param columnPrefix the columnPrefix to use\n * @param count the count of column names to generate\n * @returns an array of column names\n */\nfunction generateHeader(columnPrefix, count = 0) {\n const headers = [];\n for (let i = 0; i < count; i++) {\n headers.push(`${columnPrefix}${i + 1}`);\n }\n return headers;\n}\nfunction deduceSchema(row, headerRow) {\n const schema = headerRow ? 
{} : [];\n for (let i = 0; i < row.length; i++) {\n const columnName = (headerRow && headerRow[i]) || i;\n const value = row[i];\n switch (typeof value) {\n case 'number':\n case 'boolean':\n // TODO - booleans could be handled differently...\n schema[columnName] = { name: String(columnName), index: i, type: Float32Array };\n break;\n case 'string':\n default:\n schema[columnName] = { name: String(columnName), index: i, type: Array };\n // We currently only handle numeric rows\n // TODO we could offer a function to map strings to numbers?\n }\n }\n return schema;\n}\n", "// This is a fork of papaparse\n// https://github.com/mholt/PapaParse\n/* @license\nPapa Parse\nv5.0.0-beta.0\nhttps://github.com/mholt/PapaParse\nLicense: MIT\n*/\n// FORK SUMMARY:\n// - Adopt ES6 exports\n// - Implement new AsyncIteratorStreamer\n// - Remove non Async Iterator streamers (can all be handled by new streamer)\n// - Remove unused Worker support (loaders.gl worker system used instead)\n// - Remove unused jQuery plugin support\n// const defaultConfig: Required<CSVParserConfig> = {\n// dynamicTyping: false,\n// dynamicTypingFunction: undefined!,\n// transform: false\n// };\n/* eslint-disable */\nconst BYTE_ORDER_MARK = '\\ufeff';\nfunction CsvToJson(_input, _config = {}, Streamer = StringStreamer) {\n _config = _config || {};\n var dynamicTyping = _config.dynamicTyping || false;\n if (isFunction(dynamicTyping)) {\n _config.dynamicTypingFunction = dynamicTyping;\n // Will be filled on first row call\n dynamicTyping = {};\n }\n _config.dynamicTyping = dynamicTyping;\n _config.transform = isFunction(_config.transform) ? _config.transform : false;\n var streamer = new Streamer(_config);\n return streamer.stream(_input);\n}\nfunction JsonToCsv(_input, _config) {\n // Default configuration\n /** whether to surround every datum with quotes */\n var _quotes = false;\n /** whether to write headers */\n var _writeHeader = true;\n /** delimiting character(s) */\n var _delimiter = ',';\n /** newline character(s) */\n var _newline = '\\r\\n';\n /** quote character */\n var _quoteChar = '\"';\n /** escaped quote character, either \"\" or <config.escapeChar>\" */\n var _escapedQuote = _quoteChar + _quoteChar;\n /** whether to skip empty lines */\n var _skipEmptyLines = false;\n /** the columns (keys) we expect when we unparse objects */\n var _columns = null;\n unpackConfig();\n var quoteCharRegex = new RegExp(escapeRegExp(_quoteChar), 'g');\n if (typeof _input === 'string')\n _input = JSON.parse(_input);\n if (Array.isArray(_input)) {\n if (!_input.length || Array.isArray(_input[0]))\n return serialize(null, _input, _skipEmptyLines);\n else if (typeof _input[0] === 'object')\n return serialize(_columns || Object.keys(_input[0]), _input, _skipEmptyLines);\n }\n else if (typeof _input === 'object') {\n if (typeof _input.data === 'string')\n _input.data = JSON.parse(_input.data);\n if (Array.isArray(_input.data)) {\n if (!_input.fields)\n _input.fields = _input.meta && _input.meta.fields;\n if (!_input.fields)\n _input.fields = Array.isArray(_input.data[0]) ? 
_input.fields : Object.keys(_input.data[0]);\n if (!Array.isArray(_input.data[0]) && typeof _input.data[0] !== 'object')\n _input.data = [_input.data]; // handles input like [1,2,3] or ['asdf']\n }\n return serialize(_input.fields || [], _input.data || [], _skipEmptyLines);\n }\n // Default (any valid paths should return before this)\n throw new Error('Unable to serialize unrecognized input');\n function unpackConfig() {\n if (typeof _config !== 'object')\n return;\n if (typeof _config.delimiter === 'string' &&\n !Papa.BAD_DELIMITERS.filter(function (value) {\n return _config.delimiter.indexOf(value) !== -1;\n }).length) {\n _delimiter = _config.delimiter;\n }\n if (typeof _config.quotes === 'boolean' || Array.isArray(_config.quotes))\n _quotes = _config.quotes;\n if (typeof _config.skipEmptyLines === 'boolean' || typeof _config.skipEmptyLines === 'string')\n _skipEmptyLines = _config.skipEmptyLines;\n if (typeof _config.newline === 'string')\n _newline = _config.newline;\n if (typeof _config.quoteChar === 'string')\n _quoteChar = _config.quoteChar;\n if (typeof _config.header === 'boolean')\n _writeHeader = _config.header;\n if (Array.isArray(_config.columns)) {\n if (_config.columns.length === 0)\n throw new Error('Option columns is empty');\n _columns = _config.columns;\n }\n if (_config.escapeChar !== undefined) {\n _escapedQuote = _config.escapeChar + _quoteChar;\n }\n }\n /** The double for loop that iterates the data and writes out a CSV string including header row */\n function serialize(fields, data, skipEmptyLines) {\n var csv = '';\n if (typeof fields === 'string')\n fields = JSON.parse(fields);\n if (typeof data === 'string')\n data = JSON.parse(data);\n var hasHeader = Array.isArray(fields) && fields.length > 0;\n var dataKeyedByField = !Array.isArray(data[0]);\n // If there a header row, write it first\n if (hasHeader && _writeHeader) {\n for (var i = 0; i < fields.length; i++) {\n if (i > 0)\n csv += _delimiter;\n csv += safe(fields[i], i);\n }\n if (data.length > 0)\n csv += _newline;\n }\n // Then write out the data\n for (var row = 0; row < data.length; row++) {\n var maxCol = hasHeader ? fields.length : data[row].length;\n var emptyLine = false;\n var nullLine = hasHeader ? Object.keys(data[row]).length === 0 : data[row].length === 0;\n if (skipEmptyLines && !hasHeader) {\n emptyLine =\n skipEmptyLines === 'greedy'\n ? data[row].join('').trim() === ''\n : data[row].length === 1 && data[row][0].length === 0;\n }\n if (skipEmptyLines === 'greedy' && hasHeader) {\n var line = [];\n for (var c = 0; c < maxCol; c++) {\n var cx = dataKeyedByField ? fields[c] : c;\n line.push(data[row][cx]);\n }\n emptyLine = line.join('').trim() === '';\n }\n if (!emptyLine) {\n for (var col = 0; col < maxCol; col++) {\n if (col > 0 && !nullLine)\n csv += _delimiter;\n var colIdx = hasHeader && dataKeyedByField ? 
fields[col] : col;\n csv += safe(data[row][colIdx], col);\n }\n if (row < data.length - 1 && (!skipEmptyLines || (maxCol > 0 && !nullLine))) {\n csv += _newline;\n }\n }\n }\n return csv;\n }\n /** Encloses a value around quotes if needed (makes a value safe for CSV insertion) */\n function safe(str, col) {\n if (typeof str === 'undefined' || str === null)\n return '';\n if (str.constructor === Date)\n return JSON.stringify(str).slice(1, 25);\n str = str.toString().replace(quoteCharRegex, _escapedQuote);\n var needsQuotes = (typeof _quotes === 'boolean' && _quotes) ||\n (Array.isArray(_quotes) && _quotes[col]) ||\n hasAny(str, Papa.BAD_DELIMITERS) ||\n str.indexOf(_delimiter) > -1 ||\n str.charAt(0) === ' ' ||\n str.charAt(str.length - 1) === ' ';\n return needsQuotes ? _quoteChar + str + _quoteChar : str;\n }\n function hasAny(str, substrings) {\n for (var i = 0; i < substrings.length; i++)\n if (str.indexOf(substrings[i]) > -1)\n return true;\n return false;\n }\n}\n/** ChunkStreamer is the base prototype for various streamer implementations. */\nclass ChunkStreamer {\n _handle;\n _config;\n _finished = false;\n _completed = false;\n _input = null;\n _baseIndex = 0;\n _partialLine = '';\n _rowCount = 0;\n _start = 0;\n isFirstChunk = true;\n _completeResults = {\n data: [],\n errors: [],\n meta: {}\n };\n constructor(config) {\n // Deep-copy the config so we can edit it\n var configCopy = { ...config };\n // @ts-expect-error\n configCopy.chunkSize = parseInt(configCopy.chunkSize); // parseInt VERY important so we don't concatenate strings!\n if (!config.step && !config.chunk) {\n configCopy.chunkSize = null; // disable Range header if not streaming; bad values break IIS - see issue #196\n }\n this._handle = new ParserHandle(configCopy);\n this._handle.streamer = this;\n this._config = configCopy; // persist the copy to the caller\n }\n parseChunk(chunk, isFakeChunk) {\n // First chunk pre-processing\n if (this.isFirstChunk && isFunction(this._config.beforeFirstChunk)) {\n var modifiedChunk = this._config.beforeFirstChunk(chunk);\n if (modifiedChunk !== undefined)\n chunk = modifiedChunk;\n }\n this.isFirstChunk = false;\n // Rejoin the line we likely just split in two by chunking the file\n var aggregate = this._partialLine + chunk;\n this._partialLine = '';\n var results = this._handle.parse(aggregate, this._baseIndex, !this._finished);\n if (this._handle.paused() || this._handle.aborted())\n return;\n var lastIndex = results.meta.cursor;\n if (!this._finished) {\n this._partialLine = aggregate.substring(lastIndex - this._baseIndex);\n this._baseIndex = lastIndex;\n }\n if (results && results.data)\n this._rowCount += results.data.length;\n var finishedIncludingPreview = this._finished || (this._config.preview && this._rowCount >= this._config.preview);\n if (isFunction(this._config.chunk) && !isFakeChunk) {\n this._config.chunk(results, this._handle);\n if (this._handle.paused() || this._handle.aborted())\n return;\n results = undefined;\n // @ts-expect-error\n this._completeResults = undefined;\n }\n if (!this._config.step && !this._config.chunk) {\n this._completeResults.data = this._completeResults.data.concat(results.data);\n this._completeResults.errors = this._completeResults.errors.concat(results.errors);\n this._completeResults.meta = results.meta;\n }\n if (!this._completed &&\n finishedIncludingPreview &&\n isFunction(this._config.complete) &&\n (!results || !results.meta.aborted)) {\n this._config.complete(this._completeResults, this._input);\n this._completed = true;\n }\n 
// if (!finishedIncludingPreview && (!results || !results.meta.paused)) this._nextChunk();\n return results;\n }\n _sendError(error) {\n if (isFunction(this._config.error))\n this._config.error(error);\n }\n}\nclass StringStreamer extends ChunkStreamer {\n remaining;\n constructor(config = {}) {\n super(config);\n }\n stream(s) {\n this.remaining = s;\n return this._nextChunk();\n }\n _nextChunk() {\n if (this._finished)\n return;\n var size = this._config.chunkSize;\n var chunk = size ? this.remaining.substr(0, size) : this.remaining;\n this.remaining = size ? this.remaining.substr(size) : '';\n this._finished = !this.remaining;\n return this.parseChunk(chunk);\n }\n}\nconst FLOAT = /^\\s*-?(\\d*\\.?\\d+|\\d+\\.?\\d*)(e[-+]?\\d+)?\\s*$/i;\nconst ISO_DATE = /(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))/;\n// Use one ParserHandle per entire CSV file or string\nclass ParserHandle {\n _config;\n /** Number of times step was called (number of rows parsed) */\n _stepCounter = 0;\n /** Number of rows that have been parsed so far */\n _rowCounter = 0;\n /** The input being parsed */\n _input;\n /** The core parser being used */\n _parser;\n /** Whether we are paused or not */\n _paused = false;\n /** Whether the parser has aborted or not */\n _aborted = false;\n /** Temporary state between delimiter detection and processing results */\n _delimiterError = false;\n /** Fields are from the header row of the input, if there is one */\n _fields = [];\n /** The last results returned from the parser */\n _results = {\n data: [],\n errors: [],\n meta: {}\n };\n constructor(_config) {\n // One goal is to minimize the use of regular expressions...\n if (isFunction(_config.step)) {\n var userStep = _config.step;\n _config.step = (results) => {\n this._results = results;\n if (this.needsHeaderRow()) {\n this.processResults();\n }\n // only call user's step function after header row\n else {\n this.processResults();\n // It's possbile that this line was empty and there's no row here after all\n if (!this._results.data || this._results.data.length === 0)\n return;\n this._stepCounter += results.data.length;\n if (_config.preview && this._stepCounter > _config.preview) {\n this._parser.abort();\n }\n else {\n userStep(this._results, this);\n }\n }\n };\n }\n this._config = _config;\n }\n /**\n * Parses input. Most users won't need, and shouldn't mess with, the baseIndex\n * and ignoreLastRow parameters. 
They are used by streamers (wrapper functions)\n * when an input comes in multiple chunks, like from a file.\n */\n parse(input, baseIndex, ignoreLastRow) {\n var quoteChar = this._config.quoteChar || '\"';\n if (!this._config.newline)\n this._config.newline = guessLineEndings(input, quoteChar);\n this._delimiterError = false;\n if (!this._config.delimiter) {\n var delimGuess = this.guessDelimiter(input, this._config.newline, this._config.skipEmptyLines, this._config.comments, this._config.delimitersToGuess);\n if (delimGuess.successful) {\n this._config.delimiter = delimGuess.bestDelimiter;\n }\n else {\n this._delimiterError = true; // add error after parsing (otherwise it would be overwritten)\n this._config.delimiter = Papa.DefaultDelimiter;\n }\n this._results.meta.delimiter = this._config.delimiter;\n }\n else if (isFunction(this._config.delimiter)) {\n this._config.delimiter = this._config.delimiter(input);\n this._results.meta.delimiter = this._config.delimiter;\n }\n var parserConfig = copy(this._config);\n if (this._config.preview && this._config.header)\n parserConfig.preview++; // to compensate for header row\n this._input = input;\n this._parser = new Parser(parserConfig);\n this._results = this._parser.parse(this._input, baseIndex, ignoreLastRow);\n this.processResults();\n return this._paused ? { meta: { paused: true } } : this._results || { meta: { paused: false } };\n }\n paused() {\n return this._paused;\n }\n pause() {\n this._paused = true;\n this._parser.abort();\n this._input = this._input.substr(this._parser.getCharIndex());\n }\n resume() {\n this._paused = false;\n // @ts-expect-error\n this.streamer.parseChunk(this._input, true);\n }\n aborted() {\n return this._aborted;\n }\n abort() {\n this._aborted = true;\n this._parser.abort();\n this._results.meta.aborted = true;\n if (isFunction(this._config.complete)) {\n this._config.complete(this._results);\n }\n this._input = '';\n }\n testEmptyLine(s) {\n return this._config.skipEmptyLines === 'greedy'\n ? 
s.join('').trim() === ''\n : s.length === 1 && s[0].length === 0;\n }\n processResults() {\n if (this._results && this._delimiterError) {\n this.addError('Delimiter', 'UndetectableDelimiter', \"Unable to auto-detect delimiting character; defaulted to '\" + Papa.DefaultDelimiter + \"'\");\n this._delimiterError = false;\n }\n if (this._config.skipEmptyLines) {\n for (var i = 0; i < this._results.data.length; i++)\n if (this.testEmptyLine(this._results.data[i]))\n this._results.data.splice(i--, 1);\n }\n if (this.needsHeaderRow()) {\n this.fillHeaderFields();\n }\n return this.applyHeaderAndDynamicTypingAndTransformation();\n }\n needsHeaderRow() {\n return this._config.header && this._fields.length === 0;\n }\n fillHeaderFields() {\n if (!this._results)\n return;\n const addHeder = (header) => {\n if (isFunction(this._config.transformHeader))\n header = this._config.transformHeader(header);\n this._fields.push(header);\n };\n if (Array.isArray(this._results.data[0])) {\n for (var i = 0; this.needsHeaderRow() && i < this._results.data.length; i++)\n this._results.data[i].forEach(addHeder);\n this._results.data.splice(0, 1);\n }\n // if _results.data[0] is not an array, we are in a step where _results.data is the row.\n else {\n this._results.data.forEach(addHeder);\n }\n }\n shouldApplyDynamicTyping(field) {\n // Cache function values to avoid calling it for each row\n if (this._config.dynamicTypingFunction && this._config.dynamicTyping[field] === undefined) {\n this._config.dynamicTyping[field] = this._config.dynamicTypingFunction(field);\n }\n return (this._config.dynamicTyping[field] || this._config.dynamicTyping) === true;\n }\n parseDynamic(field, value) {\n if (this.shouldApplyDynamicTyping(field)) {\n if (value === 'true' || value === 'TRUE')\n return true;\n else if (value === 'false' || value === 'FALSE')\n return false;\n else if (FLOAT.test(value))\n return parseFloat(value);\n else if (ISO_DATE.test(value))\n return new Date(value);\n else\n return value === '' ? null : value;\n }\n return value;\n }\n applyHeaderAndDynamicTypingAndTransformation() {\n if (!this._results ||\n !this._results.data ||\n (!this._config.header && !this._config.dynamicTyping && !this._config.transform)) {\n return this._results;\n }\n var incrementBy = 1;\n if (!this._results.data[0] || Array.isArray(this._results.data[0])) {\n this._results.data = this._results.data.map(this.processRow.bind(this));\n incrementBy = this._results.data.length;\n }\n else {\n // @ts-expect-error\n this._results.data = this.processRow(this._results.data, 0);\n }\n if (this._config.header && this._results.meta)\n this._results.meta.fields = this._fields;\n this._rowCounter += incrementBy;\n return this._results;\n }\n processRow(rowSource, i) {\n var row = this._config.header ? {} : [];\n var j;\n for (j = 0; j < rowSource.length; j++) {\n var field = j;\n var value = rowSource[j];\n if (this._config.header)\n field = j >= this._fields.length ? 
'__parsed_extra' : this._fields[j];\n if (this._config.transform)\n value = this._config.transform(value, field);\n value = this.parseDynamic(field, value);\n if (field === '__parsed_extra') {\n row[field] = row[field] || [];\n row[field].push(value);\n }\n else\n row[field] = value;\n }\n if (this._config.header) {\n if (j > this._fields.length)\n this.addError('FieldMismatch', 'TooManyFields', 'Too many fields: expected ' + this._fields.length + ' fields but parsed ' + j, this._rowCounter + i);\n else if (j < this._fields.length)\n this.addError('FieldMismatch', 'TooFewFields', 'Too few fields: expected ' + this._fields.length + ' fields but parsed ' + j, this._rowCounter + i);\n }\n return row;\n }\n guessDelimiter(input, newline, skipEmptyLines, comments, delimitersToGuess) {\n var bestDelim, bestDelta, fieldCountPrevRow;\n delimitersToGuess = delimitersToGuess || [',', '\\t', '|', ';', Papa.RECORD_SEP, Papa.UNIT_SEP];\n for (var i = 0; i < delimitersToGuess.length; i++) {\n var delim = delimitersToGuess[i];\n var delta = 0, avgFieldCount = 0, emptyLinesCount = 0;\n fieldCountPrevRow = undefined;\n var preview = new Parser({\n comments: comments,\n delimiter: delim,\n newline: newline,\n preview: 10\n }).parse(input);\n for (var j = 0; j < preview.data.length; j++) {\n if (skipEmptyLines && this.testEmptyLine(preview.data[j])) {\n emptyLinesCount++;\n continue;\n }\n var fieldCount = preview.data[j].length;\n avgFieldCount += fieldCount;\n if (typeof fieldCountPrevRow === 'undefined') {\n fieldCountPrevRow = 0;\n continue;\n }\n else if (fieldCount > 1) {\n delta += Math.abs(fieldCount - fieldCountPrevRow);\n fieldCountPrevRow = fieldCount;\n }\n }\n if (preview.data.length > 0)\n avgFieldCount /= preview.data.length - emptyLinesCount;\n if ((typeof bestDelta === 'undefined' || delta > bestDelta) && avgFieldCount > 1.99) {\n bestDelta = delta;\n bestDelim = delim;\n }\n }\n this._config.delimiter = bestDelim;\n return {\n successful: !!bestDelim,\n bestDelimiter: bestDelim\n };\n }\n addError(type, code, msg, row) {\n this._results.errors.push({\n type: type,\n code: code,\n message: msg,\n row: row\n });\n }\n}\nfunction guessLineEndings(input, quoteChar) {\n input = input.substr(0, 1024 * 1024); // max length 1 MB\n // Replace all the text inside quotes\n var re = new RegExp(escapeRegExp(quoteChar) + '([^]*?)' + escapeRegExp(quoteChar), 'gm');\n input = input.replace(re, '');\n var r = input.split('\\r');\n var n = input.split('\\n');\n var nAppearsFirst = n.length > 1 && n[0].length < r[0].length;\n if (r.length === 1 || nAppearsFirst)\n return '\\n';\n var numWithN = 0;\n for (var i = 0; i < r.length; i++) {\n if (r[i][0] === '\\n')\n numWithN++;\n }\n return numWithN >= r.length / 2 ? 
'\\r\\n' : '\\r';\n}\n/** https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions */\nfunction escapeRegExp(string) {\n return string.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&'); // $& means the whole matched string\n}\n/** The core parser implements speedy and correct CSV parsing */\nfunction Parser(config) {\n // Unpack the config object\n config = config || {};\n var delim = config.delimiter;\n var newline = config.newline;\n var comments = config.comments;\n var step = config.step;\n var preview = config.preview;\n var fastMode = config.fastMode;\n var quoteChar;\n /** Allows for no quoteChar by setting quoteChar to undefined in config */\n if (config.quoteChar === undefined) {\n quoteChar = '\"';\n }\n else {\n quoteChar = config.quoteChar;\n }\n var escapeChar = quoteChar;\n if (config.escapeChar !== undefined) {\n escapeChar = config.escapeChar;\n }\n // Delimiter must be valid\n if (typeof delim !== 'string' || Papa.BAD_DELIMITERS.indexOf(delim) > -1)\n delim = ',';\n // Comment character must be valid\n if (comments === delim)\n throw new Error('Comment character same as delimiter');\n else if (comments === true)\n comments = '#';\n else if (typeof comments !== 'string' || Papa.BAD_DELIMITERS.indexOf(comments) > -1)\n comments = false;\n // Newline must be valid: \\r, \\n, or \\r\\n\n if (newline !== '\\n' && newline !== '\\r' && newline !== '\\r\\n')\n newline = '\\n';\n // We're gonna need these at the Parser scope\n var cursor = 0;\n var aborted = false;\n // @ts-expect-error\n this.parse = function (input, baseIndex, ignoreLastRow) {\n // For some reason, in Chrome, this speeds things up (!?)\n if (typeof input !== 'string')\n throw new Error('Input must be a string');\n // We don't need to compute some of these every time parse() is called,\n // but having them in a more local scope seems to perform better\n var inputLen = input.length, delimLen = delim.length, newlineLen = newline.length, commentsLen = comments.length;\n var stepIsFunction = isFunction(step);\n // Establish starting state\n cursor = 0;\n var data = [], errors = [], row = [], lastCursor = 0;\n if (!input)\n return returnable();\n if (fastMode || (fastMode !== false && input.indexOf(quoteChar) === -1)) {\n var rows = input.split(newline);\n for (var i = 0; i < rows.length; i++) {\n const row = rows[i];\n cursor += row.length;\n if (i !== rows.length - 1)\n cursor += newline.length;\n else if (ignoreLastRow)\n return returnable();\n if (comments && row.substr(0, commentsLen) === comments)\n continue;\n if (stepIsFunction) {\n data = [];\n pushRow(row.split(delim));\n doStep();\n if (aborted)\n return returnable();\n }\n else\n pushRow(row.split(delim));\n if (preview && i >= preview) {\n data = data.slice(0, preview);\n return returnable(true);\n }\n }\n return returnable();\n }\n var nextDelim = input.indexOf(delim, cursor);\n var nextNewline = input.indexOf(newline, cursor);\n var quoteCharRegex = new RegExp(escapeRegExp(escapeChar) + escapeRegExp(quoteChar), 'g');\n var quoteSearch;\n // Parser loop\n for (;;) {\n // Field has opening quote\n if (input[cursor] === quoteChar) {\n // Start our search for the closing quote where the cursor is\n quoteSearch = cursor;\n // Skip the opening quote\n cursor++;\n for (;;) {\n // Find closing quote\n quoteSearch = input.indexOf(quoteChar, quoteSearch + 1);\n //No other quotes are found - no other delimiters\n if (quoteSearch === -1) {\n if (!ignoreLastRow) {\n // No closing quote... 
what a pity\n errors.push({\n type: 'Quotes',\n code: 'MissingQuotes',\n message: 'Quoted field unterminated',\n row: data.length, // row has yet to be inserted\n index: cursor\n });\n }\n return finish();\n }\n // Closing quote at EOF\n if (quoteSearch === inputLen - 1) {\n var value = input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar);\n return finish(value);\n }\n // If this quote is escaped, it's part of the data; skip it\n // If the quote character is the escape character, then check if the next character is the escape character\n if (quoteChar === escapeChar && input[quoteSearch + 1] === escapeChar) {\n quoteSearch++;\n continue;\n }\n // If the quote character is not the escape character, then check if the previous character was the escape character\n if (quoteChar !== escapeChar &&\n quoteSearch !== 0 &&\n input[quoteSearch - 1] === escapeChar) {\n continue;\n }\n // Check up to nextDelim or nextNewline, whichever is closest\n var checkUpTo = nextNewline === -1 ? nextDelim : Math.min(nextDelim, nextNewline);\n var spacesBetweenQuoteAndDelimiter = extraSpaces(checkUpTo);\n // Closing quote followed by delimiter or 'unnecessary spaces + delimiter'\n if (input[quoteSearch + 1 + spacesBetweenQuoteAndDelimiter] === delim) {\n row.push(input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar));\n cursor = quoteSearch + 1 + spacesBetweenQuoteAndDelimiter + delimLen;\n nextDelim = input.indexOf(delim, cursor);\n nextNewline = input.indexOf(newline, cursor);\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n break;\n }\n var spacesBetweenQuoteAndNewLine = extraSpaces(nextNewline);\n // Closing quote followed by newline or 'unnecessary spaces + newLine'\n if (input.substr(quoteSearch + 1 + spacesBetweenQuoteAndNewLine, newlineLen) === newline) {\n row.push(input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar));\n saveRow(quoteSearch + 1 + spacesBetweenQuoteAndNewLine + newlineLen);\n nextDelim = input.indexOf(delim, cursor); // because we may have skipped the nextDelim in the quoted field\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n break;\n }\n // Checks for valid closing quotes are complete (escaped quotes or quote followed by EOF/delimiter/newline) -- assume these quotes are part of an invalid text string\n errors.push({\n type: 'Quotes',\n code: 'InvalidQuotes',\n message: 'Trailing quote on quoted field is malformed',\n row: data.length, // row has yet to be inserted\n index: cursor\n });\n quoteSearch++;\n continue;\n }\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n continue;\n }\n // Comment found at start of new line\n if (comments && row.length === 0 && input.substr(cursor, commentsLen) === comments) {\n if (nextNewline === -1)\n // Comment ends at EOF\n return returnable();\n cursor = nextNewline + newlineLen;\n nextNewline = input.indexOf(newline, cursor);\n nextDelim = input.indexOf(delim, cursor);\n continue;\n }\n // Next delimiter comes before next newline, so we've reached end of field\n if (nextDelim !== -1 && (nextDelim < nextNewline || nextNewline === -1)) {\n row.push(input.substring(cursor, nextDelim));\n cursor = nextDelim + delimLen;\n nextDelim = input.indexOf(delim, cursor);\n continue;\n }\n // End of row\n if 
(nextNewline !== -1) {\n row.push(input.substring(cursor, nextNewline));\n saveRow(nextNewline + newlineLen);\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n continue;\n }\n break;\n }\n return finish();\n function pushRow(row) {\n data.push(row);\n lastCursor = cursor;\n }\n /**\n * checks if there are extra spaces after closing quote and given index without any text\n * if Yes, returns the number of spaces\n */\n function extraSpaces(index) {\n var spaceLength = 0;\n if (index !== -1) {\n var textBetweenClosingQuoteAndIndex = input.substring(quoteSearch + 1, index);\n if (textBetweenClosingQuoteAndIndex && textBetweenClosingQuoteAndIndex.trim() === '') {\n spaceLength = textBetweenClosingQuoteAndIndex.length;\n }\n }\n return spaceLength;\n }\n /**\n * Appends the remaining input from cursor to the end into\n * row, saves the row, calls step, and returns the results.\n */\n function finish(value) {\n if (ignoreLastRow)\n return returnable();\n if (typeof value === 'undefined')\n value = input.substr(cursor);\n row.push(value);\n cursor = inputLen; // important in case parsing is paused\n pushRow(row);\n if (stepIsFunction)\n doStep();\n return returnable();\n }\n /**\n * Appends the current row to the results. It sets the cursor\n * to newCursor and finds the nextNewline. The caller should\n * take care to execute user's step function and check for\n * preview and end parsing if necessary.\n */\n function saveRow(newCursor) {\n cursor = newCursor;\n pushRow(row);\n row = [];\n nextNewline = input.indexOf(newline, cursor);\n }\n /** Returns an object with the results, errors, and meta. */\n function returnable(stopped, step) {\n var isStep = step || false;\n return {\n data: isStep ? data[0] : data,\n errors: errors,\n meta: {\n delimiter: delim,\n linebreak: newline,\n aborted: aborted,\n truncated: !!stopped,\n cursor: lastCursor + (baseIndex || 0)\n }\n };\n }\n /** Executes the user's step function and resets data & errors. */\n function doStep() {\n step(returnable(undefined, true));\n data = [];\n errors = [];\n }\n };\n /** Sets the abort flag */\n // @ts-expect-error\n this.abort = function () {\n aborted = true;\n };\n /** Gets the cursor position */\n // @ts-expect-error\n this.getCharIndex = function () {\n return cursor;\n };\n}\n/** Makes a deep copy of an array or object (mostly) */\nfunction copy(obj) {\n if (typeof obj !== 'object' || obj === null)\n return obj;\n var cpy = Array.isArray(obj) ? 
[] : {};\n for (var key in obj)\n cpy[key] = copy(obj[key]);\n return cpy;\n}\nfunction isFunction(func) {\n return typeof func === 'function';\n}\nconst Papa = {\n parse: CsvToJson,\n unparse: JsonToCsv,\n RECORD_SEP: String.fromCharCode(30),\n UNIT_SEP: String.fromCharCode(31),\n BYTE_ORDER_MARK,\n BAD_DELIMITERS: ['\\r', '\\n', '\"', BYTE_ORDER_MARK],\n WORKERS_SUPPORTED: false, // !IS_WORKER && !!globalThis.Worker\n NODE_STREAM_INPUT: 1,\n // Configurable chunk sizes for local and remote files, respectively\n LocalChunkSize: 1024 * 1024 * 10, // 10 M,\n RemoteChunkSize: 1024 * 1024 * 5, // 5 M,\n DefaultDelimiter: ',', // Used if not specified and detection fail,\n // Exposed for testing and development only\n Parser: Parser,\n ParserHandle: ParserHandle,\n // BEGIN FORK\n ChunkStreamer: ChunkStreamer\n};\nexport default Papa;\n", "// @ts-nocheck\n// A custom papaparse `Streamer` for async iterators\n// Ideally this can be contributed back to papaparse\n// Or papaparse can expose Streamer API so we can extend without forking.\n/* eslint-disable no-invalid-this */\n// Note: papaparse is not an ES6 module\nimport Papa from \"./papaparse.js\";\nconst { ChunkStreamer } = Papa;\nexport default class AsyncIteratorStreamer extends ChunkStreamer {\n textDecoder = new TextDecoder(this._config.encoding);\n constructor(config = {}) {\n super(config);\n }\n // Implement ChunkStreamer base class methods\n // this.pause = function() {\n // ChunkStreamer.prototype.pause.apply(this, arguments);\n // };\n // this.resume = function() {\n // ChunkStreamer.prototype.resume.apply(this, arguments);\n // this._input.resume();\n // };\n async stream(asyncIterator) {\n this._input = asyncIterator;\n try {\n // ES2018 version\n // TODO - check for pause and abort flags?\n for await (const chunk of asyncIterator) {\n this.parseChunk(this.getStringChunk(chunk));\n }\n // ES5 VERSION\n // while (true) {\n // asyncIterator.next().then(function(value) {\n // if (value.done) {\n // // finalize iterator?\n // }\n // }\n // const = await ;\n // if (done) return total;\n // total += value.length;\n // }\n this._finished = true;\n this.parseChunk('');\n }\n catch (error) {\n // Inform ChunkStreamer base class of error\n this._sendError(error);\n }\n }\n _nextChunk() {\n // Left empty, as async iterator automatically pulls next chunk\n }\n // HELPER METHODS\n getStringChunk(chunk) {\n return typeof chunk === 'string' ? chunk : this.textDecoder.decode(chunk, { stream: true });\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// Copyright 2022 Foursquare Labs, Inc.\nimport { makeArrayRowIterator, getTableNumCols } from '@loaders.gl/schema';\nimport { csvFormatRows } from 'd3-dsv';\n/**\n * Encode a Table object as CSV\n */\nexport function encodeTableAsCSV(table, options = { csv: { useDisplayNames: true } }) {\n const useDisplayNames = options.useDisplayNames || options.csv?.useDisplayNames;\n const fields = table.schema?.fields || [];\n const columnNames = fields.map((f) => {\n // This is a leaky abstraction, assuming Kepler metadata\n const displayName = f.metadata?.displayName;\n return useDisplayNames && typeof displayName === 'string' ? 
displayName : f.name;\n });\n const formattedData = [columnNames];\n for (const row of makeArrayRowIterator(table)) {\n const formattedRow = [];\n for (let columnIndex = 0; columnIndex < getTableNumCols(table); ++columnIndex) {\n const value = row[columnIndex];\n formattedRow[columnIndex] = preformatFieldValue(value);\n }\n formattedData.push(formattedRow);\n }\n return csvFormatRows(formattedData);\n}\n/**\n * Stringifies a value\n * @todo Why is it called parse?\n */\nconst preformatFieldValue = (value) => {\n if (value === null || value === undefined) {\n // TODO: It would be nice to distinguish between missing values and the empty string\n // https://github.com/d3/d3-dsv/issues/84\n return null;\n }\n if (value instanceof Date) {\n // d3-dsv formats dates without timezones if they don't have time info;\n // this forces them to always use fully-qualified ISO time strings\n return value.toISOString();\n }\n if (typeof value === 'object') {\n return JSON.stringify(value);\n }\n return String(value);\n};\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { encodeTableAsCSV } from \"./lib/encoders/encode-csv.js\";\nexport const CSVWriter = {\n id: 'csv',\n version: 'latest',\n module: 'csv',\n name: 'CSV',\n extensions: ['csv'],\n mimeTypes: ['text/csv'],\n options: {\n csv: {\n useDisplayNames: false\n }\n },\n text: true,\n encode: async (table, options) => new TextEncoder().encode(encodeTableAsCSV(table, options)).buffer,\n encodeTextSync: (table, options) => encodeTableAsCSV(table, options)\n};\n"],
+
"sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport { CSVLoader } from \"./csv-loader.js\";\nexport { CSVWriter } from \"./csv-writer.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { AsyncQueue, TableBatchBuilder, convertToArrayRow, convertToObjectRow } from '@loaders.gl/schema';\nimport Papa from \"./papaparse/papaparse.js\";\nimport AsyncIteratorStreamer from \"./papaparse/async-iterator-streamer.js\";\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.1\" !== 'undefined' ? \"4.3.1\" : 'latest';\nconst DEFAULT_CSV_SHAPE = 'object-row-table';\nexport const CSVLoader = {\n dataType: null,\n batchType: null,\n id: 'csv',\n module: 'csv',\n name: 'CSV',\n version: VERSION,\n extensions: ['csv', 'tsv', 'dsv'],\n mimeTypes: ['text/csv', 'text/tab-separated-values', 'text/dsv'],\n category: 'table',\n parse: async (arrayBuffer, options) => parseCSV(new TextDecoder().decode(arrayBuffer), options),\n parseText: (text, options) => parseCSV(text, options),\n parseInBatches: parseCSVInBatches,\n // @ts-ignore\n // testText: null,\n options: {\n csv: {\n shape: DEFAULT_CSV_SHAPE, // 'object-row-table'\n optimizeMemoryUsage: false,\n // CSV options\n header: 'auto',\n columnPrefix: 'column',\n // delimiter: auto\n // newline: auto\n quoteChar: '\"',\n escapeChar: '\"',\n dynamicTyping: true,\n comments: false,\n skipEmptyLines: true,\n // transform: null?\n delimitersToGuess: [',', '\\t', '|', ';']\n // fastMode: auto\n }\n }\n};\nasync function parseCSV(csvText, options) {\n // Apps can call the parse method directly, we so apply default options here\n const csvOptions = { ...CSVLoader.options.csv, ...options?.csv };\n const firstRow = readFirstRow(csvText);\n const header = csvOptions.header === 'auto' ? isHeaderRow(firstRow) : Boolean(csvOptions.header);\n const parseWithHeader = header;\n const papaparseConfig = {\n // dynamicTyping: true,\n ...csvOptions,\n header: parseWithHeader,\n download: false, // We handle loading, no need for papaparse to do it for us\n transformHeader: parseWithHeader ? duplicateColumnTransformer() : undefined,\n error: (e) => {\n throw new Error(e);\n }\n };\n const result = Papa.parse(csvText, papaparseConfig);\n const rows = result.data;\n const headerRow = result.meta.fields || generateHeader(csvOptions.columnPrefix, firstRow.length);\n const shape = csvOptions.shape || DEFAULT_CSV_SHAPE;\n switch (shape) {\n case 'object-row-table':\n return {\n shape: 'object-row-table',\n data: rows.map((row) => (Array.isArray(row) ? convertToObjectRow(row, headerRow) : row))\n };\n case 'array-row-table':\n return {\n shape: 'array-row-table',\n data: rows.map((row) => (Array.isArray(row) ? 
row : convertToArrayRow(row, headerRow)))\n };\n default:\n throw new Error(shape);\n }\n}\n// TODO - support batch size 0 = no batching/single batch?\nfunction parseCSVInBatches(asyncIterator, options) {\n // Papaparse does not support standard batch size handling\n // TODO - investigate papaparse chunks mode\n options = { ...options };\n if (options.batchSize === 'auto') {\n options.batchSize = 4000;\n }\n // Apps can call the parse method directly, we so apply default options here\n const csvOptions = { ...CSVLoader.options.csv, ...options?.csv };\n const asyncQueue = new AsyncQueue();\n let isFirstRow = true;\n let headerRow = null;\n let tableBatchBuilder = null;\n let schema = null;\n const config = {\n // dynamicTyping: true, // Convert numbers and boolean values in rows from strings,\n ...csvOptions,\n header: false, // Unfortunately, header detection is not automatic and does not infer shapes\n download: false, // We handle loading, no need for papaparse to do it for us\n // chunkSize is set to 5MB explicitly (same as Papaparse default) due to a bug where the\n // streaming parser gets stuck if skipEmptyLines and a step callback are both supplied.\n // See https://github.com/mholt/PapaParse/issues/465\n chunkSize: 1024 * 1024 * 5,\n // skipEmptyLines is set to a boolean value if supplied. Greedy is set to true\n // skipEmptyLines is handled manually given two bugs where the streaming parser gets stuck if\n // both of the skipEmptyLines and step callback options are provided:\n // - true doesn't work unless chunkSize is set: https://github.com/mholt/PapaParse/issues/465\n // - greedy doesn't work: https://github.com/mholt/PapaParse/issues/825\n skipEmptyLines: false,\n // step is called on every row\n // eslint-disable-next-line complexity, max-statements\n step(results) {\n let row = results.data;\n if (csvOptions.skipEmptyLines) {\n // Manually reject lines that are empty\n const collapsedRow = row.flat().join('').trim();\n if (collapsedRow === '') {\n return;\n }\n }\n const bytesUsed = results.meta.cursor;\n // Check if we need to save a header row\n if (isFirstRow && !headerRow) {\n // Auto detects or can be forced with csvOptions.header\n const header = csvOptions.header === 'auto' ? 
isHeaderRow(row) : Boolean(csvOptions.header);\n if (header) {\n headerRow = row.map(duplicateColumnTransformer());\n return;\n }\n }\n // If first data row, we can deduce the schema\n if (isFirstRow) {\n isFirstRow = false;\n if (!headerRow) {\n headerRow = generateHeader(csvOptions.columnPrefix, row.length);\n }\n schema = deduceSchema(row, headerRow);\n }\n if (csvOptions.optimizeMemoryUsage) {\n // A workaround to allocate new strings and don't retain pointers to original strings.\n // https://bugs.chromium.org/p/v8/issues/detail?id=2869\n row = JSON.parse(JSON.stringify(row));\n }\n const shape = csvOptions.shape || DEFAULT_CSV_SHAPE;\n // Add the row\n tableBatchBuilder =\n tableBatchBuilder ||\n new TableBatchBuilder(\n // @ts-expect-error TODO this is not a proper schema\n schema, {\n shape,\n ...options\n });\n try {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder && tableBatchBuilder.getFullBatch({ bytesUsed });\n if (batch) {\n asyncQueue.enqueue(batch);\n }\n }\n catch (error) {\n asyncQueue.enqueue(error);\n }\n },\n // complete is called when all rows have been read\n complete(results) {\n try {\n const bytesUsed = results.meta.cursor;\n // Ensure any final (partial) batch gets emitted\n const batch = tableBatchBuilder && tableBatchBuilder.getFinalBatch({ bytesUsed });\n if (batch) {\n asyncQueue.enqueue(batch);\n }\n }\n catch (error) {\n asyncQueue.enqueue(error);\n }\n asyncQueue.close();\n }\n };\n Papa.parse(asyncIterator, config, AsyncIteratorStreamer);\n // TODO - Does it matter if we return asyncIterable or asyncIterator\n // return asyncQueue[Symbol.asyncIterator]();\n return asyncQueue;\n}\n/**\n * Checks if a certain row is a header row\n * @param row the row to check\n * @returns true if the row looks like a header\n */\nfunction isHeaderRow(row) {\n return row && row.every((value) => typeof value === 'string');\n}\n/**\n * Reads, parses, and returns the first row of a CSV text\n * @param csvText the csv text to parse\n * @returns the first row\n */\nfunction readFirstRow(csvText) {\n const result = Papa.parse(csvText, {\n dynamicTyping: true,\n preview: 1\n });\n return result.data[0];\n}\n/**\n * Creates a transformer that renames duplicate columns. This is needed as Papaparse doesn't handle\n * duplicate header columns and would use the latest occurrence by default.\n * See the header option in https://www.papaparse.com/docs#config\n * @returns a transform function that returns sanitized names for duplicate fields\n */\nfunction duplicateColumnTransformer() {\n const observedColumns = new Set();\n return (col) => {\n let colName = col;\n let counter = 1;\n while (observedColumns.has(colName)) {\n colName = `${col}.${counter}`;\n counter++;\n }\n observedColumns.add(colName);\n return colName;\n };\n}\n/**\n * Generates the header of a CSV given a prefix and a column count\n * @param columnPrefix the columnPrefix to use\n * @param count the count of column names to generate\n * @returns an array of column names\n */\nfunction generateHeader(columnPrefix, count = 0) {\n const headers = [];\n for (let i = 0; i < count; i++) {\n headers.push(`${columnPrefix}${i + 1}`);\n }\n return headers;\n}\nfunction deduceSchema(row, headerRow) {\n const schema = headerRow ? 
{} : [];\n for (let i = 0; i < row.length; i++) {\n const columnName = (headerRow && headerRow[i]) || i;\n const value = row[i];\n switch (typeof value) {\n case 'number':\n case 'boolean':\n // TODO - booleans could be handled differently...\n schema[columnName] = { name: String(columnName), index: i, type: Float32Array };\n break;\n case 'string':\n default:\n schema[columnName] = { name: String(columnName), index: i, type: Array };\n // We currently only handle numeric rows\n // TODO we could offer a function to map strings to numbers?\n }\n }\n return schema;\n}\n", "// This is a fork of papaparse\n// https://github.com/mholt/PapaParse\n/* @license\nPapa Parse\nv5.0.0-beta.0\nhttps://github.com/mholt/PapaParse\nLicense: MIT\n*/\n// FORK SUMMARY:\n// - Adopt ES6 exports\n// - Implement new AsyncIteratorStreamer\n// - Remove non Async Iterator streamers (can all be handled by new streamer)\n// - Remove unused Worker support (loaders.gl worker system used instead)\n// - Remove unused jQuery plugin support\n// const defaultConfig: Required<CSVParserConfig> = {\n// dynamicTyping: false,\n// dynamicTypingFunction: undefined!,\n// transform: false\n// };\n/* eslint-disable */\nconst BYTE_ORDER_MARK = '\\ufeff';\nfunction CsvToJson(_input, _config = {}, Streamer = StringStreamer) {\n _config = _config || {};\n var dynamicTyping = _config.dynamicTyping || false;\n if (isFunction(dynamicTyping)) {\n _config.dynamicTypingFunction = dynamicTyping;\n // Will be filled on first row call\n dynamicTyping = {};\n }\n _config.dynamicTyping = dynamicTyping;\n _config.transform = isFunction(_config.transform) ? _config.transform : false;\n var streamer = new Streamer(_config);\n return streamer.stream(_input);\n}\nfunction JsonToCsv(_input, _config) {\n // Default configuration\n /** whether to surround every datum with quotes */\n var _quotes = false;\n /** whether to write headers */\n var _writeHeader = true;\n /** delimiting character(s) */\n var _delimiter = ',';\n /** newline character(s) */\n var _newline = '\\r\\n';\n /** quote character */\n var _quoteChar = '\"';\n /** escaped quote character, either \"\" or <config.escapeChar>\" */\n var _escapedQuote = _quoteChar + _quoteChar;\n /** whether to skip empty lines */\n var _skipEmptyLines = false;\n /** the columns (keys) we expect when we unparse objects */\n var _columns = null;\n unpackConfig();\n var quoteCharRegex = new RegExp(escapeRegExp(_quoteChar), 'g');\n if (typeof _input === 'string')\n _input = JSON.parse(_input);\n if (Array.isArray(_input)) {\n if (!_input.length || Array.isArray(_input[0]))\n return serialize(null, _input, _skipEmptyLines);\n else if (typeof _input[0] === 'object')\n return serialize(_columns || Object.keys(_input[0]), _input, _skipEmptyLines);\n }\n else if (typeof _input === 'object') {\n if (typeof _input.data === 'string')\n _input.data = JSON.parse(_input.data);\n if (Array.isArray(_input.data)) {\n if (!_input.fields)\n _input.fields = _input.meta && _input.meta.fields;\n if (!_input.fields)\n _input.fields = Array.isArray(_input.data[0]) ? 
_input.fields : Object.keys(_input.data[0]);\n if (!Array.isArray(_input.data[0]) && typeof _input.data[0] !== 'object')\n _input.data = [_input.data]; // handles input like [1,2,3] or ['asdf']\n }\n return serialize(_input.fields || [], _input.data || [], _skipEmptyLines);\n }\n // Default (any valid paths should return before this)\n throw new Error('Unable to serialize unrecognized input');\n function unpackConfig() {\n if (typeof _config !== 'object')\n return;\n if (typeof _config.delimiter === 'string' &&\n !Papa.BAD_DELIMITERS.filter(function (value) {\n return _config.delimiter.indexOf(value) !== -1;\n }).length) {\n _delimiter = _config.delimiter;\n }\n if (typeof _config.quotes === 'boolean' || Array.isArray(_config.quotes))\n _quotes = _config.quotes;\n if (typeof _config.skipEmptyLines === 'boolean' || typeof _config.skipEmptyLines === 'string')\n _skipEmptyLines = _config.skipEmptyLines;\n if (typeof _config.newline === 'string')\n _newline = _config.newline;\n if (typeof _config.quoteChar === 'string')\n _quoteChar = _config.quoteChar;\n if (typeof _config.header === 'boolean')\n _writeHeader = _config.header;\n if (Array.isArray(_config.columns)) {\n if (_config.columns.length === 0)\n throw new Error('Option columns is empty');\n _columns = _config.columns;\n }\n if (_config.escapeChar !== undefined) {\n _escapedQuote = _config.escapeChar + _quoteChar;\n }\n }\n /** The double for loop that iterates the data and writes out a CSV string including header row */\n function serialize(fields, data, skipEmptyLines) {\n var csv = '';\n if (typeof fields === 'string')\n fields = JSON.parse(fields);\n if (typeof data === 'string')\n data = JSON.parse(data);\n var hasHeader = Array.isArray(fields) && fields.length > 0;\n var dataKeyedByField = !Array.isArray(data[0]);\n // If there a header row, write it first\n if (hasHeader && _writeHeader) {\n for (var i = 0; i < fields.length; i++) {\n if (i > 0)\n csv += _delimiter;\n csv += safe(fields[i], i);\n }\n if (data.length > 0)\n csv += _newline;\n }\n // Then write out the data\n for (var row = 0; row < data.length; row++) {\n var maxCol = hasHeader ? fields.length : data[row].length;\n var emptyLine = false;\n var nullLine = hasHeader ? Object.keys(data[row]).length === 0 : data[row].length === 0;\n if (skipEmptyLines && !hasHeader) {\n emptyLine =\n skipEmptyLines === 'greedy'\n ? data[row].join('').trim() === ''\n : data[row].length === 1 && data[row][0].length === 0;\n }\n if (skipEmptyLines === 'greedy' && hasHeader) {\n var line = [];\n for (var c = 0; c < maxCol; c++) {\n var cx = dataKeyedByField ? fields[c] : c;\n line.push(data[row][cx]);\n }\n emptyLine = line.join('').trim() === '';\n }\n if (!emptyLine) {\n for (var col = 0; col < maxCol; col++) {\n if (col > 0 && !nullLine)\n csv += _delimiter;\n var colIdx = hasHeader && dataKeyedByField ? 
fields[col] : col;\n csv += safe(data[row][colIdx], col);\n }\n if (row < data.length - 1 && (!skipEmptyLines || (maxCol > 0 && !nullLine))) {\n csv += _newline;\n }\n }\n }\n return csv;\n }\n /** Encloses a value around quotes if needed (makes a value safe for CSV insertion) */\n function safe(str, col) {\n if (typeof str === 'undefined' || str === null)\n return '';\n if (str.constructor === Date)\n return JSON.stringify(str).slice(1, 25);\n str = str.toString().replace(quoteCharRegex, _escapedQuote);\n var needsQuotes = (typeof _quotes === 'boolean' && _quotes) ||\n (Array.isArray(_quotes) && _quotes[col]) ||\n hasAny(str, Papa.BAD_DELIMITERS) ||\n str.indexOf(_delimiter) > -1 ||\n str.charAt(0) === ' ' ||\n str.charAt(str.length - 1) === ' ';\n return needsQuotes ? _quoteChar + str + _quoteChar : str;\n }\n function hasAny(str, substrings) {\n for (var i = 0; i < substrings.length; i++)\n if (str.indexOf(substrings[i]) > -1)\n return true;\n return false;\n }\n}\n/** ChunkStreamer is the base prototype for various streamer implementations. */\nclass ChunkStreamer {\n _handle;\n _config;\n _finished = false;\n _completed = false;\n _input = null;\n _baseIndex = 0;\n _partialLine = '';\n _rowCount = 0;\n _start = 0;\n isFirstChunk = true;\n _completeResults = {\n data: [],\n errors: [],\n meta: {}\n };\n constructor(config) {\n // Deep-copy the config so we can edit it\n var configCopy = { ...config };\n // @ts-expect-error\n configCopy.chunkSize = parseInt(configCopy.chunkSize); // parseInt VERY important so we don't concatenate strings!\n if (!config.step && !config.chunk) {\n configCopy.chunkSize = null; // disable Range header if not streaming; bad values break IIS - see issue #196\n }\n this._handle = new ParserHandle(configCopy);\n this._handle.streamer = this;\n this._config = configCopy; // persist the copy to the caller\n }\n parseChunk(chunk, isFakeChunk) {\n // First chunk pre-processing\n if (this.isFirstChunk && isFunction(this._config.beforeFirstChunk)) {\n var modifiedChunk = this._config.beforeFirstChunk(chunk);\n if (modifiedChunk !== undefined)\n chunk = modifiedChunk;\n }\n this.isFirstChunk = false;\n // Rejoin the line we likely just split in two by chunking the file\n var aggregate = this._partialLine + chunk;\n this._partialLine = '';\n var results = this._handle.parse(aggregate, this._baseIndex, !this._finished);\n if (this._handle.paused() || this._handle.aborted())\n return;\n var lastIndex = results.meta.cursor;\n if (!this._finished) {\n this._partialLine = aggregate.substring(lastIndex - this._baseIndex);\n this._baseIndex = lastIndex;\n }\n if (results && results.data)\n this._rowCount += results.data.length;\n var finishedIncludingPreview = this._finished || (this._config.preview && this._rowCount >= this._config.preview);\n if (isFunction(this._config.chunk) && !isFakeChunk) {\n this._config.chunk(results, this._handle);\n if (this._handle.paused() || this._handle.aborted())\n return;\n results = undefined;\n // @ts-expect-error\n this._completeResults = undefined;\n }\n if (!this._config.step && !this._config.chunk) {\n this._completeResults.data = this._completeResults.data.concat(results.data);\n this._completeResults.errors = this._completeResults.errors.concat(results.errors);\n this._completeResults.meta = results.meta;\n }\n if (!this._completed &&\n finishedIncludingPreview &&\n isFunction(this._config.complete) &&\n (!results || !results.meta.aborted)) {\n this._config.complete(this._completeResults, this._input);\n this._completed = true;\n }\n 
// if (!finishedIncludingPreview && (!results || !results.meta.paused)) this._nextChunk();\n return results;\n }\n _sendError(error) {\n if (isFunction(this._config.error))\n this._config.error(error);\n }\n}\nclass StringStreamer extends ChunkStreamer {\n remaining;\n constructor(config = {}) {\n super(config);\n }\n stream(s) {\n this.remaining = s;\n return this._nextChunk();\n }\n _nextChunk() {\n if (this._finished)\n return;\n var size = this._config.chunkSize;\n var chunk = size ? this.remaining.substr(0, size) : this.remaining;\n this.remaining = size ? this.remaining.substr(size) : '';\n this._finished = !this.remaining;\n return this.parseChunk(chunk);\n }\n}\nconst FLOAT = /^\\s*-?(\\d*\\.?\\d+|\\d+\\.?\\d*)(e[-+]?\\d+)?\\s*$/i;\nconst ISO_DATE = /(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))/;\n// Use one ParserHandle per entire CSV file or string\nclass ParserHandle {\n _config;\n /** Number of times step was called (number of rows parsed) */\n _stepCounter = 0;\n /** Number of rows that have been parsed so far */\n _rowCounter = 0;\n /** The input being parsed */\n _input;\n /** The core parser being used */\n _parser;\n /** Whether we are paused or not */\n _paused = false;\n /** Whether the parser has aborted or not */\n _aborted = false;\n /** Temporary state between delimiter detection and processing results */\n _delimiterError = false;\n /** Fields are from the header row of the input, if there is one */\n _fields = [];\n /** The last results returned from the parser */\n _results = {\n data: [],\n errors: [],\n meta: {}\n };\n constructor(_config) {\n // One goal is to minimize the use of regular expressions...\n if (isFunction(_config.step)) {\n var userStep = _config.step;\n _config.step = (results) => {\n this._results = results;\n if (this.needsHeaderRow()) {\n this.processResults();\n }\n // only call user's step function after header row\n else {\n this.processResults();\n // It's possbile that this line was empty and there's no row here after all\n if (!this._results.data || this._results.data.length === 0)\n return;\n this._stepCounter += results.data.length;\n if (_config.preview && this._stepCounter > _config.preview) {\n this._parser.abort();\n }\n else {\n userStep(this._results, this);\n }\n }\n };\n }\n this._config = _config;\n }\n /**\n * Parses input. Most users won't need, and shouldn't mess with, the baseIndex\n * and ignoreLastRow parameters. 
They are used by streamers (wrapper functions)\n * when an input comes in multiple chunks, like from a file.\n */\n parse(input, baseIndex, ignoreLastRow) {\n var quoteChar = this._config.quoteChar || '\"';\n if (!this._config.newline)\n this._config.newline = guessLineEndings(input, quoteChar);\n this._delimiterError = false;\n if (!this._config.delimiter) {\n var delimGuess = this.guessDelimiter(input, this._config.newline, this._config.skipEmptyLines, this._config.comments, this._config.delimitersToGuess);\n if (delimGuess.successful) {\n this._config.delimiter = delimGuess.bestDelimiter;\n }\n else {\n this._delimiterError = true; // add error after parsing (otherwise it would be overwritten)\n this._config.delimiter = Papa.DefaultDelimiter;\n }\n this._results.meta.delimiter = this._config.delimiter;\n }\n else if (isFunction(this._config.delimiter)) {\n this._config.delimiter = this._config.delimiter(input);\n this._results.meta.delimiter = this._config.delimiter;\n }\n var parserConfig = copy(this._config);\n if (this._config.preview && this._config.header)\n parserConfig.preview++; // to compensate for header row\n this._input = input;\n this._parser = new Parser(parserConfig);\n this._results = this._parser.parse(this._input, baseIndex, ignoreLastRow);\n this.processResults();\n return this._paused ? { meta: { paused: true } } : this._results || { meta: { paused: false } };\n }\n paused() {\n return this._paused;\n }\n pause() {\n this._paused = true;\n this._parser.abort();\n this._input = this._input.substr(this._parser.getCharIndex());\n }\n resume() {\n this._paused = false;\n // @ts-expect-error\n this.streamer.parseChunk(this._input, true);\n }\n aborted() {\n return this._aborted;\n }\n abort() {\n this._aborted = true;\n this._parser.abort();\n this._results.meta.aborted = true;\n if (isFunction(this._config.complete)) {\n this._config.complete(this._results);\n }\n this._input = '';\n }\n testEmptyLine(s) {\n return this._config.skipEmptyLines === 'greedy'\n ? 
s.join('').trim() === ''\n : s.length === 1 && s[0].length === 0;\n }\n processResults() {\n if (this._results && this._delimiterError) {\n this.addError('Delimiter', 'UndetectableDelimiter', \"Unable to auto-detect delimiting character; defaulted to '\" + Papa.DefaultDelimiter + \"'\");\n this._delimiterError = false;\n }\n if (this._config.skipEmptyLines) {\n for (var i = 0; i < this._results.data.length; i++)\n if (this.testEmptyLine(this._results.data[i]))\n this._results.data.splice(i--, 1);\n }\n if (this.needsHeaderRow()) {\n this.fillHeaderFields();\n }\n return this.applyHeaderAndDynamicTypingAndTransformation();\n }\n needsHeaderRow() {\n return this._config.header && this._fields.length === 0;\n }\n fillHeaderFields() {\n if (!this._results)\n return;\n const addHeder = (header) => {\n if (isFunction(this._config.transformHeader))\n header = this._config.transformHeader(header);\n this._fields.push(header);\n };\n if (Array.isArray(this._results.data[0])) {\n for (var i = 0; this.needsHeaderRow() && i < this._results.data.length; i++)\n this._results.data[i].forEach(addHeder);\n this._results.data.splice(0, 1);\n }\n // if _results.data[0] is not an array, we are in a step where _results.data is the row.\n else {\n this._results.data.forEach(addHeder);\n }\n }\n shouldApplyDynamicTyping(field) {\n // Cache function values to avoid calling it for each row\n if (this._config.dynamicTypingFunction && this._config.dynamicTyping[field] === undefined) {\n this._config.dynamicTyping[field] = this._config.dynamicTypingFunction(field);\n }\n return (this._config.dynamicTyping[field] || this._config.dynamicTyping) === true;\n }\n parseDynamic(field, value) {\n if (this.shouldApplyDynamicTyping(field)) {\n if (value === 'true' || value === 'TRUE')\n return true;\n else if (value === 'false' || value === 'FALSE')\n return false;\n else if (FLOAT.test(value))\n return parseFloat(value);\n else if (ISO_DATE.test(value))\n return new Date(value);\n else\n return value === '' ? null : value;\n }\n return value;\n }\n applyHeaderAndDynamicTypingAndTransformation() {\n if (!this._results ||\n !this._results.data ||\n (!this._config.header && !this._config.dynamicTyping && !this._config.transform)) {\n return this._results;\n }\n var incrementBy = 1;\n if (!this._results.data[0] || Array.isArray(this._results.data[0])) {\n this._results.data = this._results.data.map(this.processRow.bind(this));\n incrementBy = this._results.data.length;\n }\n else {\n // @ts-expect-error\n this._results.data = this.processRow(this._results.data, 0);\n }\n if (this._config.header && this._results.meta)\n this._results.meta.fields = this._fields;\n this._rowCounter += incrementBy;\n return this._results;\n }\n processRow(rowSource, i) {\n var row = this._config.header ? {} : [];\n var j;\n for (j = 0; j < rowSource.length; j++) {\n var field = j;\n var value = rowSource[j];\n if (this._config.header)\n field = j >= this._fields.length ? 
'__parsed_extra' : this._fields[j];\n if (this._config.transform)\n value = this._config.transform(value, field);\n value = this.parseDynamic(field, value);\n if (field === '__parsed_extra') {\n row[field] = row[field] || [];\n row[field].push(value);\n }\n else\n row[field] = value;\n }\n if (this._config.header) {\n if (j > this._fields.length)\n this.addError('FieldMismatch', 'TooManyFields', 'Too many fields: expected ' + this._fields.length + ' fields but parsed ' + j, this._rowCounter + i);\n else if (j < this._fields.length)\n this.addError('FieldMismatch', 'TooFewFields', 'Too few fields: expected ' + this._fields.length + ' fields but parsed ' + j, this._rowCounter + i);\n }\n return row;\n }\n guessDelimiter(input, newline, skipEmptyLines, comments, delimitersToGuess) {\n var bestDelim, bestDelta, fieldCountPrevRow;\n delimitersToGuess = delimitersToGuess || [',', '\\t', '|', ';', Papa.RECORD_SEP, Papa.UNIT_SEP];\n for (var i = 0; i < delimitersToGuess.length; i++) {\n var delim = delimitersToGuess[i];\n var delta = 0, avgFieldCount = 0, emptyLinesCount = 0;\n fieldCountPrevRow = undefined;\n var preview = new Parser({\n comments: comments,\n delimiter: delim,\n newline: newline,\n preview: 10\n }).parse(input);\n for (var j = 0; j < preview.data.length; j++) {\n if (skipEmptyLines && this.testEmptyLine(preview.data[j])) {\n emptyLinesCount++;\n continue;\n }\n var fieldCount = preview.data[j].length;\n avgFieldCount += fieldCount;\n if (typeof fieldCountPrevRow === 'undefined') {\n fieldCountPrevRow = 0;\n continue;\n }\n else if (fieldCount > 1) {\n delta += Math.abs(fieldCount - fieldCountPrevRow);\n fieldCountPrevRow = fieldCount;\n }\n }\n if (preview.data.length > 0)\n avgFieldCount /= preview.data.length - emptyLinesCount;\n if ((typeof bestDelta === 'undefined' || delta > bestDelta) && avgFieldCount > 1.99) {\n bestDelta = delta;\n bestDelim = delim;\n }\n }\n this._config.delimiter = bestDelim;\n return {\n successful: !!bestDelim,\n bestDelimiter: bestDelim\n };\n }\n addError(type, code, msg, row) {\n this._results.errors.push({\n type: type,\n code: code,\n message: msg,\n row: row\n });\n }\n}\nfunction guessLineEndings(input, quoteChar) {\n input = input.substr(0, 1024 * 1024); // max length 1 MB\n // Replace all the text inside quotes\n var re = new RegExp(escapeRegExp(quoteChar) + '([^]*?)' + escapeRegExp(quoteChar), 'gm');\n input = input.replace(re, '');\n var r = input.split('\\r');\n var n = input.split('\\n');\n var nAppearsFirst = n.length > 1 && n[0].length < r[0].length;\n if (r.length === 1 || nAppearsFirst)\n return '\\n';\n var numWithN = 0;\n for (var i = 0; i < r.length; i++) {\n if (r[i][0] === '\\n')\n numWithN++;\n }\n return numWithN >= r.length / 2 ? 
'\\r\\n' : '\\r';\n}\n/** https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions */\nfunction escapeRegExp(string) {\n return string.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&'); // $& means the whole matched string\n}\n/** The core parser implements speedy and correct CSV parsing */\nfunction Parser(config) {\n // Unpack the config object\n config = config || {};\n var delim = config.delimiter;\n var newline = config.newline;\n var comments = config.comments;\n var step = config.step;\n var preview = config.preview;\n var fastMode = config.fastMode;\n var quoteChar;\n /** Allows for no quoteChar by setting quoteChar to undefined in config */\n if (config.quoteChar === undefined) {\n quoteChar = '\"';\n }\n else {\n quoteChar = config.quoteChar;\n }\n var escapeChar = quoteChar;\n if (config.escapeChar !== undefined) {\n escapeChar = config.escapeChar;\n }\n // Delimiter must be valid\n if (typeof delim !== 'string' || Papa.BAD_DELIMITERS.indexOf(delim) > -1)\n delim = ',';\n // Comment character must be valid\n if (comments === delim)\n throw new Error('Comment character same as delimiter');\n else if (comments === true)\n comments = '#';\n else if (typeof comments !== 'string' || Papa.BAD_DELIMITERS.indexOf(comments) > -1)\n comments = false;\n // Newline must be valid: \\r, \\n, or \\r\\n\n if (newline !== '\\n' && newline !== '\\r' && newline !== '\\r\\n')\n newline = '\\n';\n // We're gonna need these at the Parser scope\n var cursor = 0;\n var aborted = false;\n // @ts-expect-error\n this.parse = function (input, baseIndex, ignoreLastRow) {\n // For some reason, in Chrome, this speeds things up (!?)\n if (typeof input !== 'string')\n throw new Error('Input must be a string');\n // We don't need to compute some of these every time parse() is called,\n // but having them in a more local scope seems to perform better\n var inputLen = input.length, delimLen = delim.length, newlineLen = newline.length, commentsLen = comments.length;\n var stepIsFunction = isFunction(step);\n // Establish starting state\n cursor = 0;\n var data = [], errors = [], row = [], lastCursor = 0;\n if (!input)\n return returnable();\n if (fastMode || (fastMode !== false && input.indexOf(quoteChar) === -1)) {\n var rows = input.split(newline);\n for (var i = 0; i < rows.length; i++) {\n const row = rows[i];\n cursor += row.length;\n if (i !== rows.length - 1)\n cursor += newline.length;\n else if (ignoreLastRow)\n return returnable();\n if (comments && row.substr(0, commentsLen) === comments)\n continue;\n if (stepIsFunction) {\n data = [];\n pushRow(row.split(delim));\n doStep();\n if (aborted)\n return returnable();\n }\n else\n pushRow(row.split(delim));\n if (preview && i >= preview) {\n data = data.slice(0, preview);\n return returnable(true);\n }\n }\n return returnable();\n }\n var nextDelim = input.indexOf(delim, cursor);\n var nextNewline = input.indexOf(newline, cursor);\n var quoteCharRegex = new RegExp(escapeRegExp(escapeChar) + escapeRegExp(quoteChar), 'g');\n var quoteSearch;\n // Parser loop\n for (;;) {\n // Field has opening quote\n if (input[cursor] === quoteChar) {\n // Start our search for the closing quote where the cursor is\n quoteSearch = cursor;\n // Skip the opening quote\n cursor++;\n for (;;) {\n // Find closing quote\n quoteSearch = input.indexOf(quoteChar, quoteSearch + 1);\n //No other quotes are found - no other delimiters\n if (quoteSearch === -1) {\n if (!ignoreLastRow) {\n // No closing quote... 
what a pity\n errors.push({\n type: 'Quotes',\n code: 'MissingQuotes',\n message: 'Quoted field unterminated',\n row: data.length, // row has yet to be inserted\n index: cursor\n });\n }\n return finish();\n }\n // Closing quote at EOF\n if (quoteSearch === inputLen - 1) {\n var value = input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar);\n return finish(value);\n }\n // If this quote is escaped, it's part of the data; skip it\n // If the quote character is the escape character, then check if the next character is the escape character\n if (quoteChar === escapeChar && input[quoteSearch + 1] === escapeChar) {\n quoteSearch++;\n continue;\n }\n // If the quote character is not the escape character, then check if the previous character was the escape character\n if (quoteChar !== escapeChar &&\n quoteSearch !== 0 &&\n input[quoteSearch - 1] === escapeChar) {\n continue;\n }\n // Check up to nextDelim or nextNewline, whichever is closest\n var checkUpTo = nextNewline === -1 ? nextDelim : Math.min(nextDelim, nextNewline);\n var spacesBetweenQuoteAndDelimiter = extraSpaces(checkUpTo);\n // Closing quote followed by delimiter or 'unnecessary spaces + delimiter'\n if (input[quoteSearch + 1 + spacesBetweenQuoteAndDelimiter] === delim) {\n row.push(input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar));\n cursor = quoteSearch + 1 + spacesBetweenQuoteAndDelimiter + delimLen;\n nextDelim = input.indexOf(delim, cursor);\n nextNewline = input.indexOf(newline, cursor);\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n break;\n }\n var spacesBetweenQuoteAndNewLine = extraSpaces(nextNewline);\n // Closing quote followed by newline or 'unnecessary spaces + newLine'\n if (input.substr(quoteSearch + 1 + spacesBetweenQuoteAndNewLine, newlineLen) === newline) {\n row.push(input.substring(cursor, quoteSearch).replace(quoteCharRegex, quoteChar));\n saveRow(quoteSearch + 1 + spacesBetweenQuoteAndNewLine + newlineLen);\n nextDelim = input.indexOf(delim, cursor); // because we may have skipped the nextDelim in the quoted field\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n break;\n }\n // Checks for valid closing quotes are complete (escaped quotes or quote followed by EOF/delimiter/newline) -- assume these quotes are part of an invalid text string\n errors.push({\n type: 'Quotes',\n code: 'InvalidQuotes',\n message: 'Trailing quote on quoted field is malformed',\n row: data.length, // row has yet to be inserted\n index: cursor\n });\n quoteSearch++;\n continue;\n }\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n continue;\n }\n // Comment found at start of new line\n if (comments && row.length === 0 && input.substr(cursor, commentsLen) === comments) {\n if (nextNewline === -1)\n // Comment ends at EOF\n return returnable();\n cursor = nextNewline + newlineLen;\n nextNewline = input.indexOf(newline, cursor);\n nextDelim = input.indexOf(delim, cursor);\n continue;\n }\n // Next delimiter comes before next newline, so we've reached end of field\n if (nextDelim !== -1 && (nextDelim < nextNewline || nextNewline === -1)) {\n row.push(input.substring(cursor, nextDelim));\n cursor = nextDelim + delimLen;\n nextDelim = input.indexOf(delim, cursor);\n continue;\n }\n // End of row\n if 
(nextNewline !== -1) {\n row.push(input.substring(cursor, nextNewline));\n saveRow(nextNewline + newlineLen);\n if (stepIsFunction) {\n doStep();\n if (aborted)\n return returnable();\n }\n if (preview && data.length >= preview)\n return returnable(true);\n continue;\n }\n break;\n }\n return finish();\n function pushRow(row) {\n data.push(row);\n lastCursor = cursor;\n }\n /**\n * checks if there are extra spaces after closing quote and given index without any text\n * if Yes, returns the number of spaces\n */\n function extraSpaces(index) {\n var spaceLength = 0;\n if (index !== -1) {\n var textBetweenClosingQuoteAndIndex = input.substring(quoteSearch + 1, index);\n if (textBetweenClosingQuoteAndIndex && textBetweenClosingQuoteAndIndex.trim() === '') {\n spaceLength = textBetweenClosingQuoteAndIndex.length;\n }\n }\n return spaceLength;\n }\n /**\n * Appends the remaining input from cursor to the end into\n * row, saves the row, calls step, and returns the results.\n */\n function finish(value) {\n if (ignoreLastRow)\n return returnable();\n if (typeof value === 'undefined')\n value = input.substr(cursor);\n row.push(value);\n cursor = inputLen; // important in case parsing is paused\n pushRow(row);\n if (stepIsFunction)\n doStep();\n return returnable();\n }\n /**\n * Appends the current row to the results. It sets the cursor\n * to newCursor and finds the nextNewline. The caller should\n * take care to execute user's step function and check for\n * preview and end parsing if necessary.\n */\n function saveRow(newCursor) {\n cursor = newCursor;\n pushRow(row);\n row = [];\n nextNewline = input.indexOf(newline, cursor);\n }\n /** Returns an object with the results, errors, and meta. */\n function returnable(stopped, step) {\n var isStep = step || false;\n return {\n data: isStep ? data[0] : data,\n errors: errors,\n meta: {\n delimiter: delim,\n linebreak: newline,\n aborted: aborted,\n truncated: !!stopped,\n cursor: lastCursor + (baseIndex || 0)\n }\n };\n }\n /** Executes the user's step function and resets data & errors. */\n function doStep() {\n step(returnable(undefined, true));\n data = [];\n errors = [];\n }\n };\n /** Sets the abort flag */\n // @ts-expect-error\n this.abort = function () {\n aborted = true;\n };\n /** Gets the cursor position */\n // @ts-expect-error\n this.getCharIndex = function () {\n return cursor;\n };\n}\n/** Makes a deep copy of an array or object (mostly) */\nfunction copy(obj) {\n if (typeof obj !== 'object' || obj === null)\n return obj;\n var cpy = Array.isArray(obj) ? 
[] : {};\n for (var key in obj)\n cpy[key] = copy(obj[key]);\n return cpy;\n}\nfunction isFunction(func) {\n return typeof func === 'function';\n}\nconst Papa = {\n parse: CsvToJson,\n unparse: JsonToCsv,\n RECORD_SEP: String.fromCharCode(30),\n UNIT_SEP: String.fromCharCode(31),\n BYTE_ORDER_MARK,\n BAD_DELIMITERS: ['\\r', '\\n', '\"', BYTE_ORDER_MARK],\n WORKERS_SUPPORTED: false, // !IS_WORKER && !!globalThis.Worker\n NODE_STREAM_INPUT: 1,\n // Configurable chunk sizes for local and remote files, respectively\n LocalChunkSize: 1024 * 1024 * 10, // 10 M,\n RemoteChunkSize: 1024 * 1024 * 5, // 5 M,\n DefaultDelimiter: ',', // Used if not specified and detection fail,\n // Exposed for testing and development only\n Parser: Parser,\n ParserHandle: ParserHandle,\n // BEGIN FORK\n ChunkStreamer: ChunkStreamer\n};\nexport default Papa;\n", "// @ts-nocheck\n// A custom papaparse `Streamer` for async iterators\n// Ideally this can be contributed back to papaparse\n// Or papaparse can expose Streamer API so we can extend without forking.\n/* eslint-disable no-invalid-this */\n// Note: papaparse is not an ES6 module\nimport Papa from \"./papaparse.js\";\nconst { ChunkStreamer } = Papa;\nexport default class AsyncIteratorStreamer extends ChunkStreamer {\n textDecoder = new TextDecoder(this._config.encoding);\n constructor(config = {}) {\n super(config);\n }\n // Implement ChunkStreamer base class methods\n // this.pause = function() {\n // ChunkStreamer.prototype.pause.apply(this, arguments);\n // };\n // this.resume = function() {\n // ChunkStreamer.prototype.resume.apply(this, arguments);\n // this._input.resume();\n // };\n async stream(asyncIterator) {\n this._input = asyncIterator;\n try {\n // ES2018 version\n // TODO - check for pause and abort flags?\n for await (const chunk of asyncIterator) {\n this.parseChunk(this.getStringChunk(chunk));\n }\n // ES5 VERSION\n // while (true) {\n // asyncIterator.next().then(function(value) {\n // if (value.done) {\n // // finalize iterator?\n // }\n // }\n // const = await ;\n // if (done) return total;\n // total += value.length;\n // }\n this._finished = true;\n this.parseChunk('');\n }\n catch (error) {\n // Inform ChunkStreamer base class of error\n this._sendError(error);\n }\n }\n _nextChunk() {\n // Left empty, as async iterator automatically pulls next chunk\n }\n // HELPER METHODS\n getStringChunk(chunk) {\n return typeof chunk === 'string' ? chunk : this.textDecoder.decode(chunk, { stream: true });\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// Copyright 2022 Foursquare Labs, Inc.\nimport { makeArrayRowIterator, getTableNumCols } from '@loaders.gl/schema';\nimport { csvFormatRows } from 'd3-dsv';\n/**\n * Encode a Table object as CSV\n */\nexport function encodeTableAsCSV(table, options = { csv: { useDisplayNames: true } }) {\n const useDisplayNames = options.useDisplayNames || options.csv?.useDisplayNames;\n const fields = table.schema?.fields || [];\n const columnNames = fields.map((f) => {\n // This is a leaky abstraction, assuming Kepler metadata\n const displayName = f.metadata?.displayName;\n return useDisplayNames && typeof displayName === 'string' ? 
displayName : f.name;\n });\n const formattedData = [columnNames];\n for (const row of makeArrayRowIterator(table)) {\n const formattedRow = [];\n for (let columnIndex = 0; columnIndex < getTableNumCols(table); ++columnIndex) {\n const value = row[columnIndex];\n formattedRow[columnIndex] = preformatFieldValue(value);\n }\n formattedData.push(formattedRow);\n }\n return csvFormatRows(formattedData);\n}\n/**\n * Stringifies a value\n * @todo Why is it called parse?\n */\nconst preformatFieldValue = (value) => {\n if (value === null || value === undefined) {\n // TODO: It would be nice to distinguish between missing values and the empty string\n // https://github.com/d3/d3-dsv/issues/84\n return null;\n }\n if (value instanceof Date) {\n // d3-dsv formats dates without timezones if they don't have time info;\n // this forces them to always use fully-qualified ISO time strings\n return value.toISOString();\n }\n if (typeof value === 'object') {\n return JSON.stringify(value);\n }\n return String(value);\n};\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { encodeTableAsCSV } from \"./lib/encoders/encode-csv.js\";\nexport const CSVWriter = {\n id: 'csv',\n version: 'latest',\n module: 'csv',\n name: 'CSV',\n extensions: ['csv'],\n mimeTypes: ['text/csv'],\n options: {\n csv: {\n useDisplayNames: false\n }\n },\n text: true,\n encode: async (table, options) => new TextEncoder().encode(encodeTableAsCSV(table, options)).buffer,\n encodeTextSync: (table, options) => encodeTableAsCSV(table, options)\n};\n"],
|
|
5
5
|
"mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACGA,oBAAqF;;;ACiBrF,IAAM,kBAAkB;AACxB,SAAS,UAAU,QAAQ,UAAU,CAAC,GAAG,WAAW,gBAAgB;AAChE,YAAU,WAAW,CAAC;AACtB,MAAI,gBAAgB,QAAQ,iBAAiB;AAC7C,MAAI,WAAW,aAAa,GAAG;AAC3B,YAAQ,wBAAwB;AAEhC,oBAAgB,CAAC;AAAA,EACrB;AACA,UAAQ,gBAAgB;AACxB,UAAQ,YAAY,WAAW,QAAQ,SAAS,IAAI,QAAQ,YAAY;AACxE,MAAI,WAAW,IAAI,SAAS,OAAO;AACnC,SAAO,SAAS,OAAO,MAAM;AACjC;AACA,SAAS,UAAU,QAAQ,SAAS;AAGhC,MAAI,UAAU;AAEd,MAAI,eAAe;AAEnB,MAAI,aAAa;AAEjB,MAAI,WAAW;AAEf,MAAI,aAAa;AAEjB,MAAI,gBAAgB,aAAa;AAEjC,MAAI,kBAAkB;AAEtB,MAAI,WAAW;AACf,eAAa;AACb,MAAI,iBAAiB,IAAI,OAAO,aAAa,UAAU,GAAG,GAAG;AAC7D,MAAI,OAAO,WAAW;AAClB,aAAS,KAAK,MAAM,MAAM;AAC9B,MAAI,MAAM,QAAQ,MAAM,GAAG;AACvB,QAAI,CAAC,OAAO,UAAU,MAAM,QAAQ,OAAO,CAAC,CAAC;AACzC,aAAO,UAAU,MAAM,QAAQ,eAAe;AAAA,aACzC,OAAO,OAAO,CAAC,MAAM;AAC1B,aAAO,UAAU,YAAY,OAAO,KAAK,OAAO,CAAC,CAAC,GAAG,QAAQ,eAAe;AAAA,EACpF,WACS,OAAO,WAAW,UAAU;AACjC,QAAI,OAAO,OAAO,SAAS;AACvB,aAAO,OAAO,KAAK,MAAM,OAAO,IAAI;AACxC,QAAI,MAAM,QAAQ,OAAO,IAAI,GAAG;AAC5B,UAAI,CAAC,OAAO;AACR,eAAO,SAAS,OAAO,QAAQ,OAAO,KAAK;AAC/C,UAAI,CAAC,OAAO;AACR,eAAO,SAAS,MAAM,QAAQ,OAAO,KAAK,CAAC,CAAC,IAAI,OAAO,SAAS,OAAO,KAAK,OAAO,KAAK,CAAC,CAAC;AAC9F,UAAI,CAAC,MAAM,QAAQ,OAAO,KAAK,CAAC,CAAC,KAAK,OAAO,OAAO,KAAK,CAAC,MAAM;AAC5D,eAAO,OAAO,CAAC,OAAO,IAAI;AAAA,IAClC;AACA,WAAO,UAAU,OAAO,UAAU,CAAC,GAAG,OAAO,QAAQ,CAAC,GAAG,eAAe;AAAA,EAC5E;AAEA,QAAM,IAAI,MAAM,wCAAwC;AACxD,WAAS,eAAe;AACpB,QAAI,OAAO,YAAY;AACnB;AACJ,QAAI,OAAO,QAAQ,cAAc,YAC7B,CAAC,KAAK,eAAe,OAAO,SAAU,OAAO;AACzC,aAAO,QAAQ,UAAU,QAAQ,KAAK,MAAM;AAAA,IAChD,CAAC,EAAE,QAAQ;AACX,mBAAa,QAAQ;AAAA,IACzB;AACA,QAAI,OAAO,QAAQ,WAAW,aAAa,MAAM,QAAQ,QAAQ,MAAM;AACnE,gBAAU,QAAQ;AACtB,QAAI,OAAO,QAAQ,mBAAmB,aAAa,OAAO,QAAQ,mBAAmB;AACjF,wBAAkB,QAAQ;AAC9B,QAAI,OAAO,QAAQ,YAAY;AAC3B,iBAAW,QAAQ;AACvB,QAAI,OAAO,QAAQ,cAAc;AAC7B,mBAAa,QAAQ;AACzB,QAAI,OAAO,QAAQ,WAAW;AAC1B,qBAAe,QAAQ;AAC3B,QAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAChC,UAAI,QAAQ,QAAQ,WAAW;AAC3B,cAAM,IAAI,MAAM,yBAAyB;AAC7C,iBAAW,QAAQ;AAAA,IACvB;AACA,QAAI,QAAQ,eAAe,QAAW;AAClC,sBAAgB,QAAQ,aAAa;AAAA,IACzC;AAAA,EACJ;AAEA,WAAS,UAAU,QAAQ,MAAM,gBAAgB;AAC7C,QAAI,MAAM;AACV,QAAI,OAAO,WAAW;AAClB,eAAS,KAAK,MAAM,MAAM;AAC9B,QAAI,OAAO,SAAS;AAChB,aAAO,KAAK,MAAM,IAAI;AAC1B,QAAI,YAAY,MAAM,QAAQ,MAAM,KAAK,OAAO,SAAS;AACzD,QAAI,mBAAmB,CAAC,MAAM,QAAQ,KAAK,CAAC,CAAC;AAE7C,QAAI,aAAa,cAAc;AAC3B,eAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACpC,YAAI,IAAI;AACJ,iBAAO;AACX,eAAO,KAAK,OAAO,CAAC,GAAG,CAAC;AAAA,MAC5B;AACA,UAAI,KAAK,SAAS;AACd,eAAO;AAAA,IACf;AAEA,aAAS,MAAM,GAAG,MAAM,KAAK,QAAQ,OAAO;AACxC,UAAI,SAAS,YAAY,OAAO,SAAS,KAAK,GAAG,EAAE;AACnD,UAAI,YAAY;AAChB,UAAI,WAAW,YAAY,OAAO,KAAK,KAAK,GAAG,CAAC,EAAE,WAAW,IAAI,KAAK,GAAG,EAAE,WAAW;AACtF,UAAI,kBAAkB,CAAC,WAAW;AAC9B,oBACI,mBAAmB,WACb,KAAK,GAAG,EAAE,KAAK,EAAE,EAAE,KAAK,MAAM,KAC9B,KAAK,GAAG,EAAE,WAAW,KAAK,KAAK,GAAG,EAAE,CAAC,EAAE,WAAW;AAAA,MAChE;AACA,UAAI,mBAAmB,YAAY,WAAW;AAC1C,YAAI,OAAO,CAAC;AACZ,iBAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC7B,cAAI,KAAK,mBAAmB,OAAO,CAAC,IAAI;AACxC,eAAK,KAAK,KAAK,GAAG,EAAE,EAAE,CAAC;AAAA,QAC3B;AACA,oBAAY,KAAK,KAAK,EAAE,EAAE,KAAK,MAAM;AAAA,MACzC;AACA,UAAI,CAAC,WAAW;AACZ,iBAAS,MAAM,GAAG,MAAM,QAAQ,OAAO;AACnC,cAAI,MAAM,KAAK,CAAC;AACZ,mBAAO;AACX,cAAI,SAAS,aAAa,mBAAmB,OAAO,GAAG,IAAI;AAC3D,iBAAO,KAAK,KAAK,GAAG,EAAE,MAAM,GAAG,GAAG;AAAA,QACtC;AACA,YAAI,MAAM,KAAK,SAAS,MAAM,CAAC,kBAAmB,SAAS,KAAK,CAAC,WAAY;AACzE,iBAAO;AAAA,QACX;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAEA,WAAS,KAAK,KAAK,KAAK;AACpB,QAAI,OAAO,QAAQ,eAAe,QAAQ;AACtC,aAAO;AACX,QAAI,IAAI,gBAAgB;AACpB,aAAO,KAAK,UAAU,GAAG,EAAE,MAAM,GAAG,EAAE;AAC1C,UAAM,IAAI,SAAS,EAAE,QAAQ,gBAAgB,aAAa;AAC1D,QAAI,cAAe,OAAO,YAAY,aAAa,WAC9C,MAAM,QAAQ,OAAO,KAAK
,QAAQ,GAAG,KACtC,OAAO,KAAK,KAAK,cAAc,KAC/B,IAAI,QAAQ,UAAU,IAAI,MAC1B,IAAI,OAAO,CAAC,MAAM,OAClB,IAAI,OAAO,IAAI,SAAS,CAAC,MAAM;AACnC,WAAO,cAAc,aAAa,MAAM,aAAa;AAAA,EACzD;AACA,WAAS,OAAO,KAAK,YAAY;AAC7B,aAAS,IAAI,GAAG,IAAI,WAAW,QAAQ;AACnC,UAAI,IAAI,QAAQ,WAAW,CAAC,CAAC,IAAI;AAC7B,eAAO;AACf,WAAO;AAAA,EACX;AACJ;AAEA,IAAM,gBAAN,MAAoB;AAAA,EAChB;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,SAAS;AAAA,EACT,aAAa;AAAA,EACb,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,eAAe;AAAA,EACf,mBAAmB;AAAA,IACf,MAAM,CAAC;AAAA,IACP,QAAQ,CAAC;AAAA,IACT,MAAM,CAAC;AAAA,EACX;AAAA,EACA,YAAY,QAAQ;AAEhB,QAAI,aAAa,EAAE,GAAG,OAAO;AAE7B,eAAW,YAAY,SAAS,WAAW,SAAS;AACpD,QAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,OAAO;AAC/B,iBAAW,YAAY;AAAA,IAC3B;AACA,SAAK,UAAU,IAAI,aAAa,UAAU;AAC1C,SAAK,QAAQ,WAAW;AACxB,SAAK,UAAU;AAAA,EACnB;AAAA,EACA,WAAW,OAAO,aAAa;AAE3B,QAAI,KAAK,gBAAgB,WAAW,KAAK,QAAQ,gBAAgB,GAAG;AAChE,UAAI,gBAAgB,KAAK,QAAQ,iBAAiB,KAAK;AACvD,UAAI,kBAAkB;AAClB,gBAAQ;AAAA,IAChB;AACA,SAAK,eAAe;AAEpB,QAAI,YAAY,KAAK,eAAe;AACpC,SAAK,eAAe;AACpB,QAAI,UAAU,KAAK,QAAQ,MAAM,WAAW,KAAK,YAAY,CAAC,KAAK,SAAS;AAC5E,QAAI,KAAK,QAAQ,OAAO,KAAK,KAAK,QAAQ,QAAQ;AAC9C;AACJ,QAAI,YAAY,QAAQ,KAAK;AAC7B,QAAI,CAAC,KAAK,WAAW;AACjB,WAAK,eAAe,UAAU,UAAU,YAAY,KAAK,UAAU;AACnE,WAAK,aAAa;AAAA,IACtB;AACA,QAAI,WAAW,QAAQ;AACnB,WAAK,aAAa,QAAQ,KAAK;AACnC,QAAI,2BAA2B,KAAK,aAAc,KAAK,QAAQ,WAAW,KAAK,aAAa,KAAK,QAAQ;AACzG,QAAI,WAAW,KAAK,QAAQ,KAAK,KAAK,CAAC,aAAa;AAChD,WAAK,QAAQ,MAAM,SAAS,KAAK,OAAO;AACxC,UAAI,KAAK,QAAQ,OAAO,KAAK,KAAK,QAAQ,QAAQ;AAC9C;AACJ,gBAAU;AAEV,WAAK,mBAAmB;AAAA,IAC5B;AACA,QAAI,CAAC,KAAK,QAAQ,QAAQ,CAAC,KAAK,QAAQ,OAAO;AAC3C,WAAK,iBAAiB,OAAO,KAAK,iBAAiB,KAAK,OAAO,QAAQ,IAAI;AAC3E,WAAK,iBAAiB,SAAS,KAAK,iBAAiB,OAAO,OAAO,QAAQ,MAAM;AACjF,WAAK,iBAAiB,OAAO,QAAQ;AAAA,IACzC;AACA,QAAI,CAAC,KAAK,cACN,4BACA,WAAW,KAAK,QAAQ,QAAQ,MAC/B,CAAC,WAAW,CAAC,QAAQ,KAAK,UAAU;AACrC,WAAK,QAAQ,SAAS,KAAK,kBAAkB,KAAK,MAAM;AACxD,WAAK,aAAa;AAAA,IACtB;AAEA,WAAO;AAAA,EACX;AAAA,EACA,WAAW,OAAO;AACd,QAAI,WAAW,KAAK,QAAQ,KAAK;AAC7B,WAAK,QAAQ,MAAM,KAAK;AAAA,EAChC;AACJ;AACA,IAAM,iBAAN,cAA6B,cAAc;AAAA,EACvC;AAAA,EACA,YAAY,SAAS,CAAC,GAAG;AACrB,UAAM,MAAM;AAAA,EAChB;AAAA,EACA,OAAO,GAAG;AACN,SAAK,YAAY;AACjB,WAAO,KAAK,WAAW;AAAA,EAC3B;AAAA,EACA,aAAa;AACT,QAAI,KAAK;AACL;AACJ,QAAI,OAAO,KAAK,QAAQ;AACxB,QAAI,QAAQ,OAAO,KAAK,UAAU,OAAO,GAAG,IAAI,IAAI,KAAK;AACzD,SAAK,YAAY,OAAO,KAAK,UAAU,OAAO,IAAI,IAAI;AACtD,SAAK,YAAY,CAAC,KAAK;AACvB,WAAO,KAAK,WAAW,KAAK;AAAA,EAChC;AACJ;AACA,IAAM,QAAQ;AACd,IAAM,WAAW;AAEjB,IAAM,eAAN,MAAmB;AAAA,EACf;AAAA;AAAA,EAEA,eAAe;AAAA;AAAA,EAEf,cAAc;AAAA;AAAA,EAEd;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA,UAAU;AAAA;AAAA,EAEV,WAAW;AAAA;AAAA,EAEX,kBAAkB;AAAA;AAAA,EAElB,UAAU,CAAC;AAAA;AAAA,EAEX,WAAW;AAAA,IACP,MAAM,CAAC;AAAA,IACP,QAAQ,CAAC;AAAA,IACT,MAAM,CAAC;AAAA,EACX;AAAA,EACA,YAAY,SAAS;AAEjB,QAAI,WAAW,QAAQ,IAAI,GAAG;AAC1B,UAAI,WAAW,QAAQ;AACvB,cAAQ,OAAO,CAAC,YAAY;AACxB,aAAK,WAAW;AAChB,YAAI,KAAK,eAAe,GAAG;AACvB,eAAK,eAAe;AAAA,QACxB,OAEK;AACD,eAAK,eAAe;AAEpB,cAAI,CAAC,KAAK,SAAS,QAAQ,KAAK,SAAS,KAAK,WAAW;AACrD;AACJ,eAAK,gBAAgB,QAAQ,KAAK;AAClC,cAAI,QAAQ,WAAW,KAAK,eAAe,QAAQ,SAAS;AACxD,iBAAK,QAAQ,MAAM;AAAA,UACvB,OACK;AACD,qBAAS,KAAK,UAAU,IAAI;AAAA,UAChC;AAAA,QACJ;AAAA,MACJ;AAAA,IACJ;AACA,SAAK,UAAU;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,WAAW,eAAe;AACnC,QAAI,YAAY,KAAK,QAAQ,aAAa;AAC1C,QAAI,CAAC,KAAK,QAAQ;AACd,WAAK,QAAQ,UAAU,iBAAiB,OAAO,SAAS;AAC5D,SAAK,kBAAkB;AACvB,QAAI,CAAC,KAAK,QAAQ,WAAW;AACzB,UAAI,aAAa,KAAK,eAAe,OAAO,KAAK,QAAQ,SAAS,KAAK,QAAQ,gBAAgB,KAAK,QAAQ,UAAU,KAAK,QAAQ,iBAAiB;AACpJ,UAAI,WAAW,YAAY;AACvB,aAAK,QAAQ,YAAY,WAAW;AAAA,MACxC,OACK;AACD,aAAK,kBAAkB;AACvB,aAAK,QAAQ,YAAY,KAAK;AAAA,MAClC;AACA,WAAK,SAAS,KAAK,Y
AAY,KAAK,QAAQ;AAAA,IAChD,WACS,WAAW,KAAK,QAAQ,SAAS,GAAG;AACzC,WAAK,QAAQ,YAAY,KAAK,QAAQ,UAAU,KAAK;AACrD,WAAK,SAAS,KAAK,YAAY,KAAK,QAAQ;AAAA,IAChD;AACA,QAAI,eAAe,KAAK,KAAK,OAAO;AACpC,QAAI,KAAK,QAAQ,WAAW,KAAK,QAAQ;AACrC,mBAAa;AACjB,SAAK,SAAS;AACd,SAAK,UAAU,IAAI,OAAO,YAAY;AACtC,SAAK,WAAW,KAAK,QAAQ,MAAM,KAAK,QAAQ,WAAW,aAAa;AACxE,SAAK,eAAe;AACpB,WAAO,KAAK,UAAU,EAAE,MAAM,EAAE,QAAQ,KAAK,EAAE,IAAI,KAAK,YAAY,EAAE,MAAM,EAAE,QAAQ,MAAM,EAAE;AAAA,EAClG;AAAA,EACA,SAAS;AACL,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,QAAQ;AACJ,SAAK,UAAU;AACf,SAAK,QAAQ,MAAM;AACnB,SAAK,SAAS,KAAK,OAAO,OAAO,KAAK,QAAQ,aAAa,CAAC;AAAA,EAChE;AAAA,EACA,SAAS;AACL,SAAK,UAAU;AAEf,SAAK,SAAS,WAAW,KAAK,QAAQ,IAAI;AAAA,EAC9C;AAAA,EACA,UAAU;AACN,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,QAAQ;AACJ,SAAK,WAAW;AAChB,SAAK,QAAQ,MAAM;AACnB,SAAK,SAAS,KAAK,UAAU;AAC7B,QAAI,WAAW,KAAK,QAAQ,QAAQ,GAAG;AACnC,WAAK,QAAQ,SAAS,KAAK,QAAQ;AAAA,IACvC;AACA,SAAK,SAAS;AAAA,EAClB;AAAA,EACA,cAAc,GAAG;AACb,WAAO,KAAK,QAAQ,mBAAmB,WACjC,EAAE,KAAK,EAAE,EAAE,KAAK,MAAM,KACtB,EAAE,WAAW,KAAK,EAAE,CAAC,EAAE,WAAW;AAAA,EAC5C;AAAA,EACA,iBAAiB;AACb,QAAI,KAAK,YAAY,KAAK,iBAAiB;AACvC,WAAK,SAAS,aAAa,yBAAyB,+DAA+D,KAAK,mBAAmB,GAAG;AAC9I,WAAK,kBAAkB;AAAA,IAC3B;AACA,QAAI,KAAK,QAAQ,gBAAgB;AAC7B,eAAS,IAAI,GAAG,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC3C,YAAI,KAAK,cAAc,KAAK,SAAS,KAAK,CAAC,CAAC;AACxC,eAAK,SAAS,KAAK,OAAO,KAAK,CAAC;AAAA,IAC5C;AACA,QAAI,KAAK,eAAe,GAAG;AACvB,WAAK,iBAAiB;AAAA,IAC1B;AACA,WAAO,KAAK,6CAA6C;AAAA,EAC7D;AAAA,EACA,iBAAiB;AACb,WAAO,KAAK,QAAQ,UAAU,KAAK,QAAQ,WAAW;AAAA,EAC1D;AAAA,EACA,mBAAmB;AACf,QAAI,CAAC,KAAK;AACN;AACJ,UAAM,WAAW,CAAC,WAAW;AACzB,UAAI,WAAW,KAAK,QAAQ,eAAe;AACvC,iBAAS,KAAK,QAAQ,gBAAgB,MAAM;AAChD,WAAK,QAAQ,KAAK,MAAM;AAAA,IAC5B;AACA,QAAI,MAAM,QAAQ,KAAK,SAAS,KAAK,CAAC,CAAC,GAAG;AACtC,eAAS,IAAI,GAAG,KAAK,eAAe,KAAK,IAAI,KAAK,SAAS,KAAK,QAAQ;AACpE,aAAK,SAAS,KAAK,CAAC,EAAE,QAAQ,QAAQ;AAC1C,WAAK,SAAS,KAAK,OAAO,GAAG,CAAC;AAAA,IAClC,OAEK;AACD,WAAK,SAAS,KAAK,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACJ;AAAA,EACA,yBAAyB,OAAO;AAE5B,QAAI,KAAK,QAAQ,yBAAyB,KAAK,QAAQ,cAAc,KAAK,MAAM,QAAW;AACvF,WAAK,QAAQ,cAAc,KAAK,IAAI,KAAK,QAAQ,sBAAsB,KAAK;AAAA,IAChF;AACA,YAAQ,KAAK,QAAQ,cAAc,KAAK,KAAK,KAAK,QAAQ,mBAAmB;AAAA,EACjF;AAAA,EACA,aAAa,OAAO,OAAO;AACvB,QAAI,KAAK,yBAAyB,KAAK,GAAG;AACtC,UAAI,UAAU,UAAU,UAAU;AAC9B,eAAO;AAAA,eACF,UAAU,WAAW,UAAU;AACpC,eAAO;AAAA,eACF,MAAM,KAAK,KAAK;AACrB,eAAO,WAAW,KAAK;AAAA,eAClB,SAAS,KAAK,KAAK;AACxB,eAAO,IAAI,KAAK,KAAK;AAAA;AAErB,eAAO,UAAU,KAAK,OAAO;AAAA,IACrC;AACA,WAAO;AAAA,EACX;AAAA,EACA,+CAA+C;AAC3C,QAAI,CAAC,KAAK,YACN,CAAC,KAAK,SAAS,QACd,CAAC,KAAK,QAAQ,UAAU,CAAC,KAAK,QAAQ,iBAAiB,CAAC,KAAK,QAAQ,WAAY;AAClF,aAAO,KAAK;AAAA,IAChB;AACA,QAAI,cAAc;AAClB,QAAI,CAAC,KAAK,SAAS,KAAK,CAAC,KAAK,MAAM,QAAQ,KAAK,SAAS,KAAK,CAAC,CAAC,GAAG;AAChE,WAAK,SAAS,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK,WAAW,KAAK,IAAI,CAAC;AACtE,oBAAc,KAAK,SAAS,KAAK;AAAA,IACrC,OACK;AAED,WAAK,SAAS,OAAO,KAAK,WAAW,KAAK,SAAS,MAAM,CAAC;AAAA,IAC9D;AACA,QAAI,KAAK,QAAQ,UAAU,KAAK,SAAS;AACrC,WAAK,SAAS,KAAK,SAAS,KAAK;AACrC,SAAK,eAAe;AACpB,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,WAAW,WAAW,GAAG;AACrB,QAAI,MAAM,KAAK,QAAQ,SAAS,CAAC,IAAI,CAAC;AACtC,QAAI;AACJ,SAAK,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACnC,UAAI,QAAQ;AACZ,UAAI,QAAQ,UAAU,CAAC;AACvB,UAAI,KAAK,QAAQ;AACb,gBAAQ,KAAK,KAAK,QAAQ,SAAS,mBAAmB,KAAK,QAAQ,CAAC;AACxE,UAAI,KAAK,QAAQ;AACb,gBAAQ,KAAK,QAAQ,UAAU,OAAO,KAAK;AAC/C,cAAQ,KAAK,aAAa,OAAO,KAAK;AACtC,UAAI,UAAU,kBAAkB;AAC5B,YAAI,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC;AAC5B,YAAI,KAAK,EAAE,KAAK,KAAK;AAAA,MACzB;AAEI,YAAI,KAAK,IAAI;AAAA,IACrB;AACA,QAAI,KAAK,QAAQ,QAAQ;AACrB,UAAI,IAAI,KAAK,QAAQ;AACjB,aAAK,SAAS,iBAAiB,iBAAiB,+BAA+B,KAAK,QAAQ,SAAS,wBAAwB,GAAG,KAAK,cAAc,CAAC;AAAA,eAC/I,IAAI,KAAK,QAAQ;AACtB,aAAK
,SAAS,iBAAiB,gBAAgB,8BAA8B,KAAK,QAAQ,SAAS,wBAAwB,GAAG,KAAK,cAAc,CAAC;AAAA,IAC1J;AACA,WAAO;AAAA,EACX;AAAA,EACA,eAAe,OAAO,SAAS,gBAAgB,UAAU,mBAAmB;AACxE,QAAI,WAAW,WAAW;AAC1B,wBAAoB,qBAAqB,CAAC,KAAK,KAAM,KAAK,KAAK,KAAK,YAAY,KAAK,QAAQ;AAC7F,aAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AAC/C,UAAI,QAAQ,kBAAkB,CAAC;AAC/B,UAAI,QAAQ,GAAG,gBAAgB,GAAG,kBAAkB;AACpD,0BAAoB;AACpB,UAAI,UAAU,IAAI,OAAO;AAAA,QACrB;AAAA,QACA,WAAW;AAAA,QACX;AAAA,QACA,SAAS;AAAA,MACb,CAAC,EAAE,MAAM,KAAK;AACd,eAAS,IAAI,GAAG,IAAI,QAAQ,KAAK,QAAQ,KAAK;AAC1C,YAAI,kBAAkB,KAAK,cAAc,QAAQ,KAAK,CAAC,CAAC,GAAG;AACvD;AACA;AAAA,QACJ;AACA,YAAI,aAAa,QAAQ,KAAK,CAAC,EAAE;AACjC,yBAAiB;AACjB,YAAI,OAAO,sBAAsB,aAAa;AAC1C,8BAAoB;AACpB;AAAA,QACJ,WACS,aAAa,GAAG;AACrB,mBAAS,KAAK,IAAI,aAAa,iBAAiB;AAChD,8BAAoB;AAAA,QACxB;AAAA,MACJ;AACA,UAAI,QAAQ,KAAK,SAAS;AACtB,yBAAiB,QAAQ,KAAK,SAAS;AAC3C,WAAK,OAAO,cAAc,eAAe,QAAQ,cAAc,gBAAgB,MAAM;AACjF,oBAAY;AACZ,oBAAY;AAAA,MAChB;AAAA,IACJ;AACA,SAAK,QAAQ,YAAY;AACzB,WAAO;AAAA,MACH,YAAY,CAAC,CAAC;AAAA,MACd,eAAe;AAAA,IACnB;AAAA,EACJ;AAAA,EACA,SAAS,MAAM,MAAM,KAAK,KAAK;AAC3B,SAAK,SAAS,OAAO,KAAK;AAAA,MACtB;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT;AAAA,IACJ,CAAC;AAAA,EACL;AACJ;AACA,SAAS,iBAAiB,OAAO,WAAW;AACxC,UAAQ,MAAM,OAAO,GAAG,OAAO,IAAI;AAEnC,MAAI,KAAK,IAAI,OAAO,aAAa,SAAS,IAAI,YAAY,aAAa,SAAS,GAAG,IAAI;AACvF,UAAQ,MAAM,QAAQ,IAAI,EAAE;AAC5B,MAAI,IAAI,MAAM,MAAM,IAAI;AACxB,MAAI,IAAI,MAAM,MAAM,IAAI;AACxB,MAAI,gBAAgB,EAAE,SAAS,KAAK,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE;AACvD,MAAI,EAAE,WAAW,KAAK;AAClB,WAAO;AACX,MAAI,WAAW;AACf,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AAC/B,QAAI,EAAE,CAAC,EAAE,CAAC,MAAM;AACZ;AAAA,EACR;AACA,SAAO,YAAY,EAAE,SAAS,IAAI,SAAS;AAC/C;AAEA,SAAS,aAAa,QAAQ;AAC1B,SAAO,OAAO,QAAQ,uBAAuB,MAAM;AACvD;AAEA,SAAS,OAAO,QAAQ;AAEpB,WAAS,UAAU,CAAC;AACpB,MAAI,QAAQ,OAAO;AACnB,MAAI,UAAU,OAAO;AACrB,MAAI,WAAW,OAAO;AACtB,MAAI,OAAO,OAAO;AAClB,MAAI,UAAU,OAAO;AACrB,MAAI,WAAW,OAAO;AACtB,MAAI;AAEJ,MAAI,OAAO,cAAc,QAAW;AAChC,gBAAY;AAAA,EAChB,OACK;AACD,gBAAY,OAAO;AAAA,EACvB;AACA,MAAI,aAAa;AACjB,MAAI,OAAO,eAAe,QAAW;AACjC,iBAAa,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,UAAU,YAAY,KAAK,eAAe,QAAQ,KAAK,IAAI;AAClE,YAAQ;AAEZ,MAAI,aAAa;AACb,UAAM,IAAI,MAAM,qCAAqC;AAAA,WAChD,aAAa;AAClB,eAAW;AAAA,WACN,OAAO,aAAa,YAAY,KAAK,eAAe,QAAQ,QAAQ,IAAI;AAC7E,eAAW;AAEf,MAAI,YAAY,QAAQ,YAAY,QAAQ,YAAY;AACpD,cAAU;AAEd,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,OAAK,QAAQ,SAAU,OAAO,WAAW,eAAe;AAEpD,QAAI,OAAO,UAAU;AACjB,YAAM,IAAI,MAAM,wBAAwB;AAG5C,QAAI,WAAW,MAAM,QAAQ,WAAW,MAAM,QAAQ,aAAa,QAAQ,QAAQ,cAAc,SAAS;AAC1G,QAAI,iBAAiB,WAAW,IAAI;AAEpC,aAAS;AACT,QAAI,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,MAAM,CAAC,GAAG,aAAa;AACnD,QAAI,CAAC;AACD,aAAO,WAAW;AACtB,QAAI,YAAa,aAAa,SAAS,MAAM,QAAQ,SAAS,MAAM,IAAK;AACrE,UAAI,OAAO,MAAM,MAAM,OAAO;AAC9B,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,cAAMA,OAAM,KAAK,CAAC;AAClB,kBAAUA,KAAI;AACd,YAAI,MAAM,KAAK,SAAS;AACpB,oBAAU,QAAQ;AAAA,iBACb;AACL,iBAAO,WAAW;AACtB,YAAI,YAAYA,KAAI,OAAO,GAAG,WAAW,MAAM;AAC3C;AACJ,YAAI,gBAAgB;AAChB,iBAAO,CAAC;AACR,kBAAQA,KAAI,MAAM,KAAK,CAAC;AACxB,iBAAO;AACP,cAAI;AACA,mBAAO,WAAW;AAAA,QAC1B;AAEI,kBAAQA,KAAI,MAAM,KAAK,CAAC;AAC5B,YAAI,WAAW,KAAK,SAAS;AACzB,iBAAO,KAAK,MAAM,GAAG,OAAO;AAC5B,iBAAO,WAAW,IAAI;AAAA,QAC1B;AAAA,MACJ;AACA,aAAO,WAAW;AAAA,IACtB;AACA,QAAI,YAAY,MAAM,QAAQ,OAAO,MAAM;AAC3C,QAAI,cAAc,MAAM,QAAQ,SAAS,MAAM;AAC/C,QAAI,iBAAiB,IAAI,OAAO,aAAa,UAAU,IAAI,aAAa,SAAS,GAAG,GAAG;AACvF,QAAI;AAEJ,eAAS;AAEL,UAAI,MAAM,MAAM,MAAM,WAAW;AAE7B,sBAAc;AAEd;AACA,mBAAS;AAEL,wBAAc,MAAM,QAAQ,WAAW,cAAc,CAAC;AAEtD,cAAI,gBAAgB,IAAI;AACpB,gBAAI,CAAC,eAAe;AAEhB,qBAAO,KAAK;AAAA,gBACR,MAAM;AAAA,gBACN,MAAM;AAAA,gBACN,SAAS;AAAA,gBACT,KAAK,KAAK;AAAA;AAAA,gBACV,OAAO;AAAA,cACX,CAAC;AAAA,YACL;AACA,mBAAO,OAAO;AAAA,
UAClB;AAEA,cAAI,gBAAgB,WAAW,GAAG;AAC9B,gBAAI,QAAQ,MAAM,UAAU,QAAQ,WAAW,EAAE,QAAQ,gBAAgB,SAAS;AAClF,mBAAO,OAAO,KAAK;AAAA,UACvB;AAGA,cAAI,cAAc,cAAc,MAAM,cAAc,CAAC,MAAM,YAAY;AACnE;AACA;AAAA,UACJ;AAEA,cAAI,cAAc,cACd,gBAAgB,KAChB,MAAM,cAAc,CAAC,MAAM,YAAY;AACvC;AAAA,UACJ;AAEA,cAAI,YAAY,gBAAgB,KAAK,YAAY,KAAK,IAAI,WAAW,WAAW;AAChF,cAAI,iCAAiC,YAAY,SAAS;AAE1D,cAAI,MAAM,cAAc,IAAI,8BAA8B,MAAM,OAAO;AACnE,gBAAI,KAAK,MAAM,UAAU,QAAQ,WAAW,EAAE,QAAQ,gBAAgB,SAAS,CAAC;AAChF,qBAAS,cAAc,IAAI,iCAAiC;AAC5D,wBAAY,MAAM,QAAQ,OAAO,MAAM;AACvC,0BAAc,MAAM,QAAQ,SAAS,MAAM;AAC3C,gBAAI,gBAAgB;AAChB,qBAAO;AACP,kBAAI;AACA,uBAAO,WAAW;AAAA,YAC1B;AACA,gBAAI,WAAW,KAAK,UAAU;AAC1B,qBAAO,WAAW,IAAI;AAC1B;AAAA,UACJ;AACA,cAAI,+BAA+B,YAAY,WAAW;AAE1D,cAAI,MAAM,OAAO,cAAc,IAAI,8BAA8B,UAAU,MAAM,SAAS;AACtF,gBAAI,KAAK,MAAM,UAAU,QAAQ,WAAW,EAAE,QAAQ,gBAAgB,SAAS,CAAC;AAChF,oBAAQ,cAAc,IAAI,+BAA+B,UAAU;AACnE,wBAAY,MAAM,QAAQ,OAAO,MAAM;AACvC,gBAAI,gBAAgB;AAChB,qBAAO;AACP,kBAAI;AACA,uBAAO,WAAW;AAAA,YAC1B;AACA,gBAAI,WAAW,KAAK,UAAU;AAC1B,qBAAO,WAAW,IAAI;AAC1B;AAAA,UACJ;AAEA,iBAAO,KAAK;AAAA,YACR,MAAM;AAAA,YACN,MAAM;AAAA,YACN,SAAS;AAAA,YACT,KAAK,KAAK;AAAA;AAAA,YACV,OAAO;AAAA,UACX,CAAC;AACD;AACA;AAAA,QACJ;AACA,YAAI,gBAAgB;AAChB,iBAAO;AACP,cAAI;AACA,mBAAO,WAAW;AAAA,QAC1B;AACA,YAAI,WAAW,KAAK,UAAU;AAC1B,iBAAO,WAAW,IAAI;AAC1B;AAAA,MACJ;AAEA,UAAI,YAAY,IAAI,WAAW,KAAK,MAAM,OAAO,QAAQ,WAAW,MAAM,UAAU;AAChF,YAAI,gBAAgB;AAEhB,iBAAO,WAAW;AACtB,iBAAS,cAAc;AACvB,sBAAc,MAAM,QAAQ,SAAS,MAAM;AAC3C,oBAAY,MAAM,QAAQ,OAAO,MAAM;AACvC;AAAA,MACJ;AAEA,UAAI,cAAc,OAAO,YAAY,eAAe,gBAAgB,KAAK;AACrE,YAAI,KAAK,MAAM,UAAU,QAAQ,SAAS,CAAC;AAC3C,iBAAS,YAAY;AACrB,oBAAY,MAAM,QAAQ,OAAO,MAAM;AACvC;AAAA,MACJ;AAEA,UAAI,gBAAgB,IAAI;AACpB,YAAI,KAAK,MAAM,UAAU,QAAQ,WAAW,CAAC;AAC7C,gBAAQ,cAAc,UAAU;AAChC,YAAI,gBAAgB;AAChB,iBAAO;AACP,cAAI;AACA,mBAAO,WAAW;AAAA,QAC1B;AACA,YAAI,WAAW,KAAK,UAAU;AAC1B,iBAAO,WAAW,IAAI;AAC1B;AAAA,MACJ;AACA;AAAA,IACJ;AACA,WAAO,OAAO;AACd,aAAS,QAAQA,MAAK;AAClB,WAAK,KAAKA,IAAG;AACb,mBAAa;AAAA,IACjB;AAKA,aAAS,YAAY,OAAO;AACxB,UAAI,cAAc;AAClB,UAAI,UAAU,IAAI;AACd,YAAI,kCAAkC,MAAM,UAAU,cAAc,GAAG,KAAK;AAC5E,YAAI,mCAAmC,gCAAgC,KAAK,MAAM,IAAI;AAClF,wBAAc,gCAAgC;AAAA,QAClD;AAAA,MACJ;AACA,aAAO;AAAA,IACX;AAKA,aAAS,OAAOC,QAAO;AACnB,UAAI;AACA,eAAO,WAAW;AACtB,UAAI,OAAOA,WAAU;AACjB,QAAAA,SAAQ,MAAM,OAAO,MAAM;AAC/B,UAAI,KAAKA,MAAK;AACd,eAAS;AACT,cAAQ,GAAG;AACX,UAAI;AACA,eAAO;AACX,aAAO,WAAW;AAAA,IACtB;AAOA,aAAS,QAAQ,WAAW;AACxB,eAAS;AACT,cAAQ,GAAG;AACX,YAAM,CAAC;AACP,oBAAc,MAAM,QAAQ,SAAS,MAAM;AAAA,IAC/C;AAEA,aAAS,WAAW,SAASC,OAAM;AAC/B,UAAI,SAASA,SAAQ;AACrB,aAAO;AAAA,QACH,MAAM,SAAS,KAAK,CAAC,IAAI;AAAA,QACzB;AAAA,QACA,MAAM;AAAA,UACF,WAAW;AAAA,UACX,WAAW;AAAA,UACX;AAAA,UACA,WAAW,CAAC,CAAC;AAAA,UACb,QAAQ,cAAc,aAAa;AAAA,QACvC;AAAA,MACJ;AAAA,IACJ;AAEA,aAAS,SAAS;AACd,WAAK,WAAW,QAAW,IAAI,CAAC;AAChC,aAAO,CAAC;AACR,eAAS,CAAC;AAAA,IACd;AAAA,EACJ;AAGA,OAAK,QAAQ,WAAY;AACrB,cAAU;AAAA,EACd;AAGA,OAAK,eAAe,WAAY;AAC5B,WAAO;AAAA,EACX;AACJ;AAEA,SAAS,KAAK,KAAK;AACf,MAAI,OAAO,QAAQ,YAAY,QAAQ;AACnC,WAAO;AACX,MAAI,MAAM,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC;AACrC,WAAS,OAAO;AACZ,QAAI,GAAG,IAAI,KAAK,IAAI,GAAG,CAAC;AAC5B,SAAO;AACX;AACA,SAAS,WAAW,MAAM;AACtB,SAAO,OAAO,SAAS;AAC3B;AACA,IAAM,OAAO;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,YAAY,OAAO,aAAa,EAAE;AAAA,EAClC,UAAU,OAAO,aAAa,EAAE;AAAA,EAChC;AAAA,EACA,gBAAgB,CAAC,MAAM,MAAM,KAAK,eAAe;AAAA,EACjD,mBAAmB;AAAA;AAAA,EACnB,mBAAmB;AAAA;AAAA,EAEnB,gBAAgB,OAAO,OAAO;AAAA;AAAA,EAC9B,iBAAiB,OAAO,OAAO;AAAA;AAAA,EAC/B,kBAAkB;AAAA;AAAA;AAAA,EAElB;AAAA,EACA;AAAA;AAAA,EAEA;AACJ;AACA,IAAO,oBAAQ;;;ACr3Bf,IAAM,EAAE,eAAAC,eAAc,IAAI;AAC1B,IAAqB,wBAArB,cAAmDA,eAAc;AAAA,EAC7D,cAAc,IAAI,YAAY,KAAK,QAAQ,QAAQ;AAAA,E
ACnD,YAAY,SAAS,CAAC,GAAG;AACrB,UAAM,MAAM;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,OAAO,eAAe;AACxB,SAAK,SAAS;AACd,QAAI;AAGA,uBAAiB,SAAS,eAAe;AACrC,aAAK,WAAW,KAAK,eAAe,KAAK,CAAC;AAAA,MAC9C;AAYA,WAAK,YAAY;AACjB,WAAK,WAAW,EAAE;AAAA,IACtB,SACO,OAAP;AAEI,WAAK,WAAW,KAAK;AAAA,IACzB;AAAA,EACJ;AAAA,EACA,aAAa;AAAA,EAEb;AAAA;AAAA,EAEA,eAAe,OAAO;AAClB,WAAO,OAAO,UAAU,WAAW,QAAQ,KAAK,YAAY,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAAA,EAC9F;AACJ;;;AF/CA,IAAM,UAAU,OAAiC,UAAU;AAC3D,IAAM,oBAAoB;AACnB,IAAM,YAAY;AAAA,EACrB,UAAU;AAAA,EACV,WAAW;AAAA,EACX,IAAI;AAAA,EACJ,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,YAAY,CAAC,OAAO,OAAO,KAAK;AAAA,EAChC,WAAW,CAAC,YAAY,6BAA6B,UAAU;AAAA,EAC/D,UAAU;AAAA,EACV,OAAO,OAAO,aAAa,YAAY,SAAS,IAAI,YAAY,EAAE,OAAO,WAAW,GAAG,OAAO;AAAA,EAC9F,WAAW,CAAC,MAAM,YAAY,SAAS,MAAM,OAAO;AAAA,EACpD,gBAAgB;AAAA;AAAA;AAAA,EAGhB,SAAS;AAAA,IACL,KAAK;AAAA,MACD,OAAO;AAAA;AAAA,MACP,qBAAqB;AAAA;AAAA,MAErB,QAAQ;AAAA,MACR,cAAc;AAAA;AAAA;AAAA,MAGd,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,eAAe;AAAA,MACf,UAAU;AAAA,MACV,gBAAgB;AAAA;AAAA,MAEhB,mBAAmB,CAAC,KAAK,KAAM,KAAK,GAAG;AAAA;AAAA,IAE3C;AAAA,EACJ;AACJ;AACA,eAAe,SAAS,SAAS,SAAS;AAEtC,QAAM,aAAa,EAAE,GAAG,UAAU,QAAQ,KAAK,GAAG,mCAAS,IAAI;AAC/D,QAAM,WAAW,aAAa,OAAO;AACrC,QAAM,SAAS,WAAW,WAAW,SAAS,YAAY,QAAQ,IAAI,QAAQ,WAAW,MAAM;AAC/F,QAAM,kBAAkB;AACxB,QAAM,kBAAkB;AAAA;AAAA,IAEpB,GAAG;AAAA,IACH,QAAQ;AAAA,IACR,UAAU;AAAA;AAAA,IACV,iBAAiB,kBAAkB,2BAA2B,IAAI;AAAA,IAClE,OAAO,CAAC,MAAM;AACV,YAAM,IAAI,MAAM,CAAC;AAAA,IACrB;AAAA,EACJ;AACA,QAAM,SAAS,kBAAK,MAAM,SAAS,eAAe;AAClD,QAAM,OAAO,OAAO;AACpB,QAAM,YAAY,OAAO,KAAK,UAAU,eAAe,WAAW,cAAc,SAAS,MAAM;AAC/F,QAAM,QAAQ,WAAW,SAAS;AAClC,UAAQ,OAAO;AAAA,IACX,KAAK;AACD,aAAO;AAAA,QACH,OAAO;AAAA,QACP,MAAM,KAAK,IAAI,CAAC,QAAS,MAAM,QAAQ,GAAG,QAAI,kCAAmB,KAAK,SAAS,IAAI,GAAI;AAAA,MAC3F;AAAA,IACJ,KAAK;AACD,aAAO;AAAA,QACH,OAAO;AAAA,QACP,MAAM,KAAK,IAAI,CAAC,QAAS,MAAM,QAAQ,GAAG,IAAI,UAAM,iCAAkB,KAAK,SAAS,CAAE;AAAA,MAC1F;AAAA,IACJ;AACI,YAAM,IAAI,MAAM,KAAK;AAAA,EAC7B;AACJ;AAEA,SAAS,kBAAkB,eAAe,SAAS;AAG/C,YAAU,EAAE,GAAG,QAAQ;AACvB,MAAI,QAAQ,cAAc,QAAQ;AAC9B,YAAQ,YAAY;AAAA,EACxB;AAEA,QAAM,aAAa,EAAE,GAAG,UAAU,QAAQ,KAAK,GAAG,mCAAS,IAAI;AAC/D,QAAM,aAAa,IAAI,yBAAW;AAClC,MAAI,aAAa;AACjB,MAAI,YAAY;AAChB,MAAI,oBAAoB;AACxB,MAAI,SAAS;AACb,QAAM,SAAS;AAAA;AAAA,IAEX,GAAG;AAAA,IACH,QAAQ;AAAA;AAAA,IACR,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,IAIV,WAAW,OAAO,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMzB,gBAAgB;AAAA;AAAA;AAAA,IAGhB,KAAK,SAAS;AACV,UAAI,MAAM,QAAQ;AAClB,UAAI,WAAW,gBAAgB;AAE3B,cAAM,eAAe,IAAI,KAAK,EAAE,KAAK,EAAE,EAAE,KAAK;AAC9C,YAAI,iBAAiB,IAAI;AACrB;AAAA,QACJ;AAAA,MACJ;AACA,YAAM,YAAY,QAAQ,KAAK;AAE/B,UAAI,cAAc,CAAC,WAAW;AAE1B,cAAM,SAAS,WAAW,WAAW,SAAS,YAAY,GAAG,IAAI,QAAQ,WAAW,MAAM;AAC1F,YAAI,QAAQ;AACR,sBAAY,IAAI,IAAI,2BAA2B,CAAC;AAChD;AAAA,QACJ;AAAA,MACJ;AAEA,UAAI,YAAY;AACZ,qBAAa;AACb,YAAI,CAAC,WAAW;AACZ,sBAAY,eAAe,WAAW,cAAc,IAAI,MAAM;AAAA,QAClE;AACA,iBAAS,aAAa,KAAK,SAAS;AAAA,MACxC;AACA,UAAI,WAAW,qBAAqB;AAGhC,cAAM,KAAK,MAAM,KAAK,UAAU,GAAG,CAAC;AAAA,MACxC;AACA,YAAM,QAAQ,WAAW,SAAS;AAElC,0BACI,qBACI,IAAI;AAAA;AAAA,QAEJ;AAAA,QAAQ;AAAA,UACJ;AAAA,UACA,GAAG;AAAA,QACP;AAAA,MAAC;AACT,UAAI;AACA,0BAAkB,OAAO,GAAG;AAE5B,cAAM,QAAQ,qBAAqB,kBAAkB,aAAa,EAAE,UAAU,CAAC;AAC/E,YAAI,OAAO;AACP,qBAAW,QAAQ,KAAK;AAAA,QAC5B;AAAA,MACJ,SACO,OAAP;AACI,mBAAW,QAAQ,KAAK;AAAA,MAC5B;AAAA,IACJ;AAAA;AAAA,IAEA,SAAS,SAAS;AACd,UAAI;AACA,cAAM,YAAY,QAAQ,KAAK;AAE/B,cAAM,QAAQ,qBAAqB,kBAAkB,cAAc,EAAE,UAAU,CAAC;AAChF,YAAI,OAAO;AACP,qBAAW,QAAQ,KAAK;AAAA,QAC5B;AAAA,MACJ,SACO,OAAP;AACI,mBAAW,QAAQ,KAAK;AAAA,MAC5B;AACA,iBAAW,MAAM;AAAA,IACrB;AAAA,EACJ;AACA,oBAAK,MAAM,eAAe,QAAQ,qBAAqB;AAGvD,SAAO;AACX;AAMA,SAAS,YAAY,KAAK;AACtB,
SAAO,OAAO,IAAI,MAAM,CAAC,UAAU,OAAO,UAAU,QAAQ;AAChE;AAMA,SAAS,aAAa,SAAS;AAC3B,QAAM,SAAS,kBAAK,MAAM,SAAS;AAAA,IAC/B,eAAe;AAAA,IACf,SAAS;AAAA,EACb,CAAC;AACD,SAAO,OAAO,KAAK,CAAC;AACxB;AAOA,SAAS,6BAA6B;AAClC,QAAM,kBAAkB,oBAAI,IAAI;AAChC,SAAO,CAAC,QAAQ;AACZ,QAAI,UAAU;AACd,QAAI,UAAU;AACd,WAAO,gBAAgB,IAAI,OAAO,GAAG;AACjC,gBAAU,GAAG,OAAO;AACpB;AAAA,IACJ;AACA,oBAAgB,IAAI,OAAO;AAC3B,WAAO;AAAA,EACX;AACJ;AAOA,SAAS,eAAe,cAAc,QAAQ,GAAG;AAC7C,QAAM,UAAU,CAAC;AACjB,WAAS,IAAI,GAAG,IAAI,OAAO,KAAK;AAC5B,YAAQ,KAAK,GAAG,eAAe,IAAI,GAAG;AAAA,EAC1C;AACA,SAAO;AACX;AACA,SAAS,aAAa,KAAK,WAAW;AAClC,QAAM,SAAS,YAAY,CAAC,IAAI,CAAC;AACjC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,UAAM,aAAc,aAAa,UAAU,CAAC,KAAM;AAClD,UAAM,QAAQ,IAAI,CAAC;AACnB,YAAQ,OAAO,OAAO;AAAA,MAClB,KAAK;AAAA,MACL,KAAK;AAED,eAAO,UAAU,IAAI,EAAE,MAAM,OAAO,UAAU,GAAG,OAAO,GAAG,MAAM,aAAa;AAC9E;AAAA,MACJ,KAAK;AAAA,MACL;AACI,eAAO,UAAU,IAAI,EAAE,MAAM,OAAO,UAAU,GAAG,OAAO,GAAG,MAAM,MAAM;AAAA,IAG/E;AAAA,EACJ;AACA,SAAO;AACX;;;AG9PA,IAAAC,iBAAsD;AACtD,oBAA8B;AAIvB,SAAS,iBAAiB,OAAO,UAAU,EAAE,KAAK,EAAE,iBAAiB,KAAK,EAAE,GAAG;AATtF;AAUI,QAAM,kBAAkB,QAAQ,qBAAmB,aAAQ,QAAR,mBAAa;AAChE,QAAM,WAAS,WAAM,WAAN,mBAAc,WAAU,CAAC;AACxC,QAAM,cAAc,OAAO,IAAI,CAAC,MAAM;AAZ1C,QAAAC;AAcQ,UAAM,eAAcA,MAAA,EAAE,aAAF,gBAAAA,IAAY;AAChC,WAAO,mBAAmB,OAAO,gBAAgB,WAAW,cAAc,EAAE;AAAA,EAChF,CAAC;AACD,QAAM,gBAAgB,CAAC,WAAW;AAClC,aAAW,WAAO,qCAAqB,KAAK,GAAG;AAC3C,UAAM,eAAe,CAAC;AACtB,aAAS,cAAc,GAAG,kBAAc,gCAAgB,KAAK,GAAG,EAAE,aAAa;AAC3E,YAAM,QAAQ,IAAI,WAAW;AAC7B,mBAAa,WAAW,IAAI,oBAAoB,KAAK;AAAA,IACzD;AACA,kBAAc,KAAK,YAAY;AAAA,EACnC;AACA,aAAO,6BAAc,aAAa;AACtC;AAKA,IAAM,sBAAsB,CAAC,UAAU;AACnC,MAAI,UAAU,QAAQ,UAAU,QAAW;AAGvC,WAAO;AAAA,EACX;AACA,MAAI,iBAAiB,MAAM;AAGvB,WAAO,MAAM,YAAY;AAAA,EAC7B;AACA,MAAI,OAAO,UAAU,UAAU;AAC3B,WAAO,KAAK,UAAU,KAAK;AAAA,EAC/B;AACA,SAAO,OAAO,KAAK;AACvB;;;AC3CO,IAAM,YAAY;AAAA,EACrB,IAAI;AAAA,EACJ,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,YAAY,CAAC,KAAK;AAAA,EAClB,WAAW,CAAC,UAAU;AAAA,EACtB,SAAS;AAAA,IACL,KAAK;AAAA,MACD,iBAAiB;AAAA,IACrB;AAAA,EACJ;AAAA,EACA,MAAM;AAAA,EACN,QAAQ,OAAO,OAAO,YAAY,IAAI,YAAY,EAAE,OAAO,iBAAiB,OAAO,OAAO,CAAC,EAAE;AAAA,EAC7F,gBAAgB,CAAC,OAAO,YAAY,iBAAiB,OAAO,OAAO;AACvE;",
|
|
6
6
|
"names": ["row", "value", "step", "ChunkStreamer", "import_schema", "_a"]
|
|
7
7
|
}
|
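Note: the sourcesContent above embeds the shipped CSVLoader implementation (parseCSV, parseCSVInBatches and their csv.* options). The sketch below is a hypothetical consumption example and is not part of the published package; it assumes the declared peer dependency @loaders.gl/core (^4.3.0) provides parse and parseInBatches, and it only uses option names visible in the embedded source (csv.shape, csv.header, csv.dynamicTyping, batchSize).

// Hypothetical usage sketch (TypeScript); assumes @loaders.gl/core ^4.3.0 and @loaders.gl/csv 4.3.2 are installed.
import {parse, parseInBatches} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

async function loadCsv(url: string) {
  // Atomic parse: DEFAULT_CSV_SHAPE is 'object-row-table', i.e. rows keyed by header names.
  const table = await parse(fetch(url), CSVLoader, {
    csv: {shape: 'object-row-table', header: 'auto', dynamicTyping: true}
  });

  // Batched parse: per parseCSVInBatches above, batchSize 'auto' resolves to 4000 rows per batch.
  const batches = await parseInBatches(fetch(url), CSVLoader, {
    batchSize: 'auto',
    csv: {shape: 'array-row-table'}
  });
  for await (const batch of batches) {
    // Each batch is emitted by TableBatchBuilder and carries row data plus bytesUsed metadata.
    console.log(batch);
  }
  return table;
}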
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@loaders.gl/csv",
|
|
3
|
-
"version": "4.3.
|
|
3
|
+
"version": "4.3.2",
|
|
4
4
|
"description": "Framework-independent loader for CSV and DSV table formats",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"type": "module",
|
|
@@ -44,12 +44,12 @@
|
|
|
44
44
|
"build-bundle-dev": "ocular-bundle ./bundle.ts --env=dev --output=dist/dist.dev.js"
|
|
45
45
|
},
|
|
46
46
|
"dependencies": {
|
|
47
|
-
"@loaders.gl/loader-utils": "4.3.
|
|
48
|
-
"@loaders.gl/schema": "4.3.
|
|
47
|
+
"@loaders.gl/loader-utils": "4.3.2",
|
|
48
|
+
"@loaders.gl/schema": "4.3.2",
|
|
49
49
|
"d3-dsv": "^1.2.0"
|
|
50
50
|
},
|
|
51
51
|
"peerDependencies": {
|
|
52
52
|
"@loaders.gl/core": "^4.3.0"
|
|
53
53
|
},
|
|
54
|
-
"gitHead": "
|
|
54
|
+
"gitHead": "b4203b8703f64a38d6f79a3113bc7bb51d45c93a"
|
|
55
55
|
}
|
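Finally, a minimal, hypothetical sketch of invoking the loader directly (without @loaders.gl/core) against the bumped 4.3.2 release. It uses only the parseText entry point and the defaults visible in the embedded source; the sample data and expected output are illustrative assumptions, not part of the diff.

// Hypothetical sketch (TypeScript): direct use of CSVLoader.parseText.
import {CSVLoader} from '@loaders.gl/csv';

const result = await CSVLoader.parseText?.('name,score\nalice,1\nbob,2', {
  csv: {shape: 'object-row-table', dynamicTyping: true}
});
// With header auto-detection and dynamicTyping, parseCSV should yield
// {shape: 'object-row-table', data: [{name: 'alice', score: 1}, {name: 'bob', score: 2}]}
console.log(result?.data);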