mirror of
https://github.com/deepseek-ai/awesome-deepseek-integration.git
synced 2025-04-05 11:24:03 +00:00
remove DS_Store
This commit is contained in:
parent
6bc0c27e40
commit
e76a72b97b
36 changed files with 39128 additions and 0 deletions
BIN
.DS_Store
vendored
BIN
.DS_Store
vendored
Binary file not shown.
BIN
docs/.DS_Store
vendored
BIN
docs/.DS_Store
vendored
Binary file not shown.
BIN
docs/ATTPs/.DS_Store
vendored
BIN
docs/ATTPs/.DS_Store
vendored
Binary file not shown.
13
node_modules/.package-lock.json
generated
vendored
Normal file
13
node_modules/.package-lock.json
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
{
|
||||||
|
"name": "awesome-deepseek-integration",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"node_modules/csv-parse": {
|
||||||
|
"version": "5.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.6.0.tgz",
|
||||||
|
"integrity": "sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q==",
|
||||||
|
"license": "MIT"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/csv-parse/LICENSE
generated
vendored
Normal file
21
node_modules/csv-parse/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2010 Adaltas
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
82
node_modules/csv-parse/README.md
generated
vendored
Normal file
82
node_modules/csv-parse/README.md
generated
vendored
Normal file
|
@ -0,0 +1,82 @@
|
||||||
|
# CSV parser for Node.js and the web
|
||||||
|
|
||||||
|
[](https://github.com/adaltas/node-csv/actions)
|
||||||
|
[](https://www.npmjs.com/package/csv-parse)
|
||||||
|
[](https://www.npmjs.com/package/csv-parse)
|
||||||
|
|
||||||
|
The [`csv-parse` package](https://csv.js.org/parse/) is a parser converting CSV text input into arrays or objects. It is part of the [CSV project](https://csv.js.org/).
|
||||||
|
|
||||||
|
It implements the Node.js [`stream.Transform` API](http://nodejs.org/api/stream.html#stream_class_stream_transform). It also provides a simple callback-based API for convenience. It is both extremely easy to use and powerful. It was first released in 2010 and is used against big data sets by a large community.
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
- [Project homepage](https://csv.js.org/parse/)
|
||||||
|
- [API](https://csv.js.org/parse/api/)
|
||||||
|
- [Options](https://csv.js.org/parse/options/)
|
||||||
|
- [Info properties](https://csv.js.org/parse/info/)
|
||||||
|
- [Common errors](https://csv.js.org/parse/errors/)
|
||||||
|
- [Examples](https://csv.js.org/project/examples/)
|
||||||
|
|
||||||
|
## Main features
|
||||||
|
|
||||||
|
- Flexible with lot of [options](https://csv.js.org/parse/options/)
|
||||||
|
- Multiple [distributions](https://csv.js.org/parse/distributions/): Node.js, Web, ECMAScript modules and CommonJS
|
||||||
|
- Follow the Node.js streaming API
|
||||||
|
- Simplicity with the optional callback API
|
||||||
|
- Support delimiters, quotes, escape characters and comments
|
||||||
|
- Line breaks discovery
|
||||||
|
- Support big datasets
|
||||||
|
- Complete test coverage and lot of samples for inspiration
|
||||||
|
- No external dependencies
|
||||||
|
- Work nicely with the [csv-generate](https://csv.js.org/generate/), [stream-transform](https://csv.js.org/transform/) and [csv-stringify](https://csv.js.org/stringify/) packages
|
||||||
|
- MIT License
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Run `npm install csv` to install the full CSV module or run `npm install csv-parse` if you are only interested by the CSV parser.
|
||||||
|
|
||||||
|
Use the callback and sync APIs for simplicity or the stream based API for scalability.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
The [API](https://csv.js.org/parse/api/) is available in multiple flavors. This example illustrates the stream API.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import assert from "assert";
|
||||||
|
import { parse } from "csv-parse";
|
||||||
|
|
||||||
|
const records = [];
|
||||||
|
// Initialize the parser
|
||||||
|
const parser = parse({
|
||||||
|
delimiter: ":",
|
||||||
|
});
|
||||||
|
// Use the readable stream api to consume records
|
||||||
|
parser.on("readable", function () {
|
||||||
|
let record;
|
||||||
|
while ((record = parser.read()) !== null) {
|
||||||
|
records.push(record);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Catch any error
|
||||||
|
parser.on("error", function (err) {
|
||||||
|
console.error(err.message);
|
||||||
|
});
|
||||||
|
// Test that the parsed records matched the expected records
|
||||||
|
parser.on("end", function () {
|
||||||
|
assert.deepStrictEqual(records, [
|
||||||
|
["root", "x", "0", "0", "root", "/root", "/bin/bash"],
|
||||||
|
["someone", "x", "1022", "1022", "", "/home/someone", "/bin/bash"],
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
// Write data to the stream
|
||||||
|
parser.write("root:x:0:0:root:/root:/bin/bash\n");
|
||||||
|
parser.write("someone:x:1022:1022::/home/someone:/bin/bash\n");
|
||||||
|
// Close the readable stream
|
||||||
|
parser.end();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributors
|
||||||
|
|
||||||
|
The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France.
|
||||||
|
|
||||||
|
- David Worms: <https://github.com/wdavidw>
|
1889
node_modules/csv-parse/dist/cjs/index.cjs
generated
vendored
Normal file
1889
node_modules/csv-parse/dist/cjs/index.cjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
283
node_modules/csv-parse/dist/cjs/index.d.cts
generated
vendored
Normal file
283
node_modules/csv-parse/dist/cjs/index.d.cts
generated
vendored
Normal file
|
@ -0,0 +1,283 @@
|
||||||
|
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
|
||||||
|
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import * as stream from "stream";
|
||||||
|
|
||||||
|
export type Callback = (err: CsvError | undefined, records: any | undefined, info: Info) => void;
|
||||||
|
|
||||||
|
export interface Parser extends stream.Transform {}
|
||||||
|
|
||||||
|
export class Parser {
|
||||||
|
constructor(options: Options);
|
||||||
|
|
||||||
|
__push(line: any): any;
|
||||||
|
|
||||||
|
__write(chars: any, end: any, callback: any): any;
|
||||||
|
|
||||||
|
readonly options: Options
|
||||||
|
|
||||||
|
readonly info: Info;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CastingContext {
|
||||||
|
readonly column: number | string;
|
||||||
|
readonly empty_lines: number;
|
||||||
|
readonly error: CsvError;
|
||||||
|
readonly header: boolean;
|
||||||
|
readonly index: number;
|
||||||
|
readonly quoting: boolean;
|
||||||
|
readonly lines: number;
|
||||||
|
readonly records: number;
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CastingFunction = (value: string, context: CastingContext) => any;
|
||||||
|
|
||||||
|
export type CastingDateFunction = (value: string, context: CastingContext) => Date;
|
||||||
|
|
||||||
|
export type ColumnOption = string | undefined | null | false | { name: string };
|
||||||
|
|
||||||
|
/*
|
||||||
|
Note, could not `extends stream.TransformOptions` because encoding can be
|
||||||
|
BufferEncoding and undefined as well as null which is not defined in the
|
||||||
|
extended type.
|
||||||
|
*/
|
||||||
|
export interface Options {
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to native types.
|
||||||
|
* @deprecated Use {@link cast}
|
||||||
|
*/
|
||||||
|
auto_parse?: boolean | CastingFunction;
|
||||||
|
autoParse?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
|
||||||
|
* @deprecated Use {@link cast_date}
|
||||||
|
*/
|
||||||
|
auto_parse_date?: boolean | CastingDateFunction;
|
||||||
|
autoParseDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
|
||||||
|
*/
|
||||||
|
bom?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to native types.
|
||||||
|
* If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
|
||||||
|
*/
|
||||||
|
cast?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to dates.
|
||||||
|
* If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
|
||||||
|
*/
|
||||||
|
cast_date?: boolean | CastingDateFunction;
|
||||||
|
castDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* List of fields as an array,
|
||||||
|
* a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
|
||||||
|
* default to null,
|
||||||
|
* affect the result data set in the sense that records will be objects instead of arrays.
|
||||||
|
*/
|
||||||
|
columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
|
||||||
|
/**
|
||||||
|
* Convert values into an array of values when columns are activated and
|
||||||
|
* when multiple columns of the same name are found.
|
||||||
|
*/
|
||||||
|
group_columns_by_name?: boolean;
|
||||||
|
groupColumnsByName?: boolean;
|
||||||
|
/**
|
||||||
|
* Treat all the characters after this one as a comment, default to '' (disabled).
|
||||||
|
*/
|
||||||
|
comment?: string;
|
||||||
|
/**
|
||||||
|
* Restrict the definition of comments to a full line. Comment characters
|
||||||
|
* defined in the middle of the line are not interpreted as such. The
|
||||||
|
* option require the activation of comments.
|
||||||
|
*/
|
||||||
|
comment_no_infix?: boolean;
|
||||||
|
/**
|
||||||
|
* Set the field delimiter. One character only, defaults to comma.
|
||||||
|
*/
|
||||||
|
delimiter?: string | string[] | Buffer;
|
||||||
|
/**
|
||||||
|
* Set the source and destination encoding, a value of `null` returns buffer instead of strings.
|
||||||
|
*/
|
||||||
|
encoding?: BufferEncoding | undefined;
|
||||||
|
/**
|
||||||
|
* Set the escape character, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
escape?: string | null | false | Buffer;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested number of records.
|
||||||
|
*/
|
||||||
|
from?: number;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested line number.
|
||||||
|
*/
|
||||||
|
from_line?: number;
|
||||||
|
fromLine?: number;
|
||||||
|
/**
|
||||||
|
* Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
|
||||||
|
*/
|
||||||
|
ignore_last_delimiters?: boolean | number;
|
||||||
|
/**
|
||||||
|
* Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
|
||||||
|
*/
|
||||||
|
info?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
ltrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Maximum numer of characters to be contained in the field and line buffers before an exception is raised,
|
||||||
|
* used to guard against a wrong delimiter or record_delimiter,
|
||||||
|
* default to 128000 characters.
|
||||||
|
*/
|
||||||
|
max_record_size?: number;
|
||||||
|
maxRecordSize?: number;
|
||||||
|
/**
|
||||||
|
* Name of header-record title to name objects by.
|
||||||
|
*/
|
||||||
|
objname?: string;
|
||||||
|
/**
|
||||||
|
* Alter and filter records by executing a user defined function.
|
||||||
|
*/
|
||||||
|
on_record?: (record: any, context: CastingContext) => any;
|
||||||
|
onRecord?: (record: any, context: CastingContext) => any;
|
||||||
|
/**
|
||||||
|
* Optional character surrounding a field, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
quote?: string | boolean | Buffer | null;
|
||||||
|
/**
|
||||||
|
* Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
|
||||||
|
*/
|
||||||
|
raw?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count?: boolean;
|
||||||
|
relaxColumnCount?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains less fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_less?: boolean;
|
||||||
|
relaxColumnCountLess?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains more fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_more?: boolean;
|
||||||
|
relaxColumnCountMore?: boolean;
|
||||||
|
/**
|
||||||
|
* Preserve quotes inside unquoted field.
|
||||||
|
*/
|
||||||
|
relax_quotes?: boolean;
|
||||||
|
relaxQuotes?: boolean;
|
||||||
|
/**
|
||||||
|
* One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
|
||||||
|
* Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
|
||||||
|
*/
|
||||||
|
record_delimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
recordDelimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
rtrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Dont generate empty values for empty lines.
|
||||||
|
* Defaults to false
|
||||||
|
*/
|
||||||
|
skip_empty_lines?: boolean;
|
||||||
|
skipEmptyLines?: boolean;
|
||||||
|
/**
|
||||||
|
* Skip a line with error found inside and directly go process the next line.
|
||||||
|
*/
|
||||||
|
skip_records_with_error?: boolean;
|
||||||
|
skipRecordsWithError?: boolean;
|
||||||
|
/**
|
||||||
|
* Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
|
||||||
|
*/
|
||||||
|
skip_records_with_empty_values?: boolean;
|
||||||
|
skipRecordsWithEmptyValues?: boolean;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested number of records.
|
||||||
|
*/
|
||||||
|
to?: number;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested line number.
|
||||||
|
*/
|
||||||
|
to_line?: number;
|
||||||
|
toLine?: number;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately around the delimiter, defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
trim?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Info {
|
||||||
|
/**
|
||||||
|
* Count the number of lines being fully commented.
|
||||||
|
*/
|
||||||
|
readonly comment_lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed empty lines.
|
||||||
|
*/
|
||||||
|
readonly empty_lines: number;
|
||||||
|
/**
|
||||||
|
* The number of lines encountered in the source dataset, start at 1 for the first line.
|
||||||
|
*/
|
||||||
|
readonly lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed records.
|
||||||
|
*/
|
||||||
|
readonly records: number;
|
||||||
|
/**
|
||||||
|
* Count of the number of processed bytes.
|
||||||
|
*/
|
||||||
|
readonly bytes: number;
|
||||||
|
/**
|
||||||
|
* Number of non uniform records when `relax_column_count` is true.
|
||||||
|
*/
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
/**
|
||||||
|
* Normalized verion of `options.columns` when `options.columns` is true, boolean otherwise.
|
||||||
|
*/
|
||||||
|
readonly columns: boolean | { name: string }[] | { disabled: true }[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CsvErrorCode =
|
||||||
|
'CSV_INVALID_OPTION_BOM'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST_DATE'
|
||||||
|
| 'CSV_INVALID_OPTION_COLUMNS'
|
||||||
|
| 'CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME'
|
||||||
|
| 'CSV_INVALID_OPTION_COMMENT'
|
||||||
|
| 'CSV_INVALID_OPTION_DELIMITER'
|
||||||
|
| 'CSV_INVALID_OPTION_ON_RECORD'
|
||||||
|
| 'CSV_INVALID_CLOSING_QUOTE'
|
||||||
|
| 'INVALID_OPENING_QUOTE'
|
||||||
|
| 'CSV_INVALID_COLUMN_MAPPING'
|
||||||
|
| 'CSV_INVALID_ARGUMENT'
|
||||||
|
| 'CSV_INVALID_COLUMN_DEFINITION'
|
||||||
|
| 'CSV_MAX_RECORD_SIZE'
|
||||||
|
| 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
|
||||||
|
| 'CSV_QUOTE_NOT_CLOSED'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_FIELDS_LENGTH'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_COLUMNS'
|
||||||
|
| 'CSV_OPTION_COLUMNS_MISSING_NAME'
|
||||||
|
|
||||||
|
export class CsvError extends Error {
|
||||||
|
readonly code: CsvErrorCode;
|
||||||
|
[key: string]: any;
|
||||||
|
|
||||||
|
constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
|
||||||
|
}
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(input: Buffer | string, callback?: Callback): Parser;
|
||||||
|
declare function parse(options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(callback?: Callback): Parser;
|
||||||
|
|
||||||
|
// export default parse;
|
||||||
|
export { parse }
|
1775
node_modules/csv-parse/dist/cjs/sync.cjs
generated
vendored
Normal file
1775
node_modules/csv-parse/dist/cjs/sync.cjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
11
node_modules/csv-parse/dist/cjs/sync.d.cts
generated
vendored
Normal file
11
node_modules/csv-parse/dist/cjs/sync.d.cts
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
|
||||||
|
import { Options } from './index.cjs';
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options): any;
|
||||||
|
// export default parse;
|
||||||
|
export { parse };
|
||||||
|
|
||||||
|
export {
|
||||||
|
CastingContext, CastingFunction, CastingDateFunction,
|
||||||
|
ColumnOption, Options, Info, CsvErrorCode, CsvError
|
||||||
|
} from './index.cjs';
|
283
node_modules/csv-parse/dist/esm/index.d.ts
generated
vendored
Normal file
283
node_modules/csv-parse/dist/esm/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,283 @@
|
||||||
|
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
|
||||||
|
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import * as stream from "stream";
|
||||||
|
|
||||||
|
export type Callback = (err: CsvError | undefined, records: any | undefined, info: Info) => void;
|
||||||
|
|
||||||
|
export interface Parser extends stream.Transform {}
|
||||||
|
|
||||||
|
export class Parser {
|
||||||
|
constructor(options: Options);
|
||||||
|
|
||||||
|
__push(line: any): any;
|
||||||
|
|
||||||
|
__write(chars: any, end: any, callback: any): any;
|
||||||
|
|
||||||
|
readonly options: Options
|
||||||
|
|
||||||
|
readonly info: Info;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CastingContext {
|
||||||
|
readonly column: number | string;
|
||||||
|
readonly empty_lines: number;
|
||||||
|
readonly error: CsvError;
|
||||||
|
readonly header: boolean;
|
||||||
|
readonly index: number;
|
||||||
|
readonly quoting: boolean;
|
||||||
|
readonly lines: number;
|
||||||
|
readonly records: number;
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CastingFunction = (value: string, context: CastingContext) => any;
|
||||||
|
|
||||||
|
export type CastingDateFunction = (value: string, context: CastingContext) => Date;
|
||||||
|
|
||||||
|
export type ColumnOption = string | undefined | null | false | { name: string };
|
||||||
|
|
||||||
|
/*
|
||||||
|
Note, could not `extends stream.TransformOptions` because encoding can be
|
||||||
|
BufferEncoding and undefined as well as null which is not defined in the
|
||||||
|
extended type.
|
||||||
|
*/
|
||||||
|
export interface Options {
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to native types.
|
||||||
|
* @deprecated Use {@link cast}
|
||||||
|
*/
|
||||||
|
auto_parse?: boolean | CastingFunction;
|
||||||
|
autoParse?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
|
||||||
|
* @deprecated Use {@link cast_date}
|
||||||
|
*/
|
||||||
|
auto_parse_date?: boolean | CastingDateFunction;
|
||||||
|
autoParseDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
|
||||||
|
*/
|
||||||
|
bom?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to native types.
|
||||||
|
* If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
|
||||||
|
*/
|
||||||
|
cast?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to dates.
|
||||||
|
* If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
|
||||||
|
*/
|
||||||
|
cast_date?: boolean | CastingDateFunction;
|
||||||
|
castDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* List of fields as an array,
|
||||||
|
* a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
|
||||||
|
* default to null,
|
||||||
|
* affect the result data set in the sense that records will be objects instead of arrays.
|
||||||
|
*/
|
||||||
|
columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
|
||||||
|
/**
|
||||||
|
* Convert values into an array of values when columns are activated and
|
||||||
|
* when multiple columns of the same name are found.
|
||||||
|
*/
|
||||||
|
group_columns_by_name?: boolean;
|
||||||
|
groupColumnsByName?: boolean;
|
||||||
|
/**
|
||||||
|
* Treat all the characters after this one as a comment, default to '' (disabled).
|
||||||
|
*/
|
||||||
|
comment?: string;
|
||||||
|
/**
|
||||||
|
* Restrict the definition of comments to a full line. Comment characters
|
||||||
|
* defined in the middle of the line are not interpreted as such. The
|
||||||
|
* option require the activation of comments.
|
||||||
|
*/
|
||||||
|
comment_no_infix?: boolean;
|
||||||
|
/**
|
||||||
|
* Set the field delimiter. One character only, defaults to comma.
|
||||||
|
*/
|
||||||
|
delimiter?: string | string[] | Buffer;
|
||||||
|
/**
|
||||||
|
* Set the source and destination encoding, a value of `null` returns buffer instead of strings.
|
||||||
|
*/
|
||||||
|
encoding?: BufferEncoding | undefined;
|
||||||
|
/**
|
||||||
|
* Set the escape character, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
escape?: string | null | false | Buffer;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested number of records.
|
||||||
|
*/
|
||||||
|
from?: number;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested line number.
|
||||||
|
*/
|
||||||
|
from_line?: number;
|
||||||
|
fromLine?: number;
|
||||||
|
/**
|
||||||
|
* Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
|
||||||
|
*/
|
||||||
|
ignore_last_delimiters?: boolean | number;
|
||||||
|
/**
|
||||||
|
* Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
|
||||||
|
*/
|
||||||
|
info?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
ltrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Maximum numer of characters to be contained in the field and line buffers before an exception is raised,
|
||||||
|
* used to guard against a wrong delimiter or record_delimiter,
|
||||||
|
* default to 128000 characters.
|
||||||
|
*/
|
||||||
|
max_record_size?: number;
|
||||||
|
maxRecordSize?: number;
|
||||||
|
/**
|
||||||
|
* Name of header-record title to name objects by.
|
||||||
|
*/
|
||||||
|
objname?: string;
|
||||||
|
/**
|
||||||
|
* Alter and filter records by executing a user defined function.
|
||||||
|
*/
|
||||||
|
on_record?: (record: any, context: CastingContext) => any;
|
||||||
|
onRecord?: (record: any, context: CastingContext) => any;
|
||||||
|
/**
|
||||||
|
* Optional character surrounding a field, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
quote?: string | boolean | Buffer | null;
|
||||||
|
/**
|
||||||
|
* Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
|
||||||
|
*/
|
||||||
|
raw?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count?: boolean;
|
||||||
|
relaxColumnCount?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains less fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_less?: boolean;
|
||||||
|
relaxColumnCountLess?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains more fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_more?: boolean;
|
||||||
|
relaxColumnCountMore?: boolean;
|
||||||
|
/**
|
||||||
|
* Preserve quotes inside unquoted field.
|
||||||
|
*/
|
||||||
|
relax_quotes?: boolean;
|
||||||
|
relaxQuotes?: boolean;
|
||||||
|
/**
|
||||||
|
* One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
|
||||||
|
* Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
|
||||||
|
*/
|
||||||
|
record_delimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
recordDelimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
rtrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Dont generate empty values for empty lines.
|
||||||
|
* Defaults to false
|
||||||
|
*/
|
||||||
|
skip_empty_lines?: boolean;
|
||||||
|
skipEmptyLines?: boolean;
|
||||||
|
/**
|
||||||
|
* Skip a line with error found inside and directly go process the next line.
|
||||||
|
*/
|
||||||
|
skip_records_with_error?: boolean;
|
||||||
|
skipRecordsWithError?: boolean;
|
||||||
|
/**
|
||||||
|
* Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
|
||||||
|
*/
|
||||||
|
skip_records_with_empty_values?: boolean;
|
||||||
|
skipRecordsWithEmptyValues?: boolean;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested number of records.
|
||||||
|
*/
|
||||||
|
to?: number;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested line number.
|
||||||
|
*/
|
||||||
|
to_line?: number;
|
||||||
|
toLine?: number;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately around the delimiter, defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
trim?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Info {
|
||||||
|
/**
|
||||||
|
* Count the number of lines being fully commented.
|
||||||
|
*/
|
||||||
|
readonly comment_lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed empty lines.
|
||||||
|
*/
|
||||||
|
readonly empty_lines: number;
|
||||||
|
/**
|
||||||
|
* The number of lines encountered in the source dataset, start at 1 for the first line.
|
||||||
|
*/
|
||||||
|
readonly lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed records.
|
||||||
|
*/
|
||||||
|
readonly records: number;
|
||||||
|
/**
|
||||||
|
* Count of the number of processed bytes.
|
||||||
|
*/
|
||||||
|
readonly bytes: number;
|
||||||
|
/**
|
||||||
|
* Number of non uniform records when `relax_column_count` is true.
|
||||||
|
*/
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
/**
|
||||||
|
* Normalized verion of `options.columns` when `options.columns` is true, boolean otherwise.
|
||||||
|
*/
|
||||||
|
readonly columns: boolean | { name: string }[] | { disabled: true }[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CsvErrorCode =
|
||||||
|
'CSV_INVALID_OPTION_BOM'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST_DATE'
|
||||||
|
| 'CSV_INVALID_OPTION_COLUMNS'
|
||||||
|
| 'CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME'
|
||||||
|
| 'CSV_INVALID_OPTION_COMMENT'
|
||||||
|
| 'CSV_INVALID_OPTION_DELIMITER'
|
||||||
|
| 'CSV_INVALID_OPTION_ON_RECORD'
|
||||||
|
| 'CSV_INVALID_CLOSING_QUOTE'
|
||||||
|
| 'INVALID_OPENING_QUOTE'
|
||||||
|
| 'CSV_INVALID_COLUMN_MAPPING'
|
||||||
|
| 'CSV_INVALID_ARGUMENT'
|
||||||
|
| 'CSV_INVALID_COLUMN_DEFINITION'
|
||||||
|
| 'CSV_MAX_RECORD_SIZE'
|
||||||
|
| 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
|
||||||
|
| 'CSV_QUOTE_NOT_CLOSED'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_FIELDS_LENGTH'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_COLUMNS'
|
||||||
|
| 'CSV_OPTION_COLUMNS_MISSING_NAME'
|
||||||
|
|
||||||
|
export class CsvError extends Error {
|
||||||
|
readonly code: CsvErrorCode;
|
||||||
|
[key: string]: any;
|
||||||
|
|
||||||
|
constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
|
||||||
|
}
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(input: Buffer | string, callback?: Callback): Parser;
|
||||||
|
declare function parse(options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(callback?: Callback): Parser;
|
||||||
|
|
||||||
|
// export default parse;
|
||||||
|
export { parse }
|
7001
node_modules/csv-parse/dist/esm/index.js
generated
vendored
Normal file
7001
node_modules/csv-parse/dist/esm/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
11
node_modules/csv-parse/dist/esm/stream.d.ts
generated
vendored
Normal file
11
node_modules/csv-parse/dist/esm/stream.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
|
||||||
|
import { Options } from './index.js';
|
||||||
|
|
||||||
|
declare function parse(options?: Options): TransformStream;
|
||||||
|
// export default parse;
|
||||||
|
export { parse };
|
||||||
|
|
||||||
|
export {
|
||||||
|
CastingContext, CastingFunction, CastingDateFunction,
|
||||||
|
ColumnOption, Options, Info, CsvErrorCode, CsvError
|
||||||
|
} from './index.js';
|
11
node_modules/csv-parse/dist/esm/sync.d.ts
generated
vendored
Normal file
11
node_modules/csv-parse/dist/esm/sync.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
|
||||||
|
import { Options } from './index.js';
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options): any;
|
||||||
|
// export default parse;
|
||||||
|
export { parse };
|
||||||
|
|
||||||
|
export {
|
||||||
|
CastingContext, CastingFunction, CastingDateFunction,
|
||||||
|
ColumnOption, Options, Info, CsvErrorCode, CsvError
|
||||||
|
} from './index.js';
|
3744
node_modules/csv-parse/dist/esm/sync.js
generated
vendored
Normal file
3744
node_modules/csv-parse/dist/esm/sync.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
7010
node_modules/csv-parse/dist/iife/index.js
generated
vendored
Normal file
7010
node_modules/csv-parse/dist/iife/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
3752
node_modules/csv-parse/dist/iife/sync.js
generated
vendored
Normal file
3752
node_modules/csv-parse/dist/iife/sync.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
7011
node_modules/csv-parse/dist/umd/index.js
generated
vendored
Normal file
7011
node_modules/csv-parse/dist/umd/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
3753
node_modules/csv-parse/dist/umd/sync.js
generated
vendored
Normal file
3753
node_modules/csv-parse/dist/umd/sync.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
22
node_modules/csv-parse/lib/api/CsvError.js
generated
vendored
Normal file
22
node_modules/csv-parse/lib/api/CsvError.js
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
class CsvError extends Error {
|
||||||
|
constructor(code, message, options, ...contexts) {
|
||||||
|
if (Array.isArray(message)) message = message.join(" ").trim();
|
||||||
|
super(message);
|
||||||
|
if (Error.captureStackTrace !== undefined) {
|
||||||
|
Error.captureStackTrace(this, CsvError);
|
||||||
|
}
|
||||||
|
this.code = code;
|
||||||
|
for (const context of contexts) {
|
||||||
|
for (const key in context) {
|
||||||
|
const value = context[key];
|
||||||
|
this[key] = Buffer.isBuffer(value)
|
||||||
|
? value.toString(options.encoding)
|
||||||
|
: value == null
|
||||||
|
? value
|
||||||
|
: JSON.parse(JSON.stringify(value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { CsvError };
|
908
node_modules/csv-parse/lib/api/index.js
generated
vendored
Normal file
908
node_modules/csv-parse/lib/api/index.js
generated
vendored
Normal file
|
@ -0,0 +1,908 @@
|
||||||
|
import { normalize_columns_array } from "./normalize_columns_array.js";
|
||||||
|
import { init_state } from "./init_state.js";
|
||||||
|
import { normalize_options } from "./normalize_options.js";
|
||||||
|
import { CsvError } from "./CsvError.js";
|
||||||
|
|
||||||
|
const isRecordEmpty = function (record) {
|
||||||
|
return record.every(
|
||||||
|
(field) =>
|
||||||
|
field == null || (field.toString && field.toString().trim() === ""),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const cr = 13; // `\r`, carriage return, 0x0D in hexadécimal, 13 in decimal
|
||||||
|
const nl = 10; // `\n`, newline, 0x0A in hexadecimal, 10 in decimal
|
||||||
|
|
||||||
|
const boms = {
|
||||||
|
// Note, the following are equals:
|
||||||
|
// Buffer.from("\ufeff")
|
||||||
|
// Buffer.from([239, 187, 191])
|
||||||
|
// Buffer.from('EFBBBF', 'hex')
|
||||||
|
utf8: Buffer.from([239, 187, 191]),
|
||||||
|
// Note, the following are equals:
|
||||||
|
// Buffer.from "\ufeff", 'utf16le
|
||||||
|
// Buffer.from([255, 254])
|
||||||
|
utf16le: Buffer.from([255, 254]),
|
||||||
|
};
|
||||||
|
|
||||||
|
const transform = function (original_options = {}) {
|
||||||
|
const info = {
|
||||||
|
bytes: 0,
|
||||||
|
comment_lines: 0,
|
||||||
|
empty_lines: 0,
|
||||||
|
invalid_field_length: 0,
|
||||||
|
lines: 1,
|
||||||
|
records: 0,
|
||||||
|
};
|
||||||
|
const options = normalize_options(original_options);
|
||||||
|
return {
|
||||||
|
info: info,
|
||||||
|
original_options: original_options,
|
||||||
|
options: options,
|
||||||
|
state: init_state(options),
|
||||||
|
__needMoreData: function (i, bufLen, end) {
|
||||||
|
if (end) return false;
|
||||||
|
const { encoding, escape, quote } = this.options;
|
||||||
|
const { quoting, needMoreDataSize, recordDelimiterMaxLength } =
|
||||||
|
this.state;
|
||||||
|
const numOfCharLeft = bufLen - i - 1;
|
||||||
|
const requiredLength = Math.max(
|
||||||
|
needMoreDataSize,
|
||||||
|
// Skip if the remaining buffer smaller than record delimiter
|
||||||
|
// If "record_delimiter" is yet to be discovered:
|
||||||
|
// 1. It is equals to `[]` and "recordDelimiterMaxLength" equals `0`
|
||||||
|
// 2. We set the length to windows line ending in the current encoding
|
||||||
|
// Note, that encoding is known from user or bom discovery at that point
|
||||||
|
// recordDelimiterMaxLength,
|
||||||
|
recordDelimiterMaxLength === 0
|
||||||
|
? Buffer.from("\r\n", encoding).length
|
||||||
|
: recordDelimiterMaxLength,
|
||||||
|
// Skip if remaining buffer can be an escaped quote
|
||||||
|
quoting ? (escape === null ? 0 : escape.length) + quote.length : 0,
|
||||||
|
// Skip if remaining buffer can be record delimiter following the closing quote
|
||||||
|
quoting ? quote.length + recordDelimiterMaxLength : 0,
|
||||||
|
);
|
||||||
|
return numOfCharLeft < requiredLength;
|
||||||
|
},
|
||||||
|
// Central parser implementation
|
||||||
|
parse: function (nextBuf, end, push, close) {
|
||||||
|
const {
|
||||||
|
bom,
|
||||||
|
comment_no_infix,
|
||||||
|
encoding,
|
||||||
|
from_line,
|
||||||
|
ltrim,
|
||||||
|
max_record_size,
|
||||||
|
raw,
|
||||||
|
relax_quotes,
|
||||||
|
rtrim,
|
||||||
|
skip_empty_lines,
|
||||||
|
to,
|
||||||
|
to_line,
|
||||||
|
} = this.options;
|
||||||
|
let { comment, escape, quote, record_delimiter } = this.options;
|
||||||
|
const { bomSkipped, previousBuf, rawBuffer, escapeIsQuote } = this.state;
|
||||||
|
let buf;
|
||||||
|
if (previousBuf === undefined) {
|
||||||
|
if (nextBuf === undefined) {
|
||||||
|
// Handle empty string
|
||||||
|
close();
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
buf = nextBuf;
|
||||||
|
}
|
||||||
|
} else if (previousBuf !== undefined && nextBuf === undefined) {
|
||||||
|
buf = previousBuf;
|
||||||
|
} else {
|
||||||
|
buf = Buffer.concat([previousBuf, nextBuf]);
|
||||||
|
}
|
||||||
|
// Handle UTF BOM
|
||||||
|
if (bomSkipped === false) {
|
||||||
|
if (bom === false) {
|
||||||
|
this.state.bomSkipped = true;
|
||||||
|
} else if (buf.length < 3) {
|
||||||
|
// No enough data
|
||||||
|
if (end === false) {
|
||||||
|
// Wait for more data
|
||||||
|
this.state.previousBuf = buf;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (const encoding in boms) {
|
||||||
|
if (boms[encoding].compare(buf, 0, boms[encoding].length) === 0) {
|
||||||
|
// Skip BOM
|
||||||
|
const bomLength = boms[encoding].length;
|
||||||
|
this.state.bufBytesStart += bomLength;
|
||||||
|
buf = buf.slice(bomLength);
|
||||||
|
// Renormalize original options with the new encoding
|
||||||
|
this.options = normalize_options({
|
||||||
|
...this.original_options,
|
||||||
|
encoding: encoding,
|
||||||
|
});
|
||||||
|
// Options will re-evaluate the Buffer with the new encoding
|
||||||
|
({ comment, escape, quote } = this.options);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.state.bomSkipped = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const bufLen = buf.length;
|
||||||
|
let pos;
|
||||||
|
for (pos = 0; pos < bufLen; pos++) {
|
||||||
|
// Ensure we get enough space to look ahead
|
||||||
|
// There should be a way to move this out of the loop
|
||||||
|
if (this.__needMoreData(pos, bufLen, end)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (this.state.wasRowDelimiter === true) {
|
||||||
|
this.info.lines++;
|
||||||
|
this.state.wasRowDelimiter = false;
|
||||||
|
}
|
||||||
|
if (to_line !== -1 && this.info.lines > to_line) {
|
||||||
|
this.state.stop = true;
|
||||||
|
close();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Auto discovery of record_delimiter, unix, mac and windows supported
|
||||||
|
if (this.state.quoting === false && record_delimiter.length === 0) {
|
||||||
|
const record_delimiterCount = this.__autoDiscoverRecordDelimiter(
|
||||||
|
buf,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
if (record_delimiterCount) {
|
||||||
|
record_delimiter = this.options.record_delimiter;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const chr = buf[pos];
|
||||||
|
if (raw === true) {
|
||||||
|
rawBuffer.append(chr);
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
(chr === cr || chr === nl) &&
|
||||||
|
this.state.wasRowDelimiter === false
|
||||||
|
) {
|
||||||
|
this.state.wasRowDelimiter = true;
|
||||||
|
}
|
||||||
|
// Previous char was a valid escape char
|
||||||
|
// treat the current char as a regular char
|
||||||
|
if (this.state.escaping === true) {
|
||||||
|
this.state.escaping = false;
|
||||||
|
} else {
|
||||||
|
// Escape is only active inside quoted fields
|
||||||
|
// We are quoting, the char is an escape chr and there is a chr to escape
|
||||||
|
// if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
|
||||||
|
if (
|
||||||
|
escape !== null &&
|
||||||
|
this.state.quoting === true &&
|
||||||
|
this.__isEscape(buf, pos, chr) &&
|
||||||
|
pos + escape.length < bufLen
|
||||||
|
) {
|
||||||
|
if (escapeIsQuote) {
|
||||||
|
if (this.__isQuote(buf, pos + escape.length)) {
|
||||||
|
this.state.escaping = true;
|
||||||
|
pos += escape.length - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.state.escaping = true;
|
||||||
|
pos += escape.length - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Not currently escaping and chr is a quote
|
||||||
|
// TODO: need to compare bytes instead of single char
|
||||||
|
if (this.state.commenting === false && this.__isQuote(buf, pos)) {
|
||||||
|
if (this.state.quoting === true) {
|
||||||
|
const nextChr = buf[pos + quote.length];
|
||||||
|
const isNextChrTrimable =
|
||||||
|
rtrim && this.__isCharTrimable(buf, pos + quote.length);
|
||||||
|
const isNextChrComment =
|
||||||
|
comment !== null &&
|
||||||
|
this.__compareBytes(comment, buf, pos + quote.length, nextChr);
|
||||||
|
const isNextChrDelimiter = this.__isDelimiter(
|
||||||
|
buf,
|
||||||
|
pos + quote.length,
|
||||||
|
nextChr,
|
||||||
|
);
|
||||||
|
const isNextChrRecordDelimiter =
|
||||||
|
record_delimiter.length === 0
|
||||||
|
? this.__autoDiscoverRecordDelimiter(buf, pos + quote.length)
|
||||||
|
: this.__isRecordDelimiter(nextChr, buf, pos + quote.length);
|
||||||
|
// Escape a quote
|
||||||
|
// Treat next char as a regular character
|
||||||
|
if (
|
||||||
|
escape !== null &&
|
||||||
|
this.__isEscape(buf, pos, chr) &&
|
||||||
|
this.__isQuote(buf, pos + escape.length)
|
||||||
|
) {
|
||||||
|
pos += escape.length - 1;
|
||||||
|
} else if (
|
||||||
|
!nextChr ||
|
||||||
|
isNextChrDelimiter ||
|
||||||
|
isNextChrRecordDelimiter ||
|
||||||
|
isNextChrComment ||
|
||||||
|
isNextChrTrimable
|
||||||
|
) {
|
||||||
|
this.state.quoting = false;
|
||||||
|
this.state.wasQuoting = true;
|
||||||
|
pos += quote.length - 1;
|
||||||
|
continue;
|
||||||
|
} else if (relax_quotes === false) {
|
||||||
|
const err = this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"CSV_INVALID_CLOSING_QUOTE",
|
||||||
|
[
|
||||||
|
"Invalid Closing Quote:",
|
||||||
|
`got "${String.fromCharCode(nextChr)}"`,
|
||||||
|
`at line ${this.info.lines}`,
|
||||||
|
"instead of delimiter, record delimiter, trimable character",
|
||||||
|
"(if activated) or comment",
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
if (err !== undefined) return err;
|
||||||
|
} else {
|
||||||
|
this.state.quoting = false;
|
||||||
|
this.state.wasQuoting = true;
|
||||||
|
this.state.field.prepend(quote);
|
||||||
|
pos += quote.length - 1;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (this.state.field.length !== 0) {
|
||||||
|
// In relax_quotes mode, treat opening quote preceded by chrs as regular
|
||||||
|
if (relax_quotes === false) {
|
||||||
|
const info = this.__infoField();
|
||||||
|
const bom = Object.keys(boms)
|
||||||
|
.map((b) =>
|
||||||
|
boms[b].equals(this.state.field.toString()) ? b : false,
|
||||||
|
)
|
||||||
|
.filter(Boolean)[0];
|
||||||
|
const err = this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"INVALID_OPENING_QUOTE",
|
||||||
|
[
|
||||||
|
"Invalid Opening Quote:",
|
||||||
|
`a quote is found on field ${JSON.stringify(info.column)} at line ${info.lines}, value is ${JSON.stringify(this.state.field.toString(encoding))}`,
|
||||||
|
bom ? `(${bom} bom)` : undefined,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
info,
|
||||||
|
{
|
||||||
|
field: this.state.field,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
if (err !== undefined) return err;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.state.quoting = true;
|
||||||
|
pos += quote.length - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.state.quoting === false) {
|
||||||
|
const recordDelimiterLength = this.__isRecordDelimiter(
|
||||||
|
chr,
|
||||||
|
buf,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
if (recordDelimiterLength !== 0) {
|
||||||
|
// Do not emit comments which take a full line
|
||||||
|
const skipCommentLine =
|
||||||
|
this.state.commenting &&
|
||||||
|
this.state.wasQuoting === false &&
|
||||||
|
this.state.record.length === 0 &&
|
||||||
|
this.state.field.length === 0;
|
||||||
|
if (skipCommentLine) {
|
||||||
|
this.info.comment_lines++;
|
||||||
|
// Skip full comment line
|
||||||
|
} else {
|
||||||
|
// Activate records emition if above from_line
|
||||||
|
if (
|
||||||
|
this.state.enabled === false &&
|
||||||
|
this.info.lines +
|
||||||
|
(this.state.wasRowDelimiter === true ? 1 : 0) >=
|
||||||
|
from_line
|
||||||
|
) {
|
||||||
|
this.state.enabled = true;
|
||||||
|
this.__resetField();
|
||||||
|
this.__resetRecord();
|
||||||
|
pos += recordDelimiterLength - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// Skip if line is empty and skip_empty_lines activated
|
||||||
|
if (
|
||||||
|
skip_empty_lines === true &&
|
||||||
|
this.state.wasQuoting === false &&
|
||||||
|
this.state.record.length === 0 &&
|
||||||
|
this.state.field.length === 0
|
||||||
|
) {
|
||||||
|
this.info.empty_lines++;
|
||||||
|
pos += recordDelimiterLength - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
this.info.bytes = this.state.bufBytesStart + pos;
|
||||||
|
const errField = this.__onField();
|
||||||
|
if (errField !== undefined) return errField;
|
||||||
|
this.info.bytes =
|
||||||
|
this.state.bufBytesStart + pos + recordDelimiterLength;
|
||||||
|
const errRecord = this.__onRecord(push);
|
||||||
|
if (errRecord !== undefined) return errRecord;
|
||||||
|
if (to !== -1 && this.info.records >= to) {
|
||||||
|
this.state.stop = true;
|
||||||
|
close();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.state.commenting = false;
|
||||||
|
pos += recordDelimiterLength - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (this.state.commenting) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
comment !== null &&
|
||||||
|
(comment_no_infix === false ||
|
||||||
|
(this.state.record.length === 0 &&
|
||||||
|
this.state.field.length === 0))
|
||||||
|
) {
|
||||||
|
const commentCount = this.__compareBytes(comment, buf, pos, chr);
|
||||||
|
if (commentCount !== 0) {
|
||||||
|
this.state.commenting = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const delimiterLength = this.__isDelimiter(buf, pos, chr);
|
||||||
|
if (delimiterLength !== 0) {
|
||||||
|
this.info.bytes = this.state.bufBytesStart + pos;
|
||||||
|
const errField = this.__onField();
|
||||||
|
if (errField !== undefined) return errField;
|
||||||
|
pos += delimiterLength - 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.state.commenting === false) {
|
||||||
|
if (
|
||||||
|
max_record_size !== 0 &&
|
||||||
|
this.state.record_length + this.state.field.length > max_record_size
|
||||||
|
) {
|
||||||
|
return this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"CSV_MAX_RECORD_SIZE",
|
||||||
|
[
|
||||||
|
"Max Record Size:",
|
||||||
|
"record exceed the maximum number of tolerated bytes",
|
||||||
|
`of ${max_record_size}`,
|
||||||
|
`at line ${this.info.lines}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const lappend =
|
||||||
|
ltrim === false ||
|
||||||
|
this.state.quoting === true ||
|
||||||
|
this.state.field.length !== 0 ||
|
||||||
|
!this.__isCharTrimable(buf, pos);
|
||||||
|
// rtrim in non quoting is handle in __onField
|
||||||
|
const rappend = rtrim === false || this.state.wasQuoting === false;
|
||||||
|
if (lappend === true && rappend === true) {
|
||||||
|
this.state.field.append(chr);
|
||||||
|
} else if (rtrim === true && !this.__isCharTrimable(buf, pos)) {
|
||||||
|
return this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE",
|
||||||
|
[
|
||||||
|
"Invalid Closing Quote:",
|
||||||
|
"found non trimable byte after quote",
|
||||||
|
`at line ${this.info.lines}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
if (lappend === false) {
|
||||||
|
pos += this.__isCharTrimable(buf, pos) - 1;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (end === true) {
|
||||||
|
// Ensure we are not ending in a quoting state
|
||||||
|
if (this.state.quoting === true) {
|
||||||
|
const err = this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"CSV_QUOTE_NOT_CLOSED",
|
||||||
|
[
|
||||||
|
"Quote Not Closed:",
|
||||||
|
`the parsing is finished with an opening quote at line ${this.info.lines}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
if (err !== undefined) return err;
|
||||||
|
} else {
|
||||||
|
// Skip last line if it has no characters
|
||||||
|
if (
|
||||||
|
this.state.wasQuoting === true ||
|
||||||
|
this.state.record.length !== 0 ||
|
||||||
|
this.state.field.length !== 0
|
||||||
|
) {
|
||||||
|
this.info.bytes = this.state.bufBytesStart + pos;
|
||||||
|
const errField = this.__onField();
|
||||||
|
if (errField !== undefined) return errField;
|
||||||
|
const errRecord = this.__onRecord(push);
|
||||||
|
if (errRecord !== undefined) return errRecord;
|
||||||
|
} else if (this.state.wasRowDelimiter === true) {
|
||||||
|
this.info.empty_lines++;
|
||||||
|
} else if (this.state.commenting === true) {
|
||||||
|
this.info.comment_lines++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.state.bufBytesStart += pos;
|
||||||
|
this.state.previousBuf = buf.slice(pos);
|
||||||
|
}
|
||||||
|
if (this.state.wasRowDelimiter === true) {
|
||||||
|
this.info.lines++;
|
||||||
|
this.state.wasRowDelimiter = false;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
__onRecord: function (push) {
|
||||||
|
const {
|
||||||
|
columns,
|
||||||
|
group_columns_by_name,
|
||||||
|
encoding,
|
||||||
|
info,
|
||||||
|
from,
|
||||||
|
relax_column_count,
|
||||||
|
relax_column_count_less,
|
||||||
|
relax_column_count_more,
|
||||||
|
raw,
|
||||||
|
skip_records_with_empty_values,
|
||||||
|
} = this.options;
|
||||||
|
const { enabled, record } = this.state;
|
||||||
|
if (enabled === false) {
|
||||||
|
return this.__resetRecord();
|
||||||
|
}
|
||||||
|
// Convert the first line into column names
|
||||||
|
const recordLength = record.length;
|
||||||
|
if (columns === true) {
|
||||||
|
if (skip_records_with_empty_values === true && isRecordEmpty(record)) {
|
||||||
|
this.__resetRecord();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return this.__firstLineToColumns(record);
|
||||||
|
}
|
||||||
|
if (columns === false && this.info.records === 0) {
|
||||||
|
this.state.expectedRecordLength = recordLength;
|
||||||
|
}
|
||||||
|
if (recordLength !== this.state.expectedRecordLength) {
|
||||||
|
const err =
|
||||||
|
columns === false
|
||||||
|
? new CsvError(
|
||||||
|
"CSV_RECORD_INCONSISTENT_FIELDS_LENGTH",
|
||||||
|
[
|
||||||
|
"Invalid Record Length:",
|
||||||
|
`expect ${this.state.expectedRecordLength},`,
|
||||||
|
`got ${recordLength} on line ${this.info.lines}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
{
|
||||||
|
record: record,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
: new CsvError(
|
||||||
|
"CSV_RECORD_INCONSISTENT_COLUMNS",
|
||||||
|
[
|
||||||
|
"Invalid Record Length:",
|
||||||
|
`columns length is ${columns.length},`, // rename columns
|
||||||
|
`got ${recordLength} on line ${this.info.lines}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
{
|
||||||
|
record: record,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
if (
|
||||||
|
relax_column_count === true ||
|
||||||
|
(relax_column_count_less === true &&
|
||||||
|
recordLength < this.state.expectedRecordLength) ||
|
||||||
|
(relax_column_count_more === true &&
|
||||||
|
recordLength > this.state.expectedRecordLength)
|
||||||
|
) {
|
||||||
|
this.info.invalid_field_length++;
|
||||||
|
this.state.error = err;
|
||||||
|
// Error is undefined with skip_records_with_error
|
||||||
|
} else {
|
||||||
|
const finalErr = this.__error(err);
|
||||||
|
if (finalErr) return finalErr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (skip_records_with_empty_values === true && isRecordEmpty(record)) {
|
||||||
|
this.__resetRecord();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (this.state.recordHasError === true) {
|
||||||
|
this.__resetRecord();
|
||||||
|
this.state.recordHasError = false;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.info.records++;
|
||||||
|
if (from === 1 || this.info.records >= from) {
|
||||||
|
const { objname } = this.options;
|
||||||
|
// With columns, records are object
|
||||||
|
if (columns !== false) {
|
||||||
|
const obj = {};
|
||||||
|
// Transform record array to an object
|
||||||
|
for (let i = 0, l = record.length; i < l; i++) {
|
||||||
|
if (columns[i] === undefined || columns[i].disabled) continue;
|
||||||
|
// Turn duplicate columns into an array
|
||||||
|
if (
|
||||||
|
group_columns_by_name === true &&
|
||||||
|
obj[columns[i].name] !== undefined
|
||||||
|
) {
|
||||||
|
if (Array.isArray(obj[columns[i].name])) {
|
||||||
|
obj[columns[i].name] = obj[columns[i].name].concat(record[i]);
|
||||||
|
} else {
|
||||||
|
obj[columns[i].name] = [obj[columns[i].name], record[i]];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
obj[columns[i].name] = record[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Without objname (default)
|
||||||
|
if (raw === true || info === true) {
|
||||||
|
const extRecord = Object.assign(
|
||||||
|
{ record: obj },
|
||||||
|
raw === true
|
||||||
|
? { raw: this.state.rawBuffer.toString(encoding) }
|
||||||
|
: {},
|
||||||
|
info === true ? { info: this.__infoRecord() } : {},
|
||||||
|
);
|
||||||
|
const err = this.__push(
|
||||||
|
objname === undefined ? extRecord : [obj[objname], extRecord],
|
||||||
|
push,
|
||||||
|
);
|
||||||
|
if (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const err = this.__push(
|
||||||
|
objname === undefined ? obj : [obj[objname], obj],
|
||||||
|
push,
|
||||||
|
);
|
||||||
|
if (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Without columns, records are array
|
||||||
|
} else {
|
||||||
|
if (raw === true || info === true) {
|
||||||
|
const extRecord = Object.assign(
|
||||||
|
{ record: record },
|
||||||
|
raw === true
|
||||||
|
? { raw: this.state.rawBuffer.toString(encoding) }
|
||||||
|
: {},
|
||||||
|
info === true ? { info: this.__infoRecord() } : {},
|
||||||
|
);
|
||||||
|
const err = this.__push(
|
||||||
|
objname === undefined ? extRecord : [record[objname], extRecord],
|
||||||
|
push,
|
||||||
|
);
|
||||||
|
if (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const err = this.__push(
|
||||||
|
objname === undefined ? record : [record[objname], record],
|
||||||
|
push,
|
||||||
|
);
|
||||||
|
if (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.__resetRecord();
|
||||||
|
},
|
||||||
|
__firstLineToColumns: function (record) {
|
||||||
|
const { firstLineToHeaders } = this.state;
|
||||||
|
try {
|
||||||
|
const headers =
|
||||||
|
firstLineToHeaders === undefined
|
||||||
|
? record
|
||||||
|
: firstLineToHeaders.call(null, record);
|
||||||
|
if (!Array.isArray(headers)) {
|
||||||
|
return this.__error(
|
||||||
|
new CsvError(
|
||||||
|
"CSV_INVALID_COLUMN_MAPPING",
|
||||||
|
[
|
||||||
|
"Invalid Column Mapping:",
|
||||||
|
"expect an array from column function,",
|
||||||
|
`got ${JSON.stringify(headers)}`,
|
||||||
|
],
|
||||||
|
this.options,
|
||||||
|
this.__infoField(),
|
||||||
|
{
|
||||||
|
headers: headers,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const normalizedHeaders = normalize_columns_array(headers);
|
||||||
|
this.state.expectedRecordLength = normalizedHeaders.length;
|
||||||
|
this.options.columns = normalizedHeaders;
|
||||||
|
this.__resetRecord();
|
||||||
|
return;
|
||||||
|
} catch (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
__resetRecord: function () {
|
||||||
|
if (this.options.raw === true) {
|
||||||
|
this.state.rawBuffer.reset();
|
||||||
|
}
|
||||||
|
this.state.error = undefined;
|
||||||
|
this.state.record = [];
|
||||||
|
this.state.record_length = 0;
|
||||||
|
},
|
||||||
|
__onField: function () {
|
||||||
|
const { cast, encoding, rtrim, max_record_size } = this.options;
|
||||||
|
const { enabled, wasQuoting } = this.state;
|
||||||
|
// Short circuit for the from_line options
|
||||||
|
if (enabled === false) {
|
||||||
|
return this.__resetField();
|
||||||
|
}
|
||||||
|
let field = this.state.field.toString(encoding);
|
||||||
|
if (rtrim === true && wasQuoting === false) {
|
||||||
|
field = field.trimRight();
|
||||||
|
}
|
||||||
|
if (cast === true) {
|
||||||
|
const [err, f] = this.__cast(field);
|
||||||
|
if (err !== undefined) return err;
|
||||||
|
field = f;
|
||||||
|
}
|
||||||
|
this.state.record.push(field);
|
||||||
|
// Increment record length if record size must not exceed a limit
|
||||||
|
if (max_record_size !== 0 && typeof field === "string") {
|
||||||
|
this.state.record_length += field.length;
|
||||||
|
}
|
||||||
|
this.__resetField();
|
||||||
|
},
|
||||||
|
__resetField: function () {
|
||||||
|
this.state.field.reset();
|
||||||
|
this.state.wasQuoting = false;
|
||||||
|
},
|
||||||
|
__push: function (record, push) {
|
||||||
|
const { on_record } = this.options;
|
||||||
|
if (on_record !== undefined) {
|
||||||
|
const info = this.__infoRecord();
|
||||||
|
try {
|
||||||
|
record = on_record.call(null, record, info);
|
||||||
|
} catch (err) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
if (record === undefined || record === null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
push(record);
|
||||||
|
},
|
||||||
|
// Return a tuple with the error and the casted value
|
||||||
|
__cast: function (field) {
|
||||||
|
const { columns, relax_column_count } = this.options;
|
||||||
|
const isColumns = Array.isArray(columns);
|
||||||
|
// Dont loose time calling cast
|
||||||
|
// because the final record is an object
|
||||||
|
// and this field can't be associated to a key present in columns
|
||||||
|
if (
|
||||||
|
isColumns === true &&
|
||||||
|
relax_column_count &&
|
||||||
|
this.options.columns.length <= this.state.record.length
|
||||||
|
) {
|
||||||
|
return [undefined, undefined];
|
||||||
|
}
|
||||||
|
if (this.state.castField !== null) {
|
||||||
|
try {
|
||||||
|
const info = this.__infoField();
|
||||||
|
return [undefined, this.state.castField.call(null, field, info)];
|
||||||
|
} catch (err) {
|
||||||
|
return [err];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.__isFloat(field)) {
|
||||||
|
return [undefined, parseFloat(field)];
|
||||||
|
} else if (this.options.cast_date !== false) {
|
||||||
|
const info = this.__infoField();
|
||||||
|
return [undefined, this.options.cast_date.call(null, field, info)];
|
||||||
|
}
|
||||||
|
return [undefined, field];
|
||||||
|
},
|
||||||
|
// Helper to test if a character is a space or a line delimiter
|
||||||
|
__isCharTrimable: function (buf, pos) {
|
||||||
|
const isTrim = (buf, pos) => {
|
||||||
|
const { timchars } = this.state;
|
||||||
|
loop1: for (let i = 0; i < timchars.length; i++) {
|
||||||
|
const timchar = timchars[i];
|
||||||
|
for (let j = 0; j < timchar.length; j++) {
|
||||||
|
if (timchar[j] !== buf[pos + j]) continue loop1;
|
||||||
|
}
|
||||||
|
return timchar.length;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
};
|
||||||
|
return isTrim(buf, pos);
|
||||||
|
},
|
||||||
|
// Keep it in case we implement the `cast_int` option
|
||||||
|
// __isInt(value){
|
||||||
|
// // return Number.isInteger(parseInt(value))
|
||||||
|
// // return !isNaN( parseInt( obj ) );
|
||||||
|
// return /^(\-|\+)?[1-9][0-9]*$/.test(value)
|
||||||
|
// }
|
||||||
|
__isFloat: function (value) {
|
||||||
|
return value - parseFloat(value) + 1 >= 0; // Borrowed from jquery
|
||||||
|
},
|
||||||
|
__compareBytes: function (sourceBuf, targetBuf, targetPos, firstByte) {
|
||||||
|
if (sourceBuf[0] !== firstByte) return 0;
|
||||||
|
const sourceLength = sourceBuf.length;
|
||||||
|
for (let i = 1; i < sourceLength; i++) {
|
||||||
|
if (sourceBuf[i] !== targetBuf[targetPos + i]) return 0;
|
||||||
|
}
|
||||||
|
return sourceLength;
|
||||||
|
},
|
||||||
|
__isDelimiter: function (buf, pos, chr) {
|
||||||
|
const { delimiter, ignore_last_delimiters } = this.options;
|
||||||
|
if (
|
||||||
|
ignore_last_delimiters === true &&
|
||||||
|
this.state.record.length === this.options.columns.length - 1
|
||||||
|
) {
|
||||||
|
return 0;
|
||||||
|
} else if (
|
||||||
|
ignore_last_delimiters !== false &&
|
||||||
|
typeof ignore_last_delimiters === "number" &&
|
||||||
|
this.state.record.length === ignore_last_delimiters - 1
|
||||||
|
) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
loop1: for (let i = 0; i < delimiter.length; i++) {
|
||||||
|
const del = delimiter[i];
|
||||||
|
if (del[0] === chr) {
|
||||||
|
for (let j = 1; j < del.length; j++) {
|
||||||
|
if (del[j] !== buf[pos + j]) continue loop1;
|
||||||
|
}
|
||||||
|
return del.length;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
},
|
||||||
|
__isRecordDelimiter: function (chr, buf, pos) {
|
||||||
|
const { record_delimiter } = this.options;
|
||||||
|
const recordDelimiterLength = record_delimiter.length;
|
||||||
|
loop1: for (let i = 0; i < recordDelimiterLength; i++) {
|
||||||
|
const rd = record_delimiter[i];
|
||||||
|
const rdLength = rd.length;
|
||||||
|
if (rd[0] !== chr) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
for (let j = 1; j < rdLength; j++) {
|
||||||
|
if (rd[j] !== buf[pos + j]) {
|
||||||
|
continue loop1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rd.length;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
},
|
||||||
|
__isEscape: function (buf, pos, chr) {
|
||||||
|
const { escape } = this.options;
|
||||||
|
if (escape === null) return false;
|
||||||
|
const l = escape.length;
|
||||||
|
if (escape[0] === chr) {
|
||||||
|
for (let i = 0; i < l; i++) {
|
||||||
|
if (escape[i] !== buf[pos + i]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
__isQuote: function (buf, pos) {
|
||||||
|
const { quote } = this.options;
|
||||||
|
if (quote === null) return false;
|
||||||
|
const l = quote.length;
|
||||||
|
for (let i = 0; i < l; i++) {
|
||||||
|
if (quote[i] !== buf[pos + i]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
// Attempt to detect the record delimiter from the data itself at `pos`.
// On success, the detected delimiter is appended to
// `options.record_delimiter`, `state.recordDelimiterMaxLength` is
// updated, and the delimiter's byte length is returned; otherwise 0.
__autoDiscoverRecordDelimiter: function (buf, pos) {
  const { encoding } = this.options;
  // Note, we don't need to cache this information in state,
  // It is only called on the first line until we find out a suitable
  // record delimiter.
  const rds = [
    // Important, the windows line ending must be before mac os 9
    Buffer.from("\r\n", encoding),
    Buffer.from("\n", encoding),
    Buffer.from("\r", encoding),
  ];
  loop: for (let i = 0; i < rds.length; i++) {
    const l = rds[i].length;
    for (let j = 0; j < l; j++) {
      if (rds[i][j] !== buf[pos + j]) {
        continue loop;
      }
    }
    // Remember the discovered delimiter for the rest of the parsing.
    this.options.record_delimiter.push(rds[i]);
    this.state.recordDelimiterMaxLength = rds[i].length;
    return rds[i].length;
  }
  return 0;
},
|
||||||
|
// Central error funnel. Accepts an Error instance or a message string.
// With `skip_records_with_error`, the current record is flagged for
// skipping, the optional `on_skip` callback is notified, and
// `undefined` is returned so parsing continues; otherwise the error is
// returned for the caller to propagate.
__error: function (msg) {
  const { encoding, raw, skip_records_with_error } = this.options;
  const err = typeof msg === "string" ? new Error(msg) : msg;
  if (skip_records_with_error) {
    this.state.recordHasError = true;
    if (this.options.on_skip !== undefined) {
      this.options.on_skip(
        err,
        // Provide the raw chunk only when the `raw` option is active.
        raw ? this.state.rawBuffer.toString(encoding) : undefined,
      );
    }
    // this.emit('skip', err, raw ? this.state.rawBuffer.toString(encoding) : undefined);
    return undefined;
  } else {
    return err;
  }
},
|
||||||
|
__infoDataSet: function () {
|
||||||
|
return {
|
||||||
|
...this.info,
|
||||||
|
columns: this.options.columns,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
__infoRecord: function () {
|
||||||
|
const { columns, raw, encoding } = this.options;
|
||||||
|
return {
|
||||||
|
...this.__infoDataSet(),
|
||||||
|
error: this.state.error,
|
||||||
|
header: columns === true,
|
||||||
|
index: this.state.record.length,
|
||||||
|
raw: raw ? this.state.rawBuffer.toString(encoding) : undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
__infoField: function () {
|
||||||
|
const { columns } = this.options;
|
||||||
|
const isColumns = Array.isArray(columns);
|
||||||
|
return {
|
||||||
|
...this.__infoRecord(),
|
||||||
|
column:
|
||||||
|
isColumns === true
|
||||||
|
? columns.length > this.state.record.length
|
||||||
|
? columns[this.state.record.length].name
|
||||||
|
: null
|
||||||
|
: this.state.record.length,
|
||||||
|
quoting: this.state.wasQuoting,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export { transform, CsvError };
|
68
node_modules/csv-parse/lib/api/init_state.js
generated
vendored
Normal file
68
node_modules/csv-parse/lib/api/init_state.js
generated
vendored
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
import ResizeableBuffer from "../utils/ResizeableBuffer.js";
|
||||||
|
|
||||||
|
// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
const np = 12; // `\f`, form feed / new page, 0x0C in hexadecimal, 12 in decimal
const cr = 13; // `\r`, carriage return, 0x0D in hexadecimal, 13 in decimal
const nl = 10; // `\n`, newline, 0x0A in hexadecimal, 10 in decimal
const space = 32; // ` `, space, 0x20 in hexadecimal, 32 in decimal
const tab = 9; // `\t`, horizontal tab, 0x09 in hexadecimal, 9 in decimal
|
||||||
|
|
||||||
|
// Build the parser's initial mutable state from the normalized options.
// Called once per parsing session; every field here is read and/or
// mutated by the parser as bytes are consumed.
const init_state = function (options) {
  return {
    bomSkipped: false,
    bufBytesStart: 0,
    castField: options.cast_function,
    commenting: false,
    // Current error encountered by a record
    error: undefined,
    // Records are emitted only once `from_line` is reached.
    enabled: options.from_line === 1,
    escaping: false,
    // True when escape and quote are the same byte sequence (the common
    // `""` doubling convention).
    escapeIsQuote:
      Buffer.isBuffer(options.escape) &&
      Buffer.isBuffer(options.quote) &&
      Buffer.compare(options.escape, options.quote) === 0,
    // columns can be `false`, `true`, `Array`
    expectedRecordLength: Array.isArray(options.columns)
      ? options.columns.length
      : undefined,
    field: new ResizeableBuffer(20),
    firstLineToHeaders: options.cast_first_line_to_header,
    // Minimum number of trailing bytes that must remain buffered before
    // a token can be classified unambiguously.
    needMoreDataSize: Math.max(
      // Skip if the remaining buffer smaller than comment
      options.comment !== null ? options.comment.length : 0,
      // Skip if the remaining buffer can be delimiter
      ...options.delimiter.map((delimiter) => delimiter.length),
      // Skip if the remaining buffer can be escape sequence
      // NOTE(review): this term reads `options.quote` although the
      // comment above mentions the escape sequence — confirm intended.
      options.quote !== null ? options.quote.length : 0,
    ),
    previousBuf: undefined,
    quoting: false,
    stop: false,
    rawBuffer: new ResizeableBuffer(100),
    record: [],
    recordHasError: false,
    record_length: 0,
    recordDelimiterMaxLength:
      options.record_delimiter.length === 0
        ? 0
        : Math.max(...options.record_delimiter.map((v) => v.length)),
    // First byte of space and tab in the target encoding, used by trim.
    trimChars: [
      Buffer.from(" ", options.encoding)[0],
      Buffer.from("\t", options.encoding)[0],
    ],
    wasQuoting: false,
    wasRowDelimiter: false,
    // Whitespace byte sequences (\r \n \f space \t) re-encoded into the
    // target encoding, used when trimming fields.
    timchars: [
      Buffer.from(Buffer.from([cr], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([nl], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([np], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([space], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([tab], "utf8").toString(), options.encoding),
    ],
  };
};
|
||||||
|
|
||||||
|
export { init_state };
|
32
node_modules/csv-parse/lib/api/normalize_columns_array.js
generated
vendored
Normal file
32
node_modules/csv-parse/lib/api/normalize_columns_array.js
generated
vendored
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
import { CsvError } from "./CsvError.js";
|
||||||
|
import { is_object } from "../utils/is_object.js";
|
||||||
|
|
||||||
|
// Normalize a user-provided `columns` array into an array of column
// descriptor objects. Nullish/false entries become `{disabled: true}`,
// strings become `{name}`, and object literals are validated to carry a
// string `name`. Throws a CsvError on any other entry type.
const normalize_columns_array = function (columns) {
  const normalized = [];
  const total = columns.length;
  for (let i = 0; i < total; i++) {
    const column = columns[i];
    if (column === undefined || column === null || column === false) {
      // A disabled column is parsed but excluded from the output record.
      normalized[i] = { disabled: true };
      continue;
    }
    if (typeof column === "string") {
      normalized[i] = { name: column };
      continue;
    }
    if (!is_object(column)) {
      throw new CsvError("CSV_INVALID_COLUMN_DEFINITION", [
        "Invalid column definition:",
        "expect a string or a literal object,",
        `got ${JSON.stringify(column)} at position ${i}`,
      ]);
    }
    if (typeof column.name !== "string") {
      throw new CsvError("CSV_OPTION_COLUMNS_MISSING_NAME", [
        "Option columns missing name:",
        `property "name" is required at position ${i}`,
        "when column is an object literal",
      ]);
    }
    normalized[i] = column;
  }
  return normalized;
};
|
||||||
|
|
||||||
|
export { normalize_columns_array };
|
691
node_modules/csv-parse/lib/api/normalize_options.js
generated
vendored
Normal file
691
node_modules/csv-parse/lib/api/normalize_options.js
generated
vendored
Normal file
|
@ -0,0 +1,691 @@
|
||||||
|
import { normalize_columns_array } from "./normalize_columns_array.js";
|
||||||
|
import { CsvError } from "./CsvError.js";
|
||||||
|
import { underscore } from "../utils/underscore.js";
|
||||||
|
|
||||||
|
const normalize_options = function (opts) {
|
||||||
|
const options = {};
|
||||||
|
// Merge with user options
|
||||||
|
for (const opt in opts) {
|
||||||
|
options[underscore(opt)] = opts[opt];
|
||||||
|
}
|
||||||
|
// Normalize option `encoding`
|
||||||
|
// Note: defined first because other options depends on it
|
||||||
|
// to convert chars/strings into buffers.
|
||||||
|
if (options.encoding === undefined || options.encoding === true) {
|
||||||
|
options.encoding = "utf8";
|
||||||
|
} else if (options.encoding === null || options.encoding === false) {
|
||||||
|
options.encoding = null;
|
||||||
|
} else if (
|
||||||
|
typeof options.encoding !== "string" &&
|
||||||
|
options.encoding !== null
|
||||||
|
) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_ENCODING",
|
||||||
|
[
|
||||||
|
"Invalid option encoding:",
|
||||||
|
"encoding must be a string or null to return a buffer,",
|
||||||
|
`got ${JSON.stringify(options.encoding)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `bom`
|
||||||
|
if (
|
||||||
|
options.bom === undefined ||
|
||||||
|
options.bom === null ||
|
||||||
|
options.bom === false
|
||||||
|
) {
|
||||||
|
options.bom = false;
|
||||||
|
} else if (options.bom !== true) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_BOM",
|
||||||
|
[
|
||||||
|
"Invalid option bom:",
|
||||||
|
"bom must be true,",
|
||||||
|
`got ${JSON.stringify(options.bom)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `cast`
|
||||||
|
options.cast_function = null;
|
||||||
|
if (
|
||||||
|
options.cast === undefined ||
|
||||||
|
options.cast === null ||
|
||||||
|
options.cast === false ||
|
||||||
|
options.cast === ""
|
||||||
|
) {
|
||||||
|
options.cast = undefined;
|
||||||
|
} else if (typeof options.cast === "function") {
|
||||||
|
options.cast_function = options.cast;
|
||||||
|
options.cast = true;
|
||||||
|
} else if (options.cast !== true) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_CAST",
|
||||||
|
[
|
||||||
|
"Invalid option cast:",
|
||||||
|
"cast must be true or a function,",
|
||||||
|
`got ${JSON.stringify(options.cast)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `cast_date`
|
||||||
|
if (
|
||||||
|
options.cast_date === undefined ||
|
||||||
|
options.cast_date === null ||
|
||||||
|
options.cast_date === false ||
|
||||||
|
options.cast_date === ""
|
||||||
|
) {
|
||||||
|
options.cast_date = false;
|
||||||
|
} else if (options.cast_date === true) {
|
||||||
|
options.cast_date = function (value) {
|
||||||
|
const date = Date.parse(value);
|
||||||
|
return !isNaN(date) ? new Date(date) : value;
|
||||||
|
};
|
||||||
|
} else if (typeof options.cast_date !== "function") {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_CAST_DATE",
|
||||||
|
[
|
||||||
|
"Invalid option cast_date:",
|
||||||
|
"cast_date must be true or a function,",
|
||||||
|
`got ${JSON.stringify(options.cast_date)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `columns`
|
||||||
|
options.cast_first_line_to_header = null;
|
||||||
|
if (options.columns === true) {
|
||||||
|
// Fields in the first line are converted as-is to columns
|
||||||
|
options.cast_first_line_to_header = undefined;
|
||||||
|
} else if (typeof options.columns === "function") {
|
||||||
|
options.cast_first_line_to_header = options.columns;
|
||||||
|
options.columns = true;
|
||||||
|
} else if (Array.isArray(options.columns)) {
|
||||||
|
options.columns = normalize_columns_array(options.columns);
|
||||||
|
} else if (
|
||||||
|
options.columns === undefined ||
|
||||||
|
options.columns === null ||
|
||||||
|
options.columns === false
|
||||||
|
) {
|
||||||
|
options.columns = false;
|
||||||
|
} else {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_COLUMNS",
|
||||||
|
[
|
||||||
|
"Invalid option columns:",
|
||||||
|
"expect an array, a function or true,",
|
||||||
|
`got ${JSON.stringify(options.columns)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `group_columns_by_name`
|
||||||
|
if (
|
||||||
|
options.group_columns_by_name === undefined ||
|
||||||
|
options.group_columns_by_name === null ||
|
||||||
|
options.group_columns_by_name === false
|
||||||
|
) {
|
||||||
|
options.group_columns_by_name = false;
|
||||||
|
} else if (options.group_columns_by_name !== true) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME",
|
||||||
|
[
|
||||||
|
"Invalid option group_columns_by_name:",
|
||||||
|
"expect an boolean,",
|
||||||
|
`got ${JSON.stringify(options.group_columns_by_name)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
} else if (options.columns === false) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME",
|
||||||
|
[
|
||||||
|
"Invalid option group_columns_by_name:",
|
||||||
|
"the `columns` mode must be activated.",
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `comment`
|
||||||
|
if (
|
||||||
|
options.comment === undefined ||
|
||||||
|
options.comment === null ||
|
||||||
|
options.comment === false ||
|
||||||
|
options.comment === ""
|
||||||
|
) {
|
||||||
|
options.comment = null;
|
||||||
|
} else {
|
||||||
|
if (typeof options.comment === "string") {
|
||||||
|
options.comment = Buffer.from(options.comment, options.encoding);
|
||||||
|
}
|
||||||
|
if (!Buffer.isBuffer(options.comment)) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_COMMENT",
|
||||||
|
[
|
||||||
|
"Invalid option comment:",
|
||||||
|
"comment must be a buffer or a string,",
|
||||||
|
`got ${JSON.stringify(options.comment)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `comment_no_infix`
|
||||||
|
if (
|
||||||
|
options.comment_no_infix === undefined ||
|
||||||
|
options.comment_no_infix === null ||
|
||||||
|
options.comment_no_infix === false
|
||||||
|
) {
|
||||||
|
options.comment_no_infix = false;
|
||||||
|
} else if (options.comment_no_infix !== true) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_COMMENT",
|
||||||
|
[
|
||||||
|
"Invalid option comment_no_infix:",
|
||||||
|
"value must be a boolean,",
|
||||||
|
`got ${JSON.stringify(options.comment_no_infix)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `delimiter`
|
||||||
|
const delimiter_json = JSON.stringify(options.delimiter);
|
||||||
|
if (!Array.isArray(options.delimiter))
|
||||||
|
options.delimiter = [options.delimiter];
|
||||||
|
if (options.delimiter.length === 0) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option delimiter:",
|
||||||
|
"delimiter must be a non empty string or buffer or array of string|buffer,",
|
||||||
|
`got ${delimiter_json}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
options.delimiter = options.delimiter.map(function (delimiter) {
|
||||||
|
if (delimiter === undefined || delimiter === null || delimiter === false) {
|
||||||
|
return Buffer.from(",", options.encoding);
|
||||||
|
}
|
||||||
|
if (typeof delimiter === "string") {
|
||||||
|
delimiter = Buffer.from(delimiter, options.encoding);
|
||||||
|
}
|
||||||
|
if (!Buffer.isBuffer(delimiter) || delimiter.length === 0) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option delimiter:",
|
||||||
|
"delimiter must be a non empty string or buffer or array of string|buffer,",
|
||||||
|
`got ${delimiter_json}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return delimiter;
|
||||||
|
});
|
||||||
|
// Normalize option `escape`
|
||||||
|
if (options.escape === undefined || options.escape === true) {
|
||||||
|
options.escape = Buffer.from('"', options.encoding);
|
||||||
|
} else if (typeof options.escape === "string") {
|
||||||
|
options.escape = Buffer.from(options.escape, options.encoding);
|
||||||
|
} else if (options.escape === null || options.escape === false) {
|
||||||
|
options.escape = null;
|
||||||
|
}
|
||||||
|
if (options.escape !== null) {
|
||||||
|
if (!Buffer.isBuffer(options.escape)) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `from`
|
||||||
|
if (options.from === undefined || options.from === null) {
|
||||||
|
options.from = 1;
|
||||||
|
} else {
|
||||||
|
if (typeof options.from === "string" && /\d+/.test(options.from)) {
|
||||||
|
options.from = parseInt(options.from);
|
||||||
|
}
|
||||||
|
if (Number.isInteger(options.from)) {
|
||||||
|
if (options.from < 0) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: from must be a positive integer, got ${JSON.stringify(opts.from)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: from must be an integer, got ${JSON.stringify(options.from)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `from_line`
|
||||||
|
if (options.from_line === undefined || options.from_line === null) {
|
||||||
|
options.from_line = 1;
|
||||||
|
} else {
|
||||||
|
if (
|
||||||
|
typeof options.from_line === "string" &&
|
||||||
|
/\d+/.test(options.from_line)
|
||||||
|
) {
|
||||||
|
options.from_line = parseInt(options.from_line);
|
||||||
|
}
|
||||||
|
if (Number.isInteger(options.from_line)) {
|
||||||
|
if (options.from_line <= 0) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: from_line must be a positive integer greater than 0, got ${JSON.stringify(opts.from_line)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: from_line must be an integer, got ${JSON.stringify(opts.from_line)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize options `ignore_last_delimiters`
|
||||||
|
if (
|
||||||
|
options.ignore_last_delimiters === undefined ||
|
||||||
|
options.ignore_last_delimiters === null
|
||||||
|
) {
|
||||||
|
options.ignore_last_delimiters = false;
|
||||||
|
} else if (typeof options.ignore_last_delimiters === "number") {
|
||||||
|
options.ignore_last_delimiters = Math.floor(options.ignore_last_delimiters);
|
||||||
|
if (options.ignore_last_delimiters === 0) {
|
||||||
|
options.ignore_last_delimiters = false;
|
||||||
|
}
|
||||||
|
} else if (typeof options.ignore_last_delimiters !== "boolean") {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_IGNORE_LAST_DELIMITERS",
|
||||||
|
[
|
||||||
|
"Invalid option `ignore_last_delimiters`:",
|
||||||
|
"the value must be a boolean value or an integer,",
|
||||||
|
`got ${JSON.stringify(options.ignore_last_delimiters)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (options.ignore_last_delimiters === true && options.columns === false) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_IGNORE_LAST_DELIMITERS_REQUIRES_COLUMNS",
|
||||||
|
[
|
||||||
|
"The option `ignore_last_delimiters`",
|
||||||
|
"requires the activation of the `columns` option",
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `info`
|
||||||
|
if (
|
||||||
|
options.info === undefined ||
|
||||||
|
options.info === null ||
|
||||||
|
options.info === false
|
||||||
|
) {
|
||||||
|
options.info = false;
|
||||||
|
} else if (options.info !== true) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: info must be true, got ${JSON.stringify(options.info)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `max_record_size`
|
||||||
|
if (
|
||||||
|
options.max_record_size === undefined ||
|
||||||
|
options.max_record_size === null ||
|
||||||
|
options.max_record_size === false
|
||||||
|
) {
|
||||||
|
options.max_record_size = 0;
|
||||||
|
} else if (
|
||||||
|
Number.isInteger(options.max_record_size) &&
|
||||||
|
options.max_record_size >= 0
|
||||||
|
) {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
typeof options.max_record_size === "string" &&
|
||||||
|
/\d+/.test(options.max_record_size)
|
||||||
|
) {
|
||||||
|
options.max_record_size = parseInt(options.max_record_size);
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: max_record_size must be a positive integer, got ${JSON.stringify(options.max_record_size)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `objname`
|
||||||
|
if (
|
||||||
|
options.objname === undefined ||
|
||||||
|
options.objname === null ||
|
||||||
|
options.objname === false
|
||||||
|
) {
|
||||||
|
options.objname = undefined;
|
||||||
|
} else if (Buffer.isBuffer(options.objname)) {
|
||||||
|
if (options.objname.length === 0) {
|
||||||
|
throw new Error(`Invalid Option: objname must be a non empty buffer`);
|
||||||
|
}
|
||||||
|
if (options.encoding === null) {
|
||||||
|
// Don't call `toString`, leave objname as a buffer
|
||||||
|
} else {
|
||||||
|
options.objname = options.objname.toString(options.encoding);
|
||||||
|
}
|
||||||
|
} else if (typeof options.objname === "string") {
|
||||||
|
if (options.objname.length === 0) {
|
||||||
|
throw new Error(`Invalid Option: objname must be a non empty string`);
|
||||||
|
}
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (typeof options.objname === "number") {
|
||||||
|
// if(options.objname.length === 0){
|
||||||
|
// throw new Error(`Invalid Option: objname must be a non empty string`);
|
||||||
|
// }
|
||||||
|
// Great, nothing to do
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: objname must be a string or a buffer, got ${options.objname}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (options.objname !== undefined) {
|
||||||
|
if (typeof options.objname === "number") {
|
||||||
|
if (options.columns !== false) {
|
||||||
|
throw Error(
|
||||||
|
"Invalid Option: objname index cannot be combined with columns or be defined as a field",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// A string or a buffer
|
||||||
|
if (options.columns === false) {
|
||||||
|
throw Error(
|
||||||
|
"Invalid Option: objname field must be combined with columns or be defined as an index",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `on_record`
|
||||||
|
if (options.on_record === undefined || options.on_record === null) {
|
||||||
|
options.on_record = undefined;
|
||||||
|
} else if (typeof options.on_record !== "function") {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_ON_RECORD",
|
||||||
|
[
|
||||||
|
"Invalid option `on_record`:",
|
||||||
|
"expect a function,",
|
||||||
|
`got ${JSON.stringify(options.on_record)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `on_skip`
|
||||||
|
// options.on_skip ??= (err, chunk) => {
|
||||||
|
// this.emit('skip', err, chunk);
|
||||||
|
// };
|
||||||
|
if (
|
||||||
|
options.on_skip !== undefined &&
|
||||||
|
options.on_skip !== null &&
|
||||||
|
typeof options.on_skip !== "function"
|
||||||
|
) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: on_skip must be a function, got ${JSON.stringify(options.on_skip)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `quote`
|
||||||
|
if (
|
||||||
|
options.quote === null ||
|
||||||
|
options.quote === false ||
|
||||||
|
options.quote === ""
|
||||||
|
) {
|
||||||
|
options.quote = null;
|
||||||
|
} else {
|
||||||
|
if (options.quote === undefined || options.quote === true) {
|
||||||
|
options.quote = Buffer.from('"', options.encoding);
|
||||||
|
} else if (typeof options.quote === "string") {
|
||||||
|
options.quote = Buffer.from(options.quote, options.encoding);
|
||||||
|
}
|
||||||
|
if (!Buffer.isBuffer(options.quote)) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `raw`
|
||||||
|
if (
|
||||||
|
options.raw === undefined ||
|
||||||
|
options.raw === null ||
|
||||||
|
options.raw === false
|
||||||
|
) {
|
||||||
|
options.raw = false;
|
||||||
|
} else if (options.raw !== true) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: raw must be true, got ${JSON.stringify(options.raw)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `record_delimiter`
|
||||||
|
if (options.record_delimiter === undefined) {
|
||||||
|
options.record_delimiter = [];
|
||||||
|
} else if (
|
||||||
|
typeof options.record_delimiter === "string" ||
|
||||||
|
Buffer.isBuffer(options.record_delimiter)
|
||||||
|
) {
|
||||||
|
if (options.record_delimiter.length === 0) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_RECORD_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option `record_delimiter`:",
|
||||||
|
"value must be a non empty string or buffer,",
|
||||||
|
`got ${JSON.stringify(options.record_delimiter)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
options.record_delimiter = [options.record_delimiter];
|
||||||
|
} else if (!Array.isArray(options.record_delimiter)) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_RECORD_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option `record_delimiter`:",
|
||||||
|
"value must be a string, a buffer or array of string|buffer,",
|
||||||
|
`got ${JSON.stringify(options.record_delimiter)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
options.record_delimiter = options.record_delimiter.map(function (rd, i) {
|
||||||
|
if (typeof rd !== "string" && !Buffer.isBuffer(rd)) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_RECORD_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option `record_delimiter`:",
|
||||||
|
"value must be a string, a buffer or array of string|buffer",
|
||||||
|
`at index ${i},`,
|
||||||
|
`got ${JSON.stringify(rd)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
} else if (rd.length === 0) {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_OPTION_RECORD_DELIMITER",
|
||||||
|
[
|
||||||
|
"Invalid option `record_delimiter`:",
|
||||||
|
"value must be a non empty string or buffer",
|
||||||
|
`at index ${i},`,
|
||||||
|
`got ${JSON.stringify(rd)}`,
|
||||||
|
],
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (typeof rd === "string") {
|
||||||
|
rd = Buffer.from(rd, options.encoding);
|
||||||
|
}
|
||||||
|
return rd;
|
||||||
|
});
|
||||||
|
// Normalize option `relax_column_count`
|
||||||
|
if (typeof options.relax_column_count === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.relax_column_count === undefined ||
|
||||||
|
options.relax_column_count === null
|
||||||
|
) {
|
||||||
|
options.relax_column_count = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: relax_column_count must be a boolean, got ${JSON.stringify(options.relax_column_count)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (typeof options.relax_column_count_less === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.relax_column_count_less === undefined ||
|
||||||
|
options.relax_column_count_less === null
|
||||||
|
) {
|
||||||
|
options.relax_column_count_less = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: relax_column_count_less must be a boolean, got ${JSON.stringify(options.relax_column_count_less)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (typeof options.relax_column_count_more === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.relax_column_count_more === undefined ||
|
||||||
|
options.relax_column_count_more === null
|
||||||
|
) {
|
||||||
|
options.relax_column_count_more = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: relax_column_count_more must be a boolean, got ${JSON.stringify(options.relax_column_count_more)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `relax_quotes`
|
||||||
|
if (typeof options.relax_quotes === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.relax_quotes === undefined ||
|
||||||
|
options.relax_quotes === null
|
||||||
|
) {
|
||||||
|
options.relax_quotes = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: relax_quotes must be a boolean, got ${JSON.stringify(options.relax_quotes)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `skip_empty_lines`
|
||||||
|
if (typeof options.skip_empty_lines === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.skip_empty_lines === undefined ||
|
||||||
|
options.skip_empty_lines === null
|
||||||
|
) {
|
||||||
|
options.skip_empty_lines = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: skip_empty_lines must be a boolean, got ${JSON.stringify(options.skip_empty_lines)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `skip_records_with_empty_values`
|
||||||
|
if (typeof options.skip_records_with_empty_values === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.skip_records_with_empty_values === undefined ||
|
||||||
|
options.skip_records_with_empty_values === null
|
||||||
|
) {
|
||||||
|
options.skip_records_with_empty_values = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: skip_records_with_empty_values must be a boolean, got ${JSON.stringify(options.skip_records_with_empty_values)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `skip_records_with_error`
|
||||||
|
if (typeof options.skip_records_with_error === "boolean") {
|
||||||
|
// Great, nothing to do
|
||||||
|
} else if (
|
||||||
|
options.skip_records_with_error === undefined ||
|
||||||
|
options.skip_records_with_error === null
|
||||||
|
) {
|
||||||
|
options.skip_records_with_error = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: skip_records_with_error must be a boolean, got ${JSON.stringify(options.skip_records_with_error)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `rtrim`
|
||||||
|
if (
|
||||||
|
options.rtrim === undefined ||
|
||||||
|
options.rtrim === null ||
|
||||||
|
options.rtrim === false
|
||||||
|
) {
|
||||||
|
options.rtrim = false;
|
||||||
|
} else if (options.rtrim !== true) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: rtrim must be a boolean, got ${JSON.stringify(options.rtrim)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `ltrim`
|
||||||
|
if (
|
||||||
|
options.ltrim === undefined ||
|
||||||
|
options.ltrim === null ||
|
||||||
|
options.ltrim === false
|
||||||
|
) {
|
||||||
|
options.ltrim = false;
|
||||||
|
} else if (options.ltrim !== true) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: ltrim must be a boolean, got ${JSON.stringify(options.ltrim)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize option `trim`
|
||||||
|
if (
|
||||||
|
options.trim === undefined ||
|
||||||
|
options.trim === null ||
|
||||||
|
options.trim === false
|
||||||
|
) {
|
||||||
|
options.trim = false;
|
||||||
|
} else if (options.trim !== true) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: trim must be a boolean, got ${JSON.stringify(options.trim)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Normalize options `trim`, `ltrim` and `rtrim`
|
||||||
|
if (options.trim === true && opts.ltrim !== false) {
|
||||||
|
options.ltrim = true;
|
||||||
|
} else if (options.ltrim !== true) {
|
||||||
|
options.ltrim = false;
|
||||||
|
}
|
||||||
|
if (options.trim === true && opts.rtrim !== false) {
|
||||||
|
options.rtrim = true;
|
||||||
|
} else if (options.rtrim !== true) {
|
||||||
|
options.rtrim = false;
|
||||||
|
}
|
||||||
|
// Normalize option `to`
|
||||||
|
if (options.to === undefined || options.to === null) {
|
||||||
|
options.to = -1;
|
||||||
|
} else {
|
||||||
|
if (typeof options.to === "string" && /\d+/.test(options.to)) {
|
||||||
|
options.to = parseInt(options.to);
|
||||||
|
}
|
||||||
|
if (Number.isInteger(options.to)) {
|
||||||
|
if (options.to <= 0) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: to must be a positive integer greater than 0, got ${JSON.stringify(opts.to)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: to must be an integer, got ${JSON.stringify(opts.to)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalize option `to_line`
|
||||||
|
if (options.to_line === undefined || options.to_line === null) {
|
||||||
|
options.to_line = -1;
|
||||||
|
} else {
|
||||||
|
if (typeof options.to_line === "string" && /\d+/.test(options.to_line)) {
|
||||||
|
options.to_line = parseInt(options.to_line);
|
||||||
|
}
|
||||||
|
if (Number.isInteger(options.to_line)) {
|
||||||
|
if (options.to_line <= 0) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: to_line must be a positive integer greater than 0, got ${JSON.stringify(opts.to_line)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid Option: to_line must be an integer, got ${JSON.stringify(opts.to_line)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return options;
|
||||||
|
};
|
||||||
|
|
||||||
|
export { normalize_options };
|
283
node_modules/csv-parse/lib/index.d.ts
generated
vendored
Normal file
283
node_modules/csv-parse/lib/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,283 @@
|
||||||
|
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
|
||||||
|
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import * as stream from "stream";
|
||||||
|
|
||||||
|
export type Callback = (err: CsvError | undefined, records: any | undefined, info: Info) => void;
|
||||||
|
|
||||||
|
export interface Parser extends stream.Transform {}
|
||||||
|
|
||||||
|
export class Parser {
|
||||||
|
constructor(options: Options);
|
||||||
|
|
||||||
|
__push(line: any): any;
|
||||||
|
|
||||||
|
__write(chars: any, end: any, callback: any): any;
|
||||||
|
|
||||||
|
readonly options: Options
|
||||||
|
|
||||||
|
readonly info: Info;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CastingContext {
|
||||||
|
readonly column: number | string;
|
||||||
|
readonly empty_lines: number;
|
||||||
|
readonly error: CsvError;
|
||||||
|
readonly header: boolean;
|
||||||
|
readonly index: number;
|
||||||
|
readonly quoting: boolean;
|
||||||
|
readonly lines: number;
|
||||||
|
readonly records: number;
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CastingFunction = (value: string, context: CastingContext) => any;
|
||||||
|
|
||||||
|
export type CastingDateFunction = (value: string, context: CastingContext) => Date;
|
||||||
|
|
||||||
|
export type ColumnOption = string | undefined | null | false | { name: string };
|
||||||
|
|
||||||
|
/*
|
||||||
|
Note, could not `extends stream.TransformOptions` because encoding can be
|
||||||
|
BufferEncoding and undefined as well as null which is not defined in the
|
||||||
|
extended type.
|
||||||
|
*/
|
||||||
|
export interface Options {
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to native types.
|
||||||
|
* @deprecated Use {@link cast}
|
||||||
|
*/
|
||||||
|
auto_parse?: boolean | CastingFunction;
|
||||||
|
autoParse?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
|
||||||
|
* @deprecated Use {@link cast_date}
|
||||||
|
*/
|
||||||
|
auto_parse_date?: boolean | CastingDateFunction;
|
||||||
|
autoParseDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
|
||||||
|
*/
|
||||||
|
bom?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to native types.
|
||||||
|
* If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
|
||||||
|
*/
|
||||||
|
cast?: boolean | CastingFunction;
|
||||||
|
/**
|
||||||
|
* If true, the parser will attempt to convert input string to dates.
|
||||||
|
* If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
|
||||||
|
*/
|
||||||
|
cast_date?: boolean | CastingDateFunction;
|
||||||
|
castDate?: boolean | CastingDateFunction;
|
||||||
|
/**
|
||||||
|
* List of fields as an array,
|
||||||
|
* a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
|
||||||
|
* default to null,
|
||||||
|
* affect the result data set in the sense that records will be objects instead of arrays.
|
||||||
|
*/
|
||||||
|
columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
|
||||||
|
/**
|
||||||
|
* Convert values into an array of values when columns are activated and
|
||||||
|
* when multiple columns of the same name are found.
|
||||||
|
*/
|
||||||
|
group_columns_by_name?: boolean;
|
||||||
|
groupColumnsByName?: boolean;
|
||||||
|
/**
|
||||||
|
* Treat all the characters after this one as a comment, default to '' (disabled).
|
||||||
|
*/
|
||||||
|
comment?: string;
|
||||||
|
/**
|
||||||
|
* Restrict the definition of comments to a full line. Comment characters
|
||||||
|
* defined in the middle of the line are not interpreted as such. The
|
||||||
|
* option require the activation of comments.
|
||||||
|
*/
|
||||||
|
comment_no_infix?: boolean;
|
||||||
|
/**
|
||||||
|
* Set the field delimiter. One character only, defaults to comma.
|
||||||
|
*/
|
||||||
|
delimiter?: string | string[] | Buffer;
|
||||||
|
/**
|
||||||
|
* Set the source and destination encoding, a value of `null` returns buffer instead of strings.
|
||||||
|
*/
|
||||||
|
encoding?: BufferEncoding | undefined;
|
||||||
|
/**
|
||||||
|
* Set the escape character, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
escape?: string | null | false | Buffer;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested number of records.
|
||||||
|
*/
|
||||||
|
from?: number;
|
||||||
|
/**
|
||||||
|
* Start handling records from the requested line number.
|
||||||
|
*/
|
||||||
|
from_line?: number;
|
||||||
|
fromLine?: number;
|
||||||
|
/**
|
||||||
|
* Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
|
||||||
|
*/
|
||||||
|
ignore_last_delimiters?: boolean | number;
|
||||||
|
/**
|
||||||
|
* Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
|
||||||
|
*/
|
||||||
|
info?: boolean;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
ltrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Maximum numer of characters to be contained in the field and line buffers before an exception is raised,
|
||||||
|
* used to guard against a wrong delimiter or record_delimiter,
|
||||||
|
* default to 128000 characters.
|
||||||
|
*/
|
||||||
|
max_record_size?: number;
|
||||||
|
maxRecordSize?: number;
|
||||||
|
/**
|
||||||
|
* Name of header-record title to name objects by.
|
||||||
|
*/
|
||||||
|
objname?: string;
|
||||||
|
/**
|
||||||
|
* Alter and filter records by executing a user defined function.
|
||||||
|
*/
|
||||||
|
on_record?: (record: any, context: CastingContext) => any;
|
||||||
|
onRecord?: (record: any, context: CastingContext) => any;
|
||||||
|
/**
|
||||||
|
* Optional character surrounding a field, one character only, defaults to double quotes.
|
||||||
|
*/
|
||||||
|
quote?: string | boolean | Buffer | null;
|
||||||
|
/**
|
||||||
|
* Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
|
||||||
|
*/
|
||||||
|
raw?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count?: boolean;
|
||||||
|
relaxColumnCount?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains less fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_less?: boolean;
|
||||||
|
relaxColumnCountLess?: boolean;
|
||||||
|
/**
|
||||||
|
* Discard inconsistent columns count when the record contains more fields than expected, default to false.
|
||||||
|
*/
|
||||||
|
relax_column_count_more?: boolean;
|
||||||
|
relaxColumnCountMore?: boolean;
|
||||||
|
/**
|
||||||
|
* Preserve quotes inside unquoted field.
|
||||||
|
*/
|
||||||
|
relax_quotes?: boolean;
|
||||||
|
relaxQuotes?: boolean;
|
||||||
|
/**
|
||||||
|
* One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
|
||||||
|
* Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
|
||||||
|
*/
|
||||||
|
record_delimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
recordDelimiter?: string | string[] | Buffer | Buffer[];
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
rtrim?: boolean;
|
||||||
|
/**
|
||||||
|
* Dont generate empty values for empty lines.
|
||||||
|
* Defaults to false
|
||||||
|
*/
|
||||||
|
skip_empty_lines?: boolean;
|
||||||
|
skipEmptyLines?: boolean;
|
||||||
|
/**
|
||||||
|
* Skip a line with error found inside and directly go process the next line.
|
||||||
|
*/
|
||||||
|
skip_records_with_error?: boolean;
|
||||||
|
skipRecordsWithError?: boolean;
|
||||||
|
/**
|
||||||
|
* Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
|
||||||
|
*/
|
||||||
|
skip_records_with_empty_values?: boolean;
|
||||||
|
skipRecordsWithEmptyValues?: boolean;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested number of records.
|
||||||
|
*/
|
||||||
|
to?: number;
|
||||||
|
/**
|
||||||
|
* Stop handling records after the requested line number.
|
||||||
|
*/
|
||||||
|
to_line?: number;
|
||||||
|
toLine?: number;
|
||||||
|
/**
|
||||||
|
* If true, ignore whitespace immediately around the delimiter, defaults to false.
|
||||||
|
* Does not remove whitespace in a quoted field.
|
||||||
|
*/
|
||||||
|
trim?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Info {
|
||||||
|
/**
|
||||||
|
* Count the number of lines being fully commented.
|
||||||
|
*/
|
||||||
|
readonly comment_lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed empty lines.
|
||||||
|
*/
|
||||||
|
readonly empty_lines: number;
|
||||||
|
/**
|
||||||
|
* The number of lines encountered in the source dataset, start at 1 for the first line.
|
||||||
|
*/
|
||||||
|
readonly lines: number;
|
||||||
|
/**
|
||||||
|
* Count the number of processed records.
|
||||||
|
*/
|
||||||
|
readonly records: number;
|
||||||
|
/**
|
||||||
|
* Count of the number of processed bytes.
|
||||||
|
*/
|
||||||
|
readonly bytes: number;
|
||||||
|
/**
|
||||||
|
* Number of non uniform records when `relax_column_count` is true.
|
||||||
|
*/
|
||||||
|
readonly invalid_field_length: number;
|
||||||
|
/**
|
||||||
|
* Normalized verion of `options.columns` when `options.columns` is true, boolean otherwise.
|
||||||
|
*/
|
||||||
|
readonly columns: boolean | { name: string }[] | { disabled: true }[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CsvErrorCode =
|
||||||
|
'CSV_INVALID_OPTION_BOM'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST'
|
||||||
|
| 'CSV_INVALID_OPTION_CAST_DATE'
|
||||||
|
| 'CSV_INVALID_OPTION_COLUMNS'
|
||||||
|
| 'CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME'
|
||||||
|
| 'CSV_INVALID_OPTION_COMMENT'
|
||||||
|
| 'CSV_INVALID_OPTION_DELIMITER'
|
||||||
|
| 'CSV_INVALID_OPTION_ON_RECORD'
|
||||||
|
| 'CSV_INVALID_CLOSING_QUOTE'
|
||||||
|
| 'INVALID_OPENING_QUOTE'
|
||||||
|
| 'CSV_INVALID_COLUMN_MAPPING'
|
||||||
|
| 'CSV_INVALID_ARGUMENT'
|
||||||
|
| 'CSV_INVALID_COLUMN_DEFINITION'
|
||||||
|
| 'CSV_MAX_RECORD_SIZE'
|
||||||
|
| 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
|
||||||
|
| 'CSV_QUOTE_NOT_CLOSED'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_FIELDS_LENGTH'
|
||||||
|
| 'CSV_RECORD_INCONSISTENT_COLUMNS'
|
||||||
|
| 'CSV_OPTION_COLUMNS_MISSING_NAME'
|
||||||
|
|
||||||
|
export class CsvError extends Error {
|
||||||
|
readonly code: CsvErrorCode;
|
||||||
|
[key: string]: any;
|
||||||
|
|
||||||
|
constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
|
||||||
|
}
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(input: Buffer | string, callback?: Callback): Parser;
|
||||||
|
declare function parse(options?: Options, callback?: Callback): Parser;
|
||||||
|
declare function parse(callback?: Callback): Parser;
|
||||||
|
|
||||||
|
// export default parse;
|
||||||
|
export { parse }
|
137
node_modules/csv-parse/lib/index.js
generated
vendored
Normal file
137
node_modules/csv-parse/lib/index.js
generated
vendored
Normal file
|
@ -0,0 +1,137 @@
|
||||||
|
/*
|
||||||
|
CSV Parse
|
||||||
|
|
||||||
|
Please look at the [project documentation](https://csv.js.org/parse/) for
|
||||||
|
additional information.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Transform } from "stream";
|
||||||
|
import { is_object } from "./utils/is_object.js";
|
||||||
|
import { transform } from "./api/index.js";
|
||||||
|
import { CsvError } from "./api/CsvError.js";
|
||||||
|
|
||||||
|
class Parser extends Transform {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
super({ ...{ readableObjectMode: true }, ...opts, encoding: null });
|
||||||
|
this.api = transform({
|
||||||
|
on_skip: (err, chunk) => {
|
||||||
|
this.emit("skip", err, chunk);
|
||||||
|
},
|
||||||
|
...opts,
|
||||||
|
});
|
||||||
|
// Backward compatibility
|
||||||
|
this.state = this.api.state;
|
||||||
|
this.options = this.api.options;
|
||||||
|
this.info = this.api.info;
|
||||||
|
}
|
||||||
|
// Implementation of `Transform._transform`
|
||||||
|
_transform(buf, _, callback) {
|
||||||
|
if (this.state.stop === true) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const err = this.api.parse(
|
||||||
|
buf,
|
||||||
|
false,
|
||||||
|
(record) => {
|
||||||
|
this.push(record);
|
||||||
|
},
|
||||||
|
() => {
|
||||||
|
this.push(null);
|
||||||
|
this.end();
|
||||||
|
// Fix #333 and break #410
|
||||||
|
// ko: api.stream.iterator.coffee
|
||||||
|
// ko with v21.4.0, ok with node v20.5.1: api.stream.finished # aborted (with generate())
|
||||||
|
// ko: api.stream.finished # aborted (with Readable)
|
||||||
|
// this.destroy()
|
||||||
|
// Fix #410 and partially break #333
|
||||||
|
// ok: api.stream.iterator.coffee
|
||||||
|
// ok: api.stream.finished # aborted (with generate())
|
||||||
|
// broken: api.stream.finished # aborted (with Readable)
|
||||||
|
this.on("end", this.destroy);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
if (err !== undefined) {
|
||||||
|
this.state.stop = true;
|
||||||
|
}
|
||||||
|
callback(err);
|
||||||
|
}
|
||||||
|
// Implementation of `Transform._flush`
|
||||||
|
_flush(callback) {
|
||||||
|
if (this.state.stop === true) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const err = this.api.parse(
|
||||||
|
undefined,
|
||||||
|
true,
|
||||||
|
(record) => {
|
||||||
|
this.push(record);
|
||||||
|
},
|
||||||
|
() => {
|
||||||
|
this.push(null);
|
||||||
|
this.on("end", this.destroy);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
callback(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const parse = function () {
|
||||||
|
let data, options, callback;
|
||||||
|
for (const i in arguments) {
|
||||||
|
const argument = arguments[i];
|
||||||
|
const type = typeof argument;
|
||||||
|
if (
|
||||||
|
data === undefined &&
|
||||||
|
(typeof argument === "string" || Buffer.isBuffer(argument))
|
||||||
|
) {
|
||||||
|
data = argument;
|
||||||
|
} else if (options === undefined && is_object(argument)) {
|
||||||
|
options = argument;
|
||||||
|
} else if (callback === undefined && type === "function") {
|
||||||
|
callback = argument;
|
||||||
|
} else {
|
||||||
|
throw new CsvError(
|
||||||
|
"CSV_INVALID_ARGUMENT",
|
||||||
|
["Invalid argument:", `got ${JSON.stringify(argument)} at index ${i}`],
|
||||||
|
options || {},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const parser = new Parser(options);
|
||||||
|
if (callback) {
|
||||||
|
const records =
|
||||||
|
options === undefined || options.objname === undefined ? [] : {};
|
||||||
|
parser.on("readable", function () {
|
||||||
|
let record;
|
||||||
|
while ((record = this.read()) !== null) {
|
||||||
|
if (options === undefined || options.objname === undefined) {
|
||||||
|
records.push(record);
|
||||||
|
} else {
|
||||||
|
records[record[0]] = record[1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
parser.on("error", function (err) {
|
||||||
|
callback(err, undefined, parser.api.__infoDataSet());
|
||||||
|
});
|
||||||
|
parser.on("end", function () {
|
||||||
|
callback(undefined, records, parser.api.__infoDataSet());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (data !== undefined) {
|
||||||
|
const writer = function () {
|
||||||
|
parser.write(data);
|
||||||
|
parser.end();
|
||||||
|
};
|
||||||
|
// Support Deno, Rollup doesnt provide a shim for setImmediate
|
||||||
|
if (typeof setImmediate === "function") {
|
||||||
|
setImmediate(writer);
|
||||||
|
} else {
|
||||||
|
setTimeout(writer, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return parser;
|
||||||
|
};
|
||||||
|
|
||||||
|
// export default parse
|
||||||
|
export { parse, Parser, CsvError };
|
11
node_modules/csv-parse/lib/stream.d.ts
generated
vendored
Normal file
11
node_modules/csv-parse/lib/stream.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
|
||||||
|
import { Options } from './index.js';
|
||||||
|
|
||||||
|
declare function parse(options?: Options): TransformStream;
|
||||||
|
// export default parse;
|
||||||
|
export { parse };
|
||||||
|
|
||||||
|
export {
|
||||||
|
CastingContext, CastingFunction, CastingDateFunction,
|
||||||
|
ColumnOption, Options, Info, CsvErrorCode, CsvError
|
||||||
|
} from './index.js';
|
36
node_modules/csv-parse/lib/stream.js
generated
vendored
Normal file
36
node_modules/csv-parse/lib/stream.js
generated
vendored
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
import { TransformStream, CountQueuingStrategy } from "node:stream/web";
|
||||||
|
import { transform } from "./api/index.js";
|
||||||
|
|
||||||
|
const parse = (opts) => {
|
||||||
|
const api = transform(opts);
|
||||||
|
let controller;
|
||||||
|
const enqueue = (record) => {
|
||||||
|
controller.enqueue(record);
|
||||||
|
};
|
||||||
|
const terminate = () => {
|
||||||
|
controller.terminate();
|
||||||
|
};
|
||||||
|
return new TransformStream(
|
||||||
|
{
|
||||||
|
start(ctr) {
|
||||||
|
controller = ctr;
|
||||||
|
},
|
||||||
|
transform(chunk) {
|
||||||
|
const error = api.parse(chunk, false, enqueue, terminate);
|
||||||
|
if (error) {
|
||||||
|
controller.error(error);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
flush() {
|
||||||
|
const error = api.parse(undefined, true, enqueue, terminate);
|
||||||
|
if (error) {
|
||||||
|
controller.error(error);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
new CountQueuingStrategy({ highWaterMark: 1024 }),
|
||||||
|
new CountQueuingStrategy({ highWaterMark: 1024 }),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export { parse };
|
11
node_modules/csv-parse/lib/sync.d.ts
generated
vendored
Normal file
11
node_modules/csv-parse/lib/sync.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
|
||||||
|
import { Options } from './index.js';
|
||||||
|
|
||||||
|
declare function parse(input: Buffer | string, options?: Options): any;
|
||||||
|
// export default parse;
|
||||||
|
export { parse };
|
||||||
|
|
||||||
|
export {
|
||||||
|
CastingContext, CastingFunction, CastingDateFunction,
|
||||||
|
ColumnOption, Options, Info, CsvErrorCode, CsvError
|
||||||
|
} from './index.js';
|
25
node_modules/csv-parse/lib/sync.js
generated
vendored
Normal file
25
node_modules/csv-parse/lib/sync.js
generated
vendored
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
import { CsvError, transform } from "./api/index.js";
|
||||||
|
|
||||||
|
const parse = function (data, opts = {}) {
|
||||||
|
if (typeof data === "string") {
|
||||||
|
data = Buffer.from(data);
|
||||||
|
}
|
||||||
|
const records = opts && opts.objname ? {} : [];
|
||||||
|
const parser = transform(opts);
|
||||||
|
const push = (record) => {
|
||||||
|
if (parser.options.objname === undefined) records.push(record);
|
||||||
|
else {
|
||||||
|
records[record[0]] = record[1];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
const close = () => {};
|
||||||
|
const err1 = parser.parse(data, false, push, close);
|
||||||
|
if (err1 !== undefined) throw err1;
|
||||||
|
const err2 = parser.parse(undefined, true, push, close);
|
||||||
|
if (err2 !== undefined) throw err2;
|
||||||
|
return records;
|
||||||
|
};
|
||||||
|
|
||||||
|
// export default parse
|
||||||
|
export { parse };
|
||||||
|
export { CsvError };
|
63
node_modules/csv-parse/lib/utils/ResizeableBuffer.js
generated
vendored
Normal file
63
node_modules/csv-parse/lib/utils/ResizeableBuffer.js
generated
vendored
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
class ResizeableBuffer {
|
||||||
|
constructor(size = 100) {
|
||||||
|
this.size = size;
|
||||||
|
this.length = 0;
|
||||||
|
this.buf = Buffer.allocUnsafe(size);
|
||||||
|
}
|
||||||
|
prepend(val) {
|
||||||
|
if (Buffer.isBuffer(val)) {
|
||||||
|
const length = this.length + val.length;
|
||||||
|
if (length >= this.size) {
|
||||||
|
this.resize();
|
||||||
|
if (length >= this.size) {
|
||||||
|
throw Error("INVALID_BUFFER_STATE");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const buf = this.buf;
|
||||||
|
this.buf = Buffer.allocUnsafe(this.size);
|
||||||
|
val.copy(this.buf, 0);
|
||||||
|
buf.copy(this.buf, val.length);
|
||||||
|
this.length += val.length;
|
||||||
|
} else {
|
||||||
|
const length = this.length++;
|
||||||
|
if (length === this.size) {
|
||||||
|
this.resize();
|
||||||
|
}
|
||||||
|
const buf = this.clone();
|
||||||
|
this.buf[0] = val;
|
||||||
|
buf.copy(this.buf, 1, 0, length);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
append(val) {
|
||||||
|
const length = this.length++;
|
||||||
|
if (length === this.size) {
|
||||||
|
this.resize();
|
||||||
|
}
|
||||||
|
this.buf[length] = val;
|
||||||
|
}
|
||||||
|
clone() {
|
||||||
|
return Buffer.from(this.buf.slice(0, this.length));
|
||||||
|
}
|
||||||
|
resize() {
|
||||||
|
const length = this.length;
|
||||||
|
this.size = this.size * 2;
|
||||||
|
const buf = Buffer.allocUnsafe(this.size);
|
||||||
|
this.buf.copy(buf, 0, 0, length);
|
||||||
|
this.buf = buf;
|
||||||
|
}
|
||||||
|
toString(encoding) {
|
||||||
|
if (encoding) {
|
||||||
|
return this.buf.slice(0, this.length).toString(encoding);
|
||||||
|
} else {
|
||||||
|
return Uint8Array.prototype.slice.call(this.buf.slice(0, this.length));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return this.toString("utf8");
|
||||||
|
}
|
||||||
|
reset() {
|
||||||
|
this.length = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default ResizeableBuffer;
|
5
node_modules/csv-parse/lib/utils/is_object.js
generated
vendored
Normal file
5
node_modules/csv-parse/lib/utils/is_object.js
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
const is_object = function (obj) {
|
||||||
|
return typeof obj === "object" && obj !== null && !Array.isArray(obj);
|
||||||
|
};
|
||||||
|
|
||||||
|
export { is_object };
|
7
node_modules/csv-parse/lib/utils/underscore.js
generated
vendored
Normal file
7
node_modules/csv-parse/lib/utils/underscore.js
generated
vendored
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
const underscore = function (str) {
|
||||||
|
return str.replace(/([A-Z])/g, function (_, match) {
|
||||||
|
return "_" + match.toLowerCase();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export { underscore };
|
156
node_modules/csv-parse/package.json
generated
vendored
Normal file
156
node_modules/csv-parse/package.json
generated
vendored
Normal file
|
@ -0,0 +1,156 @@
|
||||||
|
{
|
||||||
|
"version": "5.6.0",
|
||||||
|
"name": "csv-parse",
|
||||||
|
"description": "CSV parsing implementing the Node.js `stream.Transform` API",
|
||||||
|
"keywords": [
|
||||||
|
"csv",
|
||||||
|
"parse",
|
||||||
|
"parser",
|
||||||
|
"convert",
|
||||||
|
"tsv",
|
||||||
|
"stream",
|
||||||
|
"backend",
|
||||||
|
"frontend"
|
||||||
|
],
|
||||||
|
"author": "David Worms <david@adaltas.com> (https://www.adaltas.com)",
|
||||||
|
"contributors": [
|
||||||
|
"David Worms <david@adaltas.com> (https://www.adaltas.com)",
|
||||||
|
"Will White (https://github.com/willwhite)",
|
||||||
|
"Justin Latimer (https://github.com/justinlatimer)",
|
||||||
|
"jonseymour (https://github.com/jonseymour)",
|
||||||
|
"pascalopitz (https://github.com/pascalopitz)",
|
||||||
|
"Josh Pschorr (https://github.com/jpschorr)",
|
||||||
|
"Elad Ben-Israel (https://github.com/eladb)",
|
||||||
|
"Philippe Plantier (https://github.com/phipla)",
|
||||||
|
"Tim Oxley (https://github.com/timoxley)",
|
||||||
|
"Damon Oehlman (https://github.com/DamonOehlman)",
|
||||||
|
"Alexandru Topliceanu (https://github.com/topliceanu)",
|
||||||
|
"Visup (https://github.com/visup)",
|
||||||
|
"Edmund von der Burg (https://github.com/evdb)",
|
||||||
|
"Douglas Christopher Wilson (https://github.com/dougwilson)",
|
||||||
|
"Joe Eaves (https://github.com/Joeasaurus)",
|
||||||
|
"Mark Stosberg (https://github.com/markstos)"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"import": {
|
||||||
|
"types": "./lib/index.d.ts",
|
||||||
|
"default": "./lib/index.js"
|
||||||
|
},
|
||||||
|
"require": {
|
||||||
|
"types": "./dist/cjs/index.d.cts",
|
||||||
|
"default": "./dist/cjs/index.cjs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"./sync": {
|
||||||
|
"import": {
|
||||||
|
"types": "./lib/sync.d.ts",
|
||||||
|
"default": "./lib/sync.js"
|
||||||
|
},
|
||||||
|
"require": {
|
||||||
|
"types": "./dist/cjs/sync.d.cts",
|
||||||
|
"default": "./dist/cjs/sync.cjs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"./stream": {
|
||||||
|
"import": {
|
||||||
|
"types": "./lib/stream.d.ts",
|
||||||
|
"default": "./lib/stream.js"
|
||||||
|
},
|
||||||
|
"require": {
|
||||||
|
"types": "./dist/cjs/stream.d.cts",
|
||||||
|
"default": "./dist/cjs/stream.cjs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"./browser/esm": {
|
||||||
|
"types": "./dist/esm/index.d.ts",
|
||||||
|
"default": "./dist/esm/index.js"
|
||||||
|
},
|
||||||
|
"./browser/esm/sync": {
|
||||||
|
"types": "./dist/esm/sync.d.ts",
|
||||||
|
"default": "./dist/esm/sync.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^9.15.0",
|
||||||
|
"@rollup/plugin-node-resolve": "^15.3.0",
|
||||||
|
"@types/mocha": "^10.0.9",
|
||||||
|
"@types/node": "^22.9.1",
|
||||||
|
"coffeescript": "^2.7.0",
|
||||||
|
"csv-generate": "^4.4.2",
|
||||||
|
"csv-spectrum": "^2.0.0",
|
||||||
|
"each": "^2.7.2",
|
||||||
|
"eslint": "^9.15.0",
|
||||||
|
"eslint-config-prettier": "^9.1.0",
|
||||||
|
"eslint-plugin-mocha": "^10.5.0",
|
||||||
|
"eslint-plugin-prettier": "^5.2.1",
|
||||||
|
"mocha": "^10.8.2",
|
||||||
|
"pad": "^3.3.0",
|
||||||
|
"prettier": "^3.3.3",
|
||||||
|
"rollup": "^4.27.3",
|
||||||
|
"rollup-plugin-node-builtins": "^2.1.2",
|
||||||
|
"rollup-plugin-node-globals": "^1.4.0",
|
||||||
|
"should": "^13.2.3",
|
||||||
|
"stream-transform": "^3.3.3",
|
||||||
|
"ts-node": "^10.9.2",
|
||||||
|
"typescript": "^5.6.3"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"lib"
|
||||||
|
],
|
||||||
|
"homepage": "https://csv.js.org/parse",
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "./dist/cjs/index.cjs",
|
||||||
|
"mocha": {
|
||||||
|
"inline-diffs": true,
|
||||||
|
"loader": "./test/loaders/all.js",
|
||||||
|
"recursive": true,
|
||||||
|
"reporter": "spec",
|
||||||
|
"require": [
|
||||||
|
"should"
|
||||||
|
],
|
||||||
|
"throw-deprecation": false,
|
||||||
|
"timeout": 40000
|
||||||
|
},
|
||||||
|
"lint-staged": {
|
||||||
|
"*.js": "npm run lint:fix",
|
||||||
|
"*.md": "prettier -w"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/adaltas/node-csv.git",
|
||||||
|
"directory": "packages/csv-parse"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "npm run build:rollup && npm run build:ts",
|
||||||
|
"build:rollup": "npx rollup -c",
|
||||||
|
"build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm",
|
||||||
|
"postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;",
|
||||||
|
"lint:check": "eslint",
|
||||||
|
"lint:fix": "eslint --fix",
|
||||||
|
"lint:ts": "tsc --noEmit true",
|
||||||
|
"preversion": "npm run build && git add dist",
|
||||||
|
"test": "mocha 'test/**/*.{coffee,ts}'",
|
||||||
|
"test:legacy": "mocha --ignore test/api.web_stream.coffee --ignore test/api.web_stream.ts --ignore test/api.stream.finished.coffee --ignore test/api.stream.iterator.coffee --loader=./test/loaders/legacy/all.js 'test/**/*.{coffee,ts}'"
|
||||||
|
},
|
||||||
|
"type": "module",
|
||||||
|
"types": "dist/esm/index.d.ts",
|
||||||
|
"typesVersions": {
|
||||||
|
"*": {
|
||||||
|
".": [
|
||||||
|
"dist/esm/index.d.ts"
|
||||||
|
],
|
||||||
|
"sync": [
|
||||||
|
"dist/esm/sync.d.ts"
|
||||||
|
],
|
||||||
|
"browser/esm": [
|
||||||
|
"dist/esm/index.d.ts"
|
||||||
|
],
|
||||||
|
"browser/esm/sync": [
|
||||||
|
"dist/esm/sync.d.ts"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gitHead": "cc1235a58de98dd9eab0665c7b1d03213e9633c7"
|
||||||
|
}
|
18
package-lock.json
generated
Normal file
18
package-lock.json
generated
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
{
|
||||||
|
"name": "awesome-deepseek-integration",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"dependencies": {
|
||||||
|
"csv-parse": "^5.6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/csv-parse": {
|
||||||
|
"version": "5.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.6.0.tgz",
|
||||||
|
"integrity": "sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q==",
|
||||||
|
"license": "MIT"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
5
package.json
Normal file
5
package.json
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
{
|
||||||
|
"dependencies": {
|
||||||
|
"csv-parse": "^5.6.0"
|
||||||
|
}
|
||||||
|
}
|
Loading…
Add table
Reference in a new issue