master
raj 2 years ago
commit 803e6c0fe0

97
node_modules/.package-lock.json generated vendored

@ -3253,6 +3253,11 @@
"url": "https://github.com/sponsors/ueberdosis"
}
},
"node_modules/@tokenizer/token": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz",
"integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="
},
"node_modules/@tootallnate/once": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
@ -5711,6 +5716,22 @@
"xtend": "^4.0.0"
}
},
"node_modules/file-type": {
"version": "18.5.0",
"resolved": "https://registry.npmjs.org/file-type/-/file-type-18.5.0.tgz",
"integrity": "sha512-yvpl5U868+V6PqXHMmsESpg6unQ5GfnPssl4dxdJudBrr9qy7Fddt7EVX1VLlddFfe8Gj9N7goCZH22FXuSQXQ==",
"dependencies": {
"readable-web-to-node-stream": "^3.0.2",
"strtok3": "^7.0.0",
"token-types": "^5.0.1"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"url": "https://github.com/sindresorhus/file-type?sponsor=1"
}
},
"node_modules/file-uri-to-path": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-2.0.0.tgz",
@ -6560,6 +6581,20 @@
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
},
"node_modules/image-type": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/image-type/-/image-type-5.2.0.tgz",
"integrity": "sha512-f0+6qHeGfyEh1HhFGPUWZb+Dqqm6raKeeAR6Opt01wBBIQL32/1wpZkPQm8gcliB/Ws6oiX2ofFYXB57+CV0iQ==",
"dependencies": {
"file-type": "^18.1.0"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/immediate": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
@ -8282,6 +8317,18 @@
"through": "~2.3"
}
},
"node_modules/peek-readable": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz",
"integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==",
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
@ -9010,6 +9057,21 @@
"node": ">= 6"
}
},
"node_modules/readable-web-to-node-stream": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz",
"integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==",
"dependencies": {
"readable-stream": "^3.6.0"
},
"engines": {
"node": ">=8"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/readdir-glob": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz",
@ -9893,6 +9955,25 @@
"node": ">=0.10.0"
}
},
"node_modules/strtok3": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz",
"integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==",
"dependencies": {
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.0.0"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
@ -10212,6 +10293,22 @@
"node": ">=0.6"
}
},
"node_modules/token-types": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz",
"integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==",
"dependencies": {
"@tokenizer/token": "^0.3.0",
"ieee754": "^1.2.1"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/touch": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",

@ -0,0 +1,19 @@
[![npm version](https://badge.fury.io/js/%40tokenizer%2Ftoken.svg)](https://www.npmjs.com/package/@tokenizer/token)
[![npm downloads](http://img.shields.io/npm/dm/@tokenizer/token.svg)](https://npmcharts.com/compare/@tokenizer/token?interval=30)
# @tokenizer/token
TypeScript definition of a [strtok3](https://github.com/Borewit/strtok3) token.
## Licence
(The MIT License)
Copyright (c) 2020 Borewit
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,30 @@
/**
* Read-only token
* See https://github.com/Borewit/strtok3 for more information
*/
/**
 * Read-only token.
 * A token describes how to decode a value of type `Value` from a byte buffer.
 * See https://github.com/Borewit/strtok3 for more information.
 */
export interface IGetToken<Value, Array extends Uint8Array = Uint8Array> {
	/**
	 * Length of encoded token in bytes
	 */
	len: number;

	/**
	 * Decode value from buffer at offset
	 * @param array - Uint8Array to read the decoded value from
	 * @param offset - Decode offset
	 * @return decoded value
	 */
	get(array: Array, offset: number): Value;
}

/**
 * Read/write token: extends the read-only token with the ability to
 * encode a value back into a byte buffer.
 */
export interface IToken<Value, Array extends Uint8Array = Uint8Array> extends IGetToken<Value, Array> {
	/**
	 * Encode value to buffer
	 * @param array - Uint8Array to write the encoded value to
	 * @param offset - Buffer write offset
	 * @param value - Value of type `Value` to encode
	 * @return offset plus number of bytes written
	 */
	put(array: Array, offset: number, value: Value): number;
}

@ -0,0 +1,33 @@
{
"name": "@tokenizer/token",
"version": "0.3.0",
"description": "TypeScript definition for strtok3 token",
"main": "",
"types": "index.d.ts",
"files": [
"index.d.ts"
],
"keywords": [
"token",
"interface",
"tokenizer",
"TypeScript"
],
"author": {
"name": "Borewit",
"url": "https://github.com/Borewit"
},
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/Borewit/tokenizer-token.git"
},
"bugs": {
"url": "https://github.com/Borewit/tokenizer-token/issues"
},
"typeScriptVersion": "3.0",
"dependencies": {},
"devDependencies": {
"@types/node": "^13.1.0"
}
}

@ -0,0 +1,29 @@
import type {FileTypeResult} from './core.js';
/**
Detect the file type of a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
@example
```
import {fileTypeFromStream} from 'file-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
const response = await fetch(url);
const fileType = await fileTypeFromStream(response.body);
console.log(fileType);
//=> {ext: 'jpg', mime: 'image/jpeg'}
```
*/
export declare function fileTypeFromStream(stream: ReadableStream): Promise<FileTypeResult | undefined>;
export {
fileTypeFromBuffer,
fileTypeFromBlob,
supportedExtensions,
supportedMimeTypes,
type FileTypeResult,
type FileExtension,
type MimeType,
} from './core.js';

15
node_modules/file-type/browser.js generated vendored

@ -0,0 +1,15 @@
import {ReadableWebToNodeStream} from 'readable-web-to-node-stream';
import {fileTypeFromStream as coreFileTypeFromStream} from './core.js';
/**
Detect the file type of a web `ReadableStream` by bridging it to a Node.js
readable stream and delegating to the core tokenizer-based detection.

@param stream - A web `ReadableStream` (e.g. `Response.body`) containing file data.
@returns The detected file type, or `undefined` when there is no match.
*/
export async function fileTypeFromStream(stream) {
	const readableWebToNodeStream = new ReadableWebToNodeStream(stream);
	try {
		// Delegate detection to the core implementation.
		return await coreFileTypeFromStream(readableWebToNodeStream);
	} finally {
		// Always release the bridged stream, even when detection throws —
		// mirrors the try/finally pattern used by fileTypeFromFile in index.js.
		await readableWebToNodeStream.close();
	}
}
export {
fileTypeFromTokenizer,
fileTypeFromBuffer,
fileTypeStream,
} from './core.js';

437
node_modules/file-type/core.d.ts generated vendored

@ -0,0 +1,437 @@
import type {Readable as ReadableStream} from 'node:stream';
import type {ITokenizer} from 'strtok3';
export type FileExtension =
| 'jpg'
| 'png'
| 'apng'
| 'gif'
| 'webp'
| 'flif'
| 'xcf'
| 'cr2'
| 'cr3'
| 'orf'
| 'arw'
| 'dng'
| 'nef'
| 'rw2'
| 'raf'
| 'tif'
| 'bmp'
| 'icns'
| 'jxr'
| 'psd'
| 'indd'
| 'zip'
| 'tar'
| 'rar'
| 'gz'
| 'bz2'
| '7z'
| 'dmg'
| 'mp4'
| 'mid'
| 'mkv'
| 'webm'
| 'mov'
| 'avi'
| 'mpg'
| 'mp2'
| 'mp3'
| 'm4a'
| 'ogg'
| 'opus'
| 'flac'
| 'wav'
| 'qcp'
| 'amr'
| 'pdf'
| 'epub'
| 'mobi'
| 'elf'
| 'exe'
| 'swf'
| 'rtf'
| 'woff'
| 'woff2'
| 'eot'
| 'ttf'
| 'otf'
| 'ico'
| 'flv'
| 'ps'
| 'xz'
| 'sqlite'
| 'nes'
| 'crx'
| 'xpi'
| 'cab'
| 'deb'
| 'ar'
| 'rpm'
| 'Z'
| 'lz'
| 'cfb'
| 'mxf'
| 'mts'
| 'wasm'
| 'blend'
| 'bpg'
| 'docx'
| 'pptx'
| 'xlsx'
| '3gp'
| '3g2'
| 'j2c'
| 'jp2'
| 'jpm'
| 'jpx'
| 'mj2'
| 'aif'
| 'odt'
| 'ods'
| 'odp'
| 'xml'
| 'heic'
| 'cur'
| 'ktx'
| 'ape'
| 'wv'
| 'asf'
| 'dcm'
| 'mpc'
| 'ics'
| 'glb'
| 'pcap'
| 'dsf'
| 'lnk'
| 'alias'
| 'voc'
| 'ac3'
| 'm4b'
| 'm4p'
| 'm4v'
| 'f4a'
| 'f4b'
| 'f4p'
| 'f4v'
| 'mie'
| 'ogv'
| 'ogm'
| 'oga'
| 'spx'
| 'ogx'
| 'arrow'
| 'shp'
| 'aac'
| 'mp1'
| 'it'
| 's3m'
| 'xm'
| 'ai'
| 'skp'
| 'avif'
| 'eps'
| 'lzh'
| 'pgp'
| 'asar'
| 'stl'
| 'chm'
| '3mf'
| 'zst'
| 'jxl'
| 'vcf'
| 'jls'
| 'pst'
| 'dwg'
| 'parquet'
| 'class'
| 'arj'
| 'cpio'
| 'ace'
| 'avro'
| 'icc'
; // eslint-disable-line semi-style
export type MimeType =
| 'image/jpeg'
| 'image/png'
| 'image/gif'
| 'image/webp'
| 'image/flif'
| 'image/x-xcf'
| 'image/x-canon-cr2'
| 'image/x-canon-cr3'
| 'image/tiff'
| 'image/bmp'
| 'image/icns'
| 'image/vnd.ms-photo'
| 'image/vnd.adobe.photoshop'
| 'application/x-indesign'
| 'application/epub+zip'
| 'application/x-xpinstall'
| 'application/vnd.oasis.opendocument.text'
| 'application/vnd.oasis.opendocument.spreadsheet'
| 'application/vnd.oasis.opendocument.presentation'
| 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
| 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
| 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
| 'application/zip'
| 'application/x-tar'
| 'application/x-rar-compressed'
| 'application/gzip'
| 'application/x-bzip2'
| 'application/x-7z-compressed'
| 'application/x-apple-diskimage'
| 'video/mp4'
| 'audio/midi'
| 'video/x-matroska'
| 'video/webm'
| 'video/quicktime'
| 'video/vnd.avi'
| 'audio/vnd.wave'
| 'audio/qcelp'
| 'audio/x-ms-asf'
| 'video/x-ms-asf'
| 'application/vnd.ms-asf'
| 'video/mpeg'
| 'video/3gpp'
| 'audio/mpeg'
| 'audio/mp4' // RFC 4337
| 'audio/opus'
| 'video/ogg'
| 'audio/ogg'
| 'application/ogg'
| 'audio/x-flac'
| 'audio/ape'
| 'audio/wavpack'
| 'audio/amr'
| 'application/pdf'
| 'application/x-elf'
| 'application/x-msdownload'
| 'application/x-shockwave-flash'
| 'application/rtf'
| 'application/wasm'
| 'font/woff'
| 'font/woff2'
| 'application/vnd.ms-fontobject'
| 'font/ttf'
| 'font/otf'
| 'image/x-icon'
| 'video/x-flv'
| 'application/postscript'
| 'application/eps'
| 'application/x-xz'
| 'application/x-sqlite3'
| 'application/x-nintendo-nes-rom'
| 'application/x-google-chrome-extension'
| 'application/vnd.ms-cab-compressed'
| 'application/x-deb'
| 'application/x-unix-archive'
| 'application/x-rpm'
| 'application/x-compress'
| 'application/x-lzip'
| 'application/x-cfb'
| 'application/x-mie'
| 'application/x-apache-arrow'
| 'application/mxf'
| 'video/mp2t'
| 'application/x-blender'
| 'image/bpg'
| 'image/j2c'
| 'image/jp2'
| 'image/jpx'
| 'image/jpm'
| 'image/mj2'
| 'audio/aiff'
| 'application/xml'
| 'application/x-mobipocket-ebook'
| 'image/heif'
| 'image/heif-sequence'
| 'image/heic'
| 'image/heic-sequence'
| 'image/ktx'
| 'application/dicom'
| 'audio/x-musepack'
| 'text/calendar'
| 'text/vcard'
| 'model/gltf-binary'
| 'application/vnd.tcpdump.pcap'
| 'audio/x-dsf' // Non-standard
| 'application/x.ms.shortcut' // Invented by us
| 'application/x.apple.alias' // Invented by us
| 'audio/x-voc'
| 'audio/vnd.dolby.dd-raw'
| 'audio/x-m4a'
| 'image/apng'
| 'image/x-olympus-orf'
| 'image/x-sony-arw'
| 'image/x-adobe-dng'
| 'image/x-nikon-nef'
| 'image/x-panasonic-rw2'
| 'image/x-fujifilm-raf'
| 'video/x-m4v'
| 'video/3gpp2'
| 'application/x-esri-shape'
| 'audio/aac'
| 'audio/x-it'
| 'audio/x-s3m'
| 'audio/x-xm'
| 'video/MP1S'
| 'video/MP2P'
| 'application/vnd.sketchup.skp'
| 'image/avif'
| 'application/x-lzh-compressed'
| 'application/pgp-encrypted'
| 'application/x-asar'
| 'model/stl'
| 'application/vnd.ms-htmlhelp'
| 'model/3mf'
| 'image/jxl'
| 'application/zstd'
| 'image/jls'
| 'application/vnd.ms-outlook'
| 'image/vnd.dwg'
| 'application/x-parquet'
| 'application/java-vm'
| 'application/x-arj'
| 'application/x-cpio'
| 'application/x-ace-compressed'
| 'application/avro'
| 'application/vnd.iccprofile'
; // eslint-disable-line semi-style
export type FileTypeResult = {
/**
One of the supported [file types](https://github.com/sindresorhus/file-type#supported-file-types).
*/
readonly ext: FileExtension;
/**
The detected [MIME type](https://en.wikipedia.org/wiki/Internet_media_type).
*/
readonly mime: MimeType;
};
export type ReadableStreamWithFileType = ReadableStream & {
readonly fileType?: FileTypeResult;
};
/**
Detect the file type of a `Buffer`, `Uint8Array`, or `ArrayBuffer`.
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
If file access is available, it is recommended to use `.fromFile()` instead.
@param buffer - An Uint8Array or Buffer representing file data. It works best if the buffer contains the entire file, it may work with a smaller portion as well.
@returns The detected file type and MIME type, or `undefined` when there is no match.
*/
export function fileTypeFromBuffer(buffer: Uint8Array | ArrayBuffer): Promise<FileTypeResult | undefined>;
/**
Detect the file type of a Node.js [readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable).
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
@param stream - A readable stream representing file data.
@returns The detected file type and MIME type, or `undefined` when there is no match.
*/
export function fileTypeFromStream(stream: ReadableStream): Promise<FileTypeResult | undefined>;
/**
Detect the file type from an [`ITokenizer`](https://github.com/Borewit/strtok3#tokenizer) source.
This method is used internally, but can also be used for a special "tokenizer" reader.
A tokenizer propagates the internal read functions, allowing alternative transport mechanisms, to access files, to be implemented and used.
@param tokenizer - File source implementing the tokenizer interface.
@returns The detected file type and MIME type, or `undefined` when there is no match.
An example is [`@tokenizer/http`](https://github.com/Borewit/tokenizer-http), which requests data using [HTTP-range-requests](https://developer.mozilla.org/en-US/docs/Web/HTTP/Range_requests). A difference with a conventional stream and the [*tokenizer*](https://github.com/Borewit/strtok3#tokenizer), is that it is able to *ignore* (seek, fast-forward) in the stream. For example, you may only need and read the first 6 bytes, and the last 128 bytes, which may be an advantage in case reading the entire file would take longer.
@example
```
import {makeTokenizer} from '@tokenizer/http';
import {fileTypeFromTokenizer} from 'file-type';
const audioTrackUrl = 'https://test-audio.netlify.com/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/01%20-%20Diablo%20Swing%20Orchestra%20-%20Heroines.mp3';
const httpTokenizer = await makeTokenizer(audioTrackUrl);
const fileType = await fileTypeFromTokenizer(httpTokenizer);
console.log(fileType);
//=> {ext: 'mp3', mime: 'audio/mpeg'}
```
*/
export function fileTypeFromTokenizer(tokenizer: ITokenizer): Promise<FileTypeResult | undefined>;
/**
Supported file extensions.
*/
export const supportedExtensions: ReadonlySet<FileExtension>;
/**
Supported MIME types.
*/
export const supportedMimeTypes: ReadonlySet<MimeType>;
export type StreamOptions = {
/**
The default sample size in bytes.
@default 4100
*/
readonly sampleSize?: number;
};
/**
Returns a `Promise` which resolves to the original readable stream argument, but with an added `fileType` property, which is an object like the one returned from `fileTypeFromFile()`.
This method can be handy to put in between a stream, but it comes with a price.
Internally `stream()` builds up a buffer of `sampleSize` bytes, used as a sample, to determine the file type.
The sample size impacts the file detection resolution.
A smaller sample size will result in lower probability of the best file type detection.
**Note:** This method is only available when using Node.js.
**Note:** Requires Node.js 14 or later.
@param readableStream - A [readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) containing a file to examine.
@returns A `Promise` which resolves to the original readable stream argument, but with an added `fileType` property, which is an object like the one returned from `fileTypeFromFile()`.
@example
```
import got from 'got';
import {fileTypeStream} from 'file-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
const stream1 = got.stream(url);
const stream2 = await fileTypeStream(stream1, {sampleSize: 1024});
if (stream2.fileType?.mime === 'image/jpeg') {
// stream2 can be used to stream the JPEG image (from the very beginning of the stream)
}
```
*/
export function fileTypeStream(readableStream: ReadableStream, options?: StreamOptions): Promise<ReadableStreamWithFileType>;
/**
Detect the file type of a [`Blob`](https://nodejs.org/api/buffer.html#class-blob).
@example
```
import {fileTypeFromBlob} from 'file-type';
const blob = new Blob(['<?xml version="1.0" encoding="ISO-8859-1" ?>'], {
type: 'plain/text',
endings: 'native'
});
console.log(await fileTypeFromBlob(blob));
//=> {ext: 'txt', mime: 'plain/text'}
```
*/
export declare function fileTypeFromBlob(blob: Blob): Promise<FileTypeResult | undefined>;

1641
node_modules/file-type/core.js generated vendored

File diff suppressed because it is too large Load Diff

13
node_modules/file-type/index.d.ts generated vendored

@ -0,0 +1,13 @@
import type {FileTypeResult} from './core.js';
/**
Detect the file type of a file path.
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
@param path - The file path to parse.
@returns The detected file type and MIME type or `undefined` when there is no match.
*/
export function fileTypeFromFile(path: string): Promise<FileTypeResult | undefined>;
export * from './core.js';

13
node_modules/file-type/index.js generated vendored

@ -0,0 +1,13 @@
import * as strtok3 from 'strtok3';
import {fileTypeFromTokenizer} from './core.js';
/**
Detect the file type of a file on disk.

Opens the file as a strtok3 tokenizer, runs the core magic-number
detection on it, and closes the tokenizer regardless of the outcome.

@param path - The file path to examine.
@returns The detected file type, or `undefined` when there is no match.
*/
export async function fileTypeFromFile(path) {
	const fileTokenizer = await strtok3.fromFile(path);
	let result;
	try {
		result = await fileTypeFromTokenizer(fileTokenizer);
	} finally {
		// Release the underlying file handle whether detection succeeded or threw.
		await fileTokenizer.close();
	}
	return result;
}
export * from './core.js';

9
node_modules/file-type/license generated vendored

@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

241
node_modules/file-type/package.json generated vendored

@ -0,0 +1,241 @@
{
"name": "file-type",
"version": "18.5.0",
"description": "Detect the file type of a Buffer/Uint8Array/ArrayBuffer",
"license": "MIT",
"repository": "sindresorhus/file-type",
"funding": "https://github.com/sindresorhus/file-type?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": {
".": {
"node": "./index.js",
"default": "./browser.js"
},
"./core": "./core.js"
},
"engines": {
"node": ">=14.16"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts",
"browser.js",
"browser.d.ts",
"core.js",
"core.d.ts",
"supported.js",
"util.js"
],
"keywords": [
"mime",
"file",
"type",
"magic",
"archive",
"image",
"img",
"pic",
"picture",
"flash",
"photo",
"video",
"detect",
"check",
"is",
"exif",
"elf",
"exe",
"binary",
"buffer",
"uint8array",
"jpg",
"png",
"apng",
"gif",
"webp",
"flif",
"xcf",
"cr2",
"cr3",
"orf",
"arw",
"dng",
"nef",
"rw2",
"raf",
"tif",
"bmp",
"icns",
"jxr",
"psd",
"indd",
"zip",
"tar",
"rar",
"gz",
"bz2",
"7z",
"dmg",
"mp4",
"mid",
"mkv",
"webm",
"mov",
"avi",
"mpg",
"mp2",
"mp3",
"m4a",
"ogg",
"opus",
"flac",
"wav",
"amr",
"pdf",
"epub",
"mobi",
"swf",
"rtf",
"woff",
"woff2",
"eot",
"ttf",
"otf",
"ico",
"flv",
"ps",
"xz",
"sqlite",
"xpi",
"cab",
"deb",
"ar",
"rpm",
"Z",
"lz",
"cfb",
"mxf",
"mts",
"wasm",
"webassembly",
"blend",
"bpg",
"docx",
"pptx",
"xlsx",
"3gp",
"j2c",
"jp2",
"jpm",
"jpx",
"mj2",
"aif",
"odt",
"ods",
"odp",
"xml",
"heic",
"ics",
"glb",
"pcap",
"dsf",
"lnk",
"alias",
"voc",
"ac3",
"3g2",
"m4b",
"m4p",
"m4v",
"f4a",
"f4b",
"f4p",
"f4v",
"mie",
"qcp",
"asf",
"ogv",
"ogm",
"oga",
"spx",
"ogx",
"ape",
"wv",
"cur",
"nes",
"crx",
"ktx",
"dcm",
"mpc",
"arrow",
"shp",
"aac",
"mp1",
"it",
"s3m",
"xm",
"ai",
"skp",
"avif",
"eps",
"lzh",
"pgp",
"asar",
"stl",
"chm",
"3mf",
"zst",
"jxl",
"vcf",
"jls",
"pst",
"dwg",
"parquet",
"class",
"arj",
"cpio",
"ace",
"avro",
"icc"
],
"dependencies": {
"readable-web-to-node-stream": "^3.0.2",
"strtok3": "^7.0.0",
"token-types": "^5.0.1"
},
"devDependencies": {
"@tokenizer/token": "^0.3.0",
"@types/node": "^20.1.2",
"ava": "^5.2.0",
"commonmark": "^0.30.0",
"noop-stream": "^1.0.0",
"tsd": "^0.28.1",
"xo": "^0.54.2"
},
"xo": {
"envs": [
"node",
"browser"
],
"rules": {
"no-inner-declarations": "warn",
"no-await-in-loop": "warn",
"no-bitwise": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"unicorn/text-encoding-identifier-case": "off",
"unicorn/switch-case-braces": "off",
"unicorn/prefer-top-level-await": "off"
}
},
"ava": {
"serial": true
}
}

484
node_modules/file-type/readme.md generated vendored

@ -0,0 +1,484 @@
# file-type
> Detect the file type of a Buffer/Uint8Array/ArrayBuffer
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
This package is for detecting binary-based file formats, not text-based formats like `.txt`, `.csv`, `.svg`, etc.
We accept contributions for commonly used modern file formats, not historical or obscure ones. Open an issue first for discussion.
## Install
```sh
npm install file-type
```
**This package is a ESM package. Your project needs to be ESM too. [Read more](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c).**
If you use it with Webpack, you need the latest Webpack version and ensure you configure it correctly for ESM.
## Usage
#### Node.js
Determine file type from a file:
```js
import {fileTypeFromFile} from 'file-type';
console.log(await fileTypeFromFile('Unicorn.png'));
//=> {ext: 'png', mime: 'image/png'}
```
Determine file type from a Buffer, which may be a portion of the beginning of a file:
```js
import {fileTypeFromBuffer} from 'file-type';
import {readChunk} from 'read-chunk';
const buffer = await readChunk('Unicorn.png', {length: 4100});
console.log(await fileTypeFromBuffer(buffer));
//=> {ext: 'png', mime: 'image/png'}
```
Determine file type from a stream:
```js
import fs from 'node:fs';
import {fileTypeFromStream} from 'file-type';
const stream = fs.createReadStream('Unicorn.mp4');
console.log(await fileTypeFromStream(stream));
//=> {ext: 'mp4', mime: 'video/mp4'}
```
The stream method can also be used to read from a remote location:
```js
import got from 'got';
import {fileTypeFromStream} from 'file-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
const stream = got.stream(url);
console.log(await fileTypeFromStream(stream));
//=> {ext: 'jpg', mime: 'image/jpeg'}
```
Another stream example:
```js
import stream from 'node:stream';
import fs from 'node:fs';
import crypto from 'node:crypto';
import {fileTypeStream} from 'file-type';
const read = fs.createReadStream('encrypted.enc');
const decipher = crypto.createDecipheriv(alg, key, iv);
const streamWithFileType = await fileTypeStream(stream.pipeline(read, decipher));
console.log(streamWithFileType.fileType);
//=> {ext: 'mov', mime: 'video/quicktime'}
const write = fs.createWriteStream(`decrypted.${streamWithFileType.fileType.ext}`);
streamWithFileType.pipe(write);
```
#### Browser
```js
import {fileTypeFromStream} from 'file-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
const response = await fetch(url);
const fileType = await fileTypeFromStream(response.body);
console.log(fileType);
//=> {ext: 'jpg', mime: 'image/jpeg'}
```
## API
### fileTypeFromBuffer(buffer)
Detect the file type of a `Buffer`, `Uint8Array`, or `ArrayBuffer`.
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
If file access is available, it is recommended to use `fileTypeFromFile()` instead.
Returns a `Promise` for an object with the detected file type and MIME type:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
#### buffer
Type: `Buffer | Uint8Array | ArrayBuffer`
A buffer representing file data. It works best if the buffer contains the entire file, it may work with a smaller portion as well.
### fileTypeFromFile(filePath)
Detect the file type of a file path.
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
Returns a `Promise` for an object with the detected file type and MIME type:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
#### filePath
Type: `string`
The file path to parse.
### fileTypeFromStream(stream)
Detect the file type of a Node.js [readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable).
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
Returns a `Promise` for an object with the detected file type and MIME type:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
#### stream
Type: [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
A readable stream representing file data.
### fileTypeFromBlob(blob)
Detect the file type of a [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob).
The file type is detected by checking the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
Returns a `Promise` for an object with the detected file type and MIME type:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
```js
import {fileTypeFromBlob} from 'file-type';
const blob = new Blob(['<?xml version="1.0" encoding="ISO-8859-1" ?>'], {
type: 'plain/text',
endings: 'native'
});
console.log(await fileTypeFromBlob(blob));
//=> {ext: 'txt', mime: 'plain/text'}
```
### fileTypeFromTokenizer(tokenizer)
Detect the file type from an `ITokenizer` source.
This method is used internally, but can also be used for a special "tokenizer" reader.
A tokenizer propagates the internal read functions, allowing alternative transport mechanisms, to access files, to be implemented and used.
Returns a `Promise` for an object with the detected file type and MIME type:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
An example is [`@tokenizer/http`](https://github.com/Borewit/tokenizer-http), which requests data using [HTTP-range-requests](https://developer.mozilla.org/en-US/docs/Web/HTTP/Range_requests). A difference with a conventional stream and the [*tokenizer*](https://github.com/Borewit/strtok3#tokenizer), is that it can *ignore* (seek, fast-forward) in the stream. For example, you may only need and read the first 6 bytes, and the last 128 bytes, which may be an advantage in case reading the entire file would take longer.
```js
import {makeTokenizer} from '@tokenizer/http';
import {fileTypeFromTokenizer} from 'file-type';
const audioTrackUrl = 'https://test-audio.netlify.com/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/01%20-%20Diablo%20Swing%20Orchestra%20-%20Heroines.mp3';
const httpTokenizer = await makeTokenizer(audioTrackUrl);
const fileType = await fileTypeFromTokenizer(httpTokenizer);
console.log(fileType);
//=> {ext: 'mp3', mime: 'audio/mpeg'}
```
Or use [`@tokenizer/s3`](https://github.com/Borewit/tokenizer-s3) to determine the file type of a file stored on [Amazon S3](https://aws.amazon.com/s3):
```js
import S3 from 'aws-sdk/clients/s3';
import {makeTokenizer} from '@tokenizer/s3';
import {fileTypeFromTokenizer} from 'file-type';
// Initialize the S3 client
const s3 = new S3();
// Initialize the S3 tokenizer.
const s3Tokenizer = await makeTokenizer(s3, {
Bucket: 'affectlab',
Key: '1min_35sec.mp4'
});
// Figure out what kind of file it is.
const fileType = await fileTypeFromTokenizer(s3Tokenizer);
console.log(fileType);
```
Note that only the minimum amount of data required to determine the file type is read (okay, just a bit extra to prevent too many fragmented reads).
#### tokenizer
Type: [`ITokenizer`](https://github.com/Borewit/strtok3#tokenizer)
A file source implementing the [tokenizer interface](https://github.com/Borewit/strtok3#tokenizer).
### fileTypeStream(readableStream, options?)
Returns a `Promise` which resolves to the original readable stream argument, but with an added `fileType` property, which is an object like the one returned from `fileTypeFromFile()`.
This method can be handy to put in between a stream, but it comes with a price.
Internally `stream()` builds up a buffer of `sampleSize` bytes, used as a sample, to determine the file type.
The sample size impacts the file detection resolution.
A smaller sample size will result in lower probability of the best file type detection.
**Note:** This method is only available when using Node.js.
**Note:** Requires Node.js 14 or later.
#### readableStream
Type: [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
#### options
Type: `object`
##### sampleSize
Type: `number`\
Default: `4100`
The sample size in bytes.
#### Example
```js
import got from 'got';
import {fileTypeStream} from 'file-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
const stream1 = got.stream(url);
const stream2 = await fileTypeStream(stream1, {sampleSize: 1024});
if (stream2.fileType?.mime === 'image/jpeg') {
// stream2 can be used to stream the JPEG image (from the very beginning of the stream)
}
```
#### readableStream
Type: [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
The input stream.
### supportedExtensions
Returns a `Set<string>` of supported file extensions.
### supportedMimeTypes
Returns a `Set<string>` of supported MIME types.
## Supported file types
- [`3g2`](https://en.wikipedia.org/wiki/3GP_and_3G2#3G2) - Multimedia container format defined by the 3GPP2 for 3G CDMA2000 multimedia services
- [`3gp`](https://en.wikipedia.org/wiki/3GP_and_3G2#3GP) - Multimedia container format defined by the Third Generation Partnership Project (3GPP) for 3G UMTS multimedia services
- [`3mf`](https://en.wikipedia.org/wiki/3D_Manufacturing_Format) - 3D Manufacturing Format
- [`7z`](https://en.wikipedia.org/wiki/7z) - 7-Zip archive
- [`Z`](https://fileinfo.com/extension/z) - Unix Compressed File
- [`aac`](https://en.wikipedia.org/wiki/Advanced_Audio_Coding) - Advanced Audio Coding
- [`ac3`](https://www.atsc.org/standard/a522012-digital-audio-compression-ac-3-e-ac-3-standard-12172012/) - ATSC A/52 Audio File
- [`ace`](https://en.wikipedia.org/wiki/ACE_(compressed_file_format)) - ACE archive
- [`ai`](https://en.wikipedia.org/wiki/Adobe_Illustrator_Artwork) - Adobe Illustrator Artwork
- [`aif`](https://en.wikipedia.org/wiki/Audio_Interchange_File_Format) - Audio Interchange file
- [`alias`](https://en.wikipedia.org/wiki/Alias_%28Mac_OS%29) - macOS Alias file
- [`amr`](https://en.wikipedia.org/wiki/Adaptive_Multi-Rate_audio_codec) - Adaptive Multi-Rate audio codec
- [`ape`](https://en.wikipedia.org/wiki/Monkey%27s_Audio) - Monkey's Audio
- [`apng`](https://en.wikipedia.org/wiki/APNG) - Animated Portable Network Graphics
- [`ar`](https://en.wikipedia.org/wiki/Ar_(Unix)) - Archive file
- [`arj`](https://en.wikipedia.org/wiki/ARJ) - Archive file
- [`arrow`](https://arrow.apache.org) - Columnar format for tables of data
- [`arw`](https://en.wikipedia.org/wiki/Raw_image_format#ARW) - Sony Alpha Raw image file
- [`asar`](https://github.com/electron/asar#format) - Archive format primarily used to enclose Electron applications
- [`asf`](https://en.wikipedia.org/wiki/Advanced_Systems_Format) - Advanced Systems Format
- [`avi`](https://en.wikipedia.org/wiki/Audio_Video_Interleave) - Audio Video Interleave file
- [`avif`](https://en.wikipedia.org/wiki/AV1#AV1_Image_File_Format_(AVIF)) - AV1 Image File Format
- [`avro`](https://en.wikipedia.org/wiki/Apache_Avro#Avro_Object_Container_File) - Object container file developed by Apache Avro
- [`blend`](https://wiki.blender.org/index.php/Dev:Source/Architecture/File_Format) - Blender project
- [`bmp`](https://en.wikipedia.org/wiki/BMP_file_format) - Bitmap image file
- [`bpg`](https://bellard.org/bpg/) - Better Portable Graphics file
- [`bz2`](https://en.wikipedia.org/wiki/Bzip2) - Archive file
- [`cab`](https://en.wikipedia.org/wiki/Cabinet_(file_format)) - Cabinet file
- [`cfb`](https://en.wikipedia.org/wiki/Compound_File_Binary_Format) - Compound File Binary Format
- [`chm`](https://en.wikipedia.org/wiki/Microsoft_Compiled_HTML_Help) - Microsoft Compiled HTML Help
- [`class`](https://en.wikipedia.org/wiki/Java_class_file) - Java class file
- [`cpio`](https://en.wikipedia.org/wiki/Cpio) - Cpio archive
- [`cr2`](https://fileinfo.com/extension/cr2) - Canon Raw image file (v2)
- [`cr3`](https://fileinfo.com/extension/cr3) - Canon Raw image file (v3)
- [`crx`](https://developer.chrome.com/extensions/crx) - Google Chrome extension
- [`cur`](https://en.wikipedia.org/wiki/ICO_(file_format)) - Icon file
- [`dcm`](https://en.wikipedia.org/wiki/DICOM#Data_format) - DICOM Image File
- [`deb`](https://en.wikipedia.org/wiki/Deb_(file_format)) - Debian package
- [`dmg`](https://en.wikipedia.org/wiki/Apple_Disk_Image) - Apple Disk Image
- [`dng`](https://en.wikipedia.org/wiki/Digital_Negative) - Adobe Digital Negative image file
- [`docx`](https://en.wikipedia.org/wiki/Office_Open_XML) - Microsoft Word
- [`dsf`](https://dsd-guide.com/sites/default/files/white-papers/DSFFileFormatSpec_E.pdf) - Sony DSD Stream File (DSF)
- [`dwg`](https://en.wikipedia.org/wiki/.dwg) - Autodesk CAD file
- [`elf`](https://en.wikipedia.org/wiki/Executable_and_Linkable_Format) - Unix Executable and Linkable Format
- [`eot`](https://en.wikipedia.org/wiki/Embedded_OpenType) - Embedded OpenType font
- [`eps`](https://en.wikipedia.org/wiki/Encapsulated_PostScript) - Encapsulated PostScript
- [`epub`](https://en.wikipedia.org/wiki/EPUB) - E-book file
- [`exe`](https://en.wikipedia.org/wiki/.exe) - Executable file
- [`f4a`](https://en.wikipedia.org/wiki/Flash_Video) - Audio-only ISO base media file format used by Adobe Flash Player
- [`f4b`](https://en.wikipedia.org/wiki/Flash_Video) - Audiobook and podcast ISO base media file format used by Adobe Flash Player
- [`f4p`](https://en.wikipedia.org/wiki/Flash_Video) - ISO base media file format protected by Adobe Access DRM used by Adobe Flash Player
- [`f4v`](https://en.wikipedia.org/wiki/Flash_Video) - ISO base media file format used by Adobe Flash Player
- [`flac`](https://en.wikipedia.org/wiki/FLAC) - Free Lossless Audio Codec
- [`flif`](https://en.wikipedia.org/wiki/Free_Lossless_Image_Format) - Free Lossless Image Format
- [`flv`](https://en.wikipedia.org/wiki/Flash_Video) - Flash video
- [`gif`](https://en.wikipedia.org/wiki/GIF) - Graphics Interchange Format
- [`glb`](https://github.com/KhronosGroup/glTF) - GL Transmission Format
- [`gz`](https://en.wikipedia.org/wiki/Gzip) - Archive file
- [`heic`](https://nokiatech.github.io/heif/technical.html) - High Efficiency Image File Format
- [`icc`](https://en.wikipedia.org/wiki/ICC_profile) - ICC Profile
- [`icns`](https://en.wikipedia.org/wiki/Apple_Icon_Image_format) - Apple Icon image
- [`ico`](https://en.wikipedia.org/wiki/ICO_(file_format)) - Windows icon file
- [`ics`](https://en.wikipedia.org/wiki/ICalendar#Data_format) - iCalendar
- [`indd`](https://en.wikipedia.org/wiki/Adobe_InDesign#File_format) - Adobe InDesign document
- [`it`](https://wiki.openmpt.org/Manual:_Module_formats#The_Impulse_Tracker_format_.28.it.29) - Audio module format: Impulse Tracker
- [`j2c`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jls`](https://en.wikipedia.org/wiki/Lossless_JPEG#JPEG-LS) - Lossless/near-lossless compression standard for continuous-tone images
- [`jp2`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jpg`](https://en.wikipedia.org/wiki/JPEG) - Joint Photographic Experts Group image
- [`jpm`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jpx`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jxl`](https://en.wikipedia.org/wiki/JPEG_XL) - JPEG XL image format
- [`jxr`](https://en.wikipedia.org/wiki/JPEG_XR) - Joint Photographic Experts Group extended range
- [`ktx`](https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/) - OpenGL and OpenGL ES textures
- [`lnk`](https://en.wikipedia.org/wiki/Shortcut_%28computing%29#Microsoft_Windows) - Microsoft Windows file shortcut
- [`lz`](https://en.wikipedia.org/wiki/Lzip) - Archive file
- [`lzh`](https://en.wikipedia.org/wiki/LHA_(file_format)) - LZH archive
- [`m4a`](https://en.wikipedia.org/wiki/M4A) - Audio-only MPEG-4 files
- [`m4b`](https://en.wikipedia.org/wiki/M4B) - Audiobook and podcast MPEG-4 files, which also contain metadata including chapter markers, images, and hyperlinks
- [`m4p`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#Filename_extensions) - MPEG-4 files with audio streams encrypted by FairPlay Digital Rights Management as were sold through the iTunes Store
- [`m4v`](https://en.wikipedia.org/wiki/M4V) - Video container format developed by Apple, which is very similar to the MP4 format
- [`mid`](https://en.wikipedia.org/wiki/MIDI) - Musical Instrument Digital Interface file
- [`mie`](https://en.wikipedia.org/wiki/Sidecar_file) - Dedicated meta information format which supports storage of binary as well as textual meta information
- [`mj2`](https://en.wikipedia.org/wiki/Motion_JPEG_2000) - Motion JPEG 2000
- [`mkv`](https://en.wikipedia.org/wiki/Matroska) - Matroska video file
- [`mobi`](https://en.wikipedia.org/wiki/Mobipocket) - Mobipocket
- [`mov`](https://en.wikipedia.org/wiki/QuickTime_File_Format) - QuickTime video file
- [`mp1`](https://en.wikipedia.org/wiki/MPEG-1_Audio_Layer_I) - MPEG-1 Audio Layer I
- [`mp2`](https://en.wikipedia.org/wiki/MPEG-1_Audio_Layer_II) - MPEG-1 Audio Layer II
- [`mp3`](https://en.wikipedia.org/wiki/MP3) - Audio file
- [`mp4`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#Filename_extensions) - MPEG-4 Part 14 video file
- [`mpc`](https://en.wikipedia.org/wiki/Musepack) - Musepack (SV7 & SV8)
- [`mpg`](https://en.wikipedia.org/wiki/MPEG-1) - MPEG-1 file
- [`mts`](https://en.wikipedia.org/wiki/.m2ts) - MPEG-2 Transport Stream, both raw and Blu-ray Disc Audio-Video (BDAV) versions
- [`mxf`](https://en.wikipedia.org/wiki/Material_Exchange_Format) - Material Exchange Format
- [`nef`](https://www.nikonusa.com/en/learn-and-explore/a/products-and-innovation/nikon-electronic-format-nef.html) - Nikon Electronic Format image file
- [`nes`](https://fileinfo.com/extension/nes) - Nintendo NES ROM
- [`odp`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for presentations
- [`ods`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for spreadsheets
- [`odt`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for word processing
- [`oga`](https://en.wikipedia.org/wiki/Ogg) - Audio file
- [`ogg`](https://en.wikipedia.org/wiki/Ogg) - Audio file
- [`ogm`](https://en.wikipedia.org/wiki/Ogg) - Audio file
- [`ogv`](https://en.wikipedia.org/wiki/Ogg) - Video file
- [`ogx`](https://en.wikipedia.org/wiki/Ogg) - Audio file
- [`opus`](https://en.wikipedia.org/wiki/Opus_(audio_format)) - Audio file
- [`orf`](https://en.wikipedia.org/wiki/ORF_format) - Olympus Raw image file
- [`otf`](https://en.wikipedia.org/wiki/OpenType) - OpenType font
- [`parquet`](https://en.wikipedia.org/wiki/Apache_Parquet) - Apache Parquet
- [`pcap`](https://wiki.wireshark.org/Development/LibpcapFileFormat) - Libpcap File Format
- [`pdf`](https://en.wikipedia.org/wiki/Portable_Document_Format) - Portable Document Format
- [`pgp`](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) - Pretty Good Privacy
- [`png`](https://en.wikipedia.org/wiki/Portable_Network_Graphics) - Portable Network Graphics
- [`pptx`](https://en.wikipedia.org/wiki/Office_Open_XML) - Microsoft Powerpoint
- [`ps`](https://en.wikipedia.org/wiki/Postscript) - Postscript
- [`psd`](https://en.wikipedia.org/wiki/Adobe_Photoshop#File_format) - Adobe Photoshop document
- [`pst`](https://en.wikipedia.org/wiki/Personal_Storage_Table) - Personal Storage Table file
- [`qcp`](https://en.wikipedia.org/wiki/QCP) - Tagged and chunked data
- [`raf`](https://en.wikipedia.org/wiki/Raw_image_format) - Fujifilm RAW image file
- [`rar`](https://en.wikipedia.org/wiki/RAR_(file_format)) - Archive file
- [`rpm`](https://fileinfo.com/extension/rpm) - Red Hat Package Manager file
- [`rtf`](https://en.wikipedia.org/wiki/Rich_Text_Format) - Rich Text Format
- [`rw2`](https://en.wikipedia.org/wiki/Raw_image_format) - Panasonic RAW image file
- [`s3m`](https://wiki.openmpt.org/Manual:_Module_formats#The_ScreamTracker_3_format_.28.s3m.29) - Audio module format: ScreamTracker 3
- [`shp`](https://en.wikipedia.org/wiki/Shapefile) - Geospatial vector data format
- [`skp`](https://en.wikipedia.org/wiki/SketchUp) - SketchUp
- [`spx`](https://en.wikipedia.org/wiki/Ogg) - Audio file
- [`sqlite`](https://www.sqlite.org/fileformat2.html) - SQLite file
- [`stl`](https://en.wikipedia.org/wiki/STL_(file_format)) - Standard Tessellated Geometry File Format (ASCII only)
- [`swf`](https://en.wikipedia.org/wiki/SWF) - Adobe Flash Player file
- [`tar`](https://en.wikipedia.org/wiki/Tar_(computing)#File_format) - Tarball archive file
- [`tif`](https://en.wikipedia.org/wiki/Tagged_Image_File_Format) - Tagged Image file
- [`ttf`](https://en.wikipedia.org/wiki/TrueType) - TrueType font
- [`vcf`](https://en.wikipedia.org/wiki/VCard) - vCard
- [`voc`](https://wiki.multimedia.cx/index.php/Creative_Voice) - Creative Voice File
- [`wasm`](https://en.wikipedia.org/wiki/WebAssembly) - WebAssembly intermediate compiled format
- [`wav`](https://en.wikipedia.org/wiki/WAV) - Waveform Audio file
- [`webm`](https://en.wikipedia.org/wiki/WebM) - Web video file
- [`webp`](https://en.wikipedia.org/wiki/WebP) - Web Picture format
- [`woff`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) - Web Open Font Format
- [`woff2`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) - Web Open Font Format
- [`wv`](https://en.wikipedia.org/wiki/WavPack) - WavPack
- [`xcf`](https://en.wikipedia.org/wiki/XCF_(file_format)) - eXperimental Computing Facility
- [`xlsx`](https://en.wikipedia.org/wiki/Office_Open_XML) - Microsoft Excel
- [`xm`](https://wiki.openmpt.org/Manual:_Module_formats#The_FastTracker_2_format_.28.xm.29) - Audio module format: FastTracker 2
- [`xml`](https://en.wikipedia.org/wiki/XML) - eXtensible Markup Language
- [`xpi`](https://en.wikipedia.org/wiki/XPInstall) - XPInstall file
- [`xz`](https://en.wikipedia.org/wiki/Xz) - Compressed file
- [`zip`](https://en.wikipedia.org/wiki/Zip_(file_format)) - Archive file
- [`zst`](https://en.wikipedia.org/wiki/Zstandard) - Archive file
*Pull requests are welcome for additional commonly used file types.*
The following file types will not be accepted:
- [MS-CFB: Microsoft Compound File Binary File Format based formats](https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-cfb/53989ce4-7b05-4f8d-829b-d08d6148375b), too old and difficult to parse:
- `.doc` - Microsoft Word 97-2003 Document
- `.xls` - Microsoft Excel 97-2003 Document
- `.ppt` - Microsoft PowerPoint 97-2003 Document
- `.msi` - Microsoft Windows Installer
- `.csv` - [Reason.](https://github.com/sindresorhus/file-type/issues/264#issuecomment-568439196)
- `.svg` - Detecting it requires a full-blown parser. Check out [`is-svg`](https://github.com/sindresorhus/is-svg) for something that mostly works.
## Related
- [file-type-cli](https://github.com/sindresorhus/file-type-cli) - CLI for this module
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Borewit](https://github.com/Borewit)
**Former**
- [Mikael Finstad](https://github.com/mifi)
- [Ben Brook](https://github.com/bencmbrook)

300
node_modules/file-type/supported.js generated vendored

@ -0,0 +1,300 @@
// Every file extension that file-type can detect.
// The companion `mimeTypes` list below enumerates the MIME types reported for
// these formats; the package readme's "Supported file types" section documents
// each entry.
export const extensions = [
'jpg',
'png',
'apng',
'gif',
'webp',
'flif',
'xcf',
'cr2',
'cr3',
'orf',
'arw',
'dng',
'nef',
'rw2',
'raf',
'tif',
'bmp',
'icns',
'jxr',
'psd',
'indd',
'zip',
'tar',
'rar',
'gz',
'bz2',
'7z',
'dmg',
'mp4',
'mid',
'mkv',
'webm',
'mov',
'avi',
'mpg',
'mp2',
'mp3',
'm4a',
'oga',
'ogg',
'ogv',
'opus',
'flac',
'wav',
'spx',
'amr',
'pdf',
'epub',
'elf',
'exe',
'swf',
'rtf',
'wasm',
'woff',
'woff2',
'eot',
'ttf',
'otf',
'ico',
'flv',
'ps',
'xz',
'sqlite',
'nes',
'crx',
'xpi',
'cab',
'deb',
'ar',
'rpm',
'Z',
'lz',
'cfb',
'mxf',
'mts',
'blend',
'bpg',
'docx',
'pptx',
'xlsx',
'3gp',
'3g2',
'j2c',
'jp2',
'jpm',
'jpx',
'mj2',
'aif',
'qcp',
'odt',
'ods',
'odp',
'xml',
'mobi',
'heic',
'cur',
'ktx',
'ape',
'wv',
'dcm',
'ics',
'glb',
'pcap',
'dsf',
'lnk',
'alias',
'voc',
'ac3',
'm4v',
'm4p',
'm4b',
'f4v',
'f4p',
'f4b',
'f4a',
'mie',
'asf',
'ogm',
'ogx',
'mpc',
'arrow',
'shp',
'aac',
'mp1',
'it',
's3m',
'xm',
'ai',
'skp',
'avif',
'eps',
'lzh',
'pgp',
'asar',
'stl',
'chm',
'3mf',
'zst',
'jxl',
'vcf',
'jls',
'pst',
'dwg',
'parquet',
'class',
'arj',
'cpio',
'ace',
'avro',
'icc',
];
// Every MIME type that file-type can report.
// Not a 1:1 mapping with `extensions` above — several extensions share one
// MIME type (e.g. the Ogg family). Non-standard or invented types are marked
// with inline comments.
export const mimeTypes = [
'image/jpeg',
'image/png',
'image/gif',
'image/webp',
'image/flif',
'image/x-xcf',
'image/x-canon-cr2',
'image/x-canon-cr3',
'image/tiff',
'image/bmp',
'image/vnd.ms-photo',
'image/vnd.adobe.photoshop',
'application/x-indesign',
'application/epub+zip',
'application/x-xpinstall',
'application/vnd.oasis.opendocument.text',
'application/vnd.oasis.opendocument.spreadsheet',
'application/vnd.oasis.opendocument.presentation',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/zip',
'application/x-tar',
'application/x-rar-compressed',
'application/gzip',
'application/x-bzip2',
'application/x-7z-compressed',
'application/x-apple-diskimage',
'application/x-apache-arrow',
'video/mp4',
'audio/midi',
'video/x-matroska',
'video/webm',
'video/quicktime',
'video/vnd.avi',
'audio/vnd.wave',
'audio/qcelp',
'audio/x-ms-asf',
'video/x-ms-asf',
'application/vnd.ms-asf',
'video/mpeg',
'video/3gpp',
'audio/mpeg',
'audio/mp4', // RFC 4337
'audio/opus',
'video/ogg',
'audio/ogg',
'application/ogg',
'audio/x-flac',
'audio/ape',
'audio/wavpack',
'audio/amr',
'application/pdf',
'application/x-elf',
'application/x-msdownload',
'application/x-shockwave-flash',
'application/rtf',
'application/wasm',
'font/woff',
'font/woff2',
'application/vnd.ms-fontobject',
'font/ttf',
'font/otf',
'image/x-icon',
'video/x-flv',
'application/postscript',
'application/eps',
'application/x-xz',
'application/x-sqlite3',
'application/x-nintendo-nes-rom',
'application/x-google-chrome-extension',
'application/vnd.ms-cab-compressed',
'application/x-deb',
'application/x-unix-archive',
'application/x-rpm',
'application/x-compress',
'application/x-lzip',
'application/x-cfb',
'application/x-mie',
'application/mxf',
'video/mp2t',
'application/x-blender',
'image/bpg',
'image/j2c',
'image/jp2',
'image/jpx',
'image/jpm',
'image/mj2',
'audio/aiff',
'application/xml',
'application/x-mobipocket-ebook',
'image/heif',
'image/heif-sequence',
'image/heic',
'image/heic-sequence',
'image/icns',
'image/ktx',
'application/dicom',
'audio/x-musepack',
'text/calendar',
'text/vcard',
'model/gltf-binary',
'application/vnd.tcpdump.pcap',
'audio/x-dsf', // Non-standard
'application/x.ms.shortcut', // Invented by us
'application/x.apple.alias', // Invented by us
'audio/x-voc',
'audio/vnd.dolby.dd-raw',
'audio/x-m4a',
'image/apng',
'image/x-olympus-orf',
'image/x-sony-arw',
'image/x-adobe-dng',
'image/x-nikon-nef',
'image/x-panasonic-rw2',
'image/x-fujifilm-raf',
'video/x-m4v',
'video/3gpp2',
'application/x-esri-shape',
'audio/aac',
'audio/x-it',
'audio/x-s3m',
'audio/x-xm',
'video/MP1S',
'video/MP2P',
'application/vnd.sketchup.skp',
'image/avif',
'application/x-lzh-compressed',
'application/pgp-encrypted',
'application/x-asar',
'model/stl',
'application/vnd.ms-htmlhelp',
'model/3mf',
'image/jxl',
'application/zstd',
'image/jls',
'application/vnd.ms-outlook',
'image/vnd.dwg',
'application/x-parquet',
'application/java-vm',
'application/x-arj',
'application/x-cpio',
'application/x-ace-compressed',
'application/avro',
'application/vnd.iccprofile',
];

38
node_modules/file-type/util.js generated vendored

@ -0,0 +1,38 @@
/**
Maps each character of `string` (iterated by code point) to the numeric value
of its first UTF-16 code unit.

@param {string} string - Input text, typically a magic-number signature.
@returns {number[]} One code-unit value per iterated character.
*/
export function stringToBytes(string) {
  const codeUnits = [];
  for (const character of string) {
    codeUnits.push(character.charCodeAt(0)); // eslint-disable-line unicorn/prefer-code-point
  }

  return codeUnits;
}
/**
Checks whether the TAR checksum is valid.
@param {Buffer} buffer - The TAR header `[offset ... offset + 512]`.
@param {number} offset - TAR header offset.
@returns {boolean} `true` if the TAR checksum is valid, otherwise `false`.
*/
export function tarHeaderChecksumMatches(buffer, offset = 0) {
const readSum = Number.parseInt(buffer.toString('utf8', 148, 154).replace(/\0.*$/, '').trim(), 8); // Read sum in header
if (Number.isNaN(readSum)) {
return false;
}
let sum = 8 * 0x20; // Initialize signed bit sum
for (let index = offset; index < offset + 148; index++) {
sum += buffer[index];
}
for (let index = offset + 156; index < offset + 512; index++) {
sum += buffer[index];
}
return readSum === sum;
}
/**
Tokenizer token for an ID3 sync-safe UINT32: four bytes carrying 7 bits each
(28 bits total, representing up to 256MB), with the msb kept 0 so the value
cannot produce "false syncsignals".
*/
export const uint32SyncSafeToken = {
  get(buffer, offset) {
    // Start with the least-significant 7 bits, then OR in the remaining
    // bytes at 7-bit intervals, most significant byte last.
    let value = buffer[offset + 3] & 0x7F;
    for (let index = 2; index >= 0; index--) {
      value |= buffer[offset + index] << (7 * (3 - index));
    }

    return value;
  },
  len: 4,
};

@ -0,0 +1,76 @@
import {type Buffer} from 'node:buffer';
/**
Union of every image extension `imageType()` can report.
*/
export type ImageFileExtension =
| 'jpg'
| 'png'
| 'gif'
| 'webp'
| 'flif'
| 'cr2'
| 'tif'
| 'bmp'
| 'jxr'
| 'psd'
| 'ico'
| 'bpg'
| 'jp2'
| 'jpm'
| 'jpx'
| 'heic'
| 'cur'
| 'dcm'
| 'avif';

export type ImageTypeResult = {
/**
One of the supported [file types](https://github.com/sindresorhus/image-type#supported-file-types).
*/
ext: ImageFileExtension;

/**
The detected [MIME type](https://en.wikipedia.org/wiki/Internet_media_type).
*/
mime: string;
};

/**
Detect the image type of a `Buffer`/`Uint8Array`.

@param input - Input for which to determine the file type. It only needs the first `minimumBytes` amount of bytes.
@returns The detected image type, or `undefined` when the input is not a recognized image.

@example
```
import {readChunk} from 'read-chunk';
import imageType, {minimumBytes} from 'image-type';
const buffer = await readChunk('unicorn.png', {length: minimumBytes});
await imageType(buffer);
//=> {ext: 'png', mime: 'image/png'}
```

@example
```
import https from 'node:https';
import imageType, {minimumBytes} from 'image-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
https.get(url, response => {
response.on('readable', () => {
(async () => {
const chunk = response.read(minimumBytes);
response.destroy();
console.log(await imageType(chunk));
//=> {ext: 'jpg', mime: 'image/jpeg'}
})();
});
});
```
*/
export default function imageType(input: Buffer | Uint8Array): Promise<ImageTypeResult | undefined>;

/**
The minimum amount of bytes needed to detect a file type. Currently, it's 4100 bytes, but it can change, so don't hard-code it.
*/
export const minimumBytes: number;

30
node_modules/image-type/index.js generated vendored

@ -0,0 +1,30 @@
import {fileTypeFromBuffer} from 'file-type';
// The subset of file-type extensions that are image formats; used to filter
// fileTypeFromBuffer() results down to images. Must mirror the
// ImageFileExtension union in index.d.ts.
const imageExtensions = new Set([
'jpg',
'png',
'gif',
'webp',
'flif',
'cr2',
'tif',
'bmp',
'jxr',
'psd',
'ico',
'bpg',
'jp2',
'jpm',
'jpx',
'heic',
'cur',
'dcm',
'avif',
]);
/**
Detect the image type of a `Buffer`/`Uint8Array` by delegating to file-type
and keeping only image formats.

@param {Buffer | Uint8Array} input - Input to inspect; only the first `minimumBytes` bytes are needed.
@returns {Promise<{ext: string, mime: string} | undefined>} The detected type, or `undefined` when the input is not a recognized image.
*/
export default async function imageType(input) {
  const result = await fileTypeFromBuffer(input);
  // Fix: return undefined (not false) on no match, matching the declared
  // Promise<ImageTypeResult | undefined> type and the readme. Still truthy
  // for matches, so existing `if (await imageType(...))` callers behave
  // identically.
  return result !== undefined && imageExtensions.has(result.ext) ? result : undefined;
}
// Number of leading bytes needed to detect any supported file type; callers
// should read at least this many bytes before calling imageType(). The value
// can change between releases, so don't hard-code it.
export const minimumBytes = 4100;

9
node_modules/image-type/license generated vendored

@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,59 @@
{
"name": "image-type",
"version": "5.2.0",
"description": "Detect the image type of a Buffer/Uint8Array",
"license": "MIT",
"repository": "sindresorhus/image-type",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"types": "./index.d.ts",
"engines": {
"node": ">=14.16"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"image",
"picture",
"photo",
"type",
"detect",
"check",
"is",
"exif",
"binary",
"buffer",
"uint8array",
"png",
"jpg",
"jpeg",
"gif",
"webp",
"tif",
"bmp",
"jxr",
"psd",
"mime"
],
"dependencies": {
"file-type": "^18.1.0"
},
"devDependencies": {
"@types/node": "^18.11.18",
"ava": "^5.1.0",
"read-chunk": "^4.0.3",
"tsd": "^0.25.0",
"xo": "^0.53.1"
}
}

108
node_modules/image-type/readme.md generated vendored

@ -0,0 +1,108 @@
# image-type
> Detect the image type of a Buffer/Uint8Array
See the [`file-type`](https://github.com/sindresorhus/file-type) module for more file types and a CLI.
## Install
```sh
npm install image-type
```
## Usage
##### Node.js
```js
import {readChunk} from 'read-chunk';
import imageType, {minimumBytes} from 'image-type';
const buffer = await readChunk('unicorn.png', {length: minimumBytes});
await imageType(buffer);
//=> {ext: 'png', mime: 'image/png'}
```
Or from a remote location:
```js
import https from 'node:https';
import imageType, {minimumBytes} from 'image-type';
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Example.jpg';
https.get(url, response => {
response.on('readable', () => {
(async () => {
const chunk = response.read(minimumBytes);
response.destroy();
console.log(await imageType(chunk));
//=> {ext: 'jpg', mime: 'image/jpeg'}
})();
});
});
```
##### Browser
```js
const xhr = new XMLHttpRequest();
xhr.open('GET', 'unicorn.png');
xhr.responseType = 'arraybuffer';
xhr.onload = () => {
(async () => {
await imageType(new Uint8Array(this.response));
//=> {ext: 'png', mime: 'image/png'}
})();
};
xhr.send();
```
## API
### imageType(input)
Returns a `Promise<object>` with:
- `ext` - One of the [supported file types](#supported-file-types)
- `mime` - The [MIME type](https://en.wikipedia.org/wiki/Internet_media_type)
Or `undefined` when there is no match.
#### input
Type: `Buffer | Uint8Array`
It only needs the first `minimumBytes` amount of bytes.
### minimumBytes
Type: `number`
The minimum amount of bytes needed to detect a file type. Currently, it's 4100 bytes, but it can change, so don't hardcode it.
## Supported file types
- [`jpg`](https://en.wikipedia.org/wiki/JPEG)
- [`png`](https://en.wikipedia.org/wiki/Portable_Network_Graphics)
- [`gif`](https://en.wikipedia.org/wiki/GIF)
- [`webp`](https://en.wikipedia.org/wiki/WebP)
- [`flif`](https://en.wikipedia.org/wiki/Free_Lossless_Image_Format)
- [`cr2`](https://fileinfo.com/extension/cr2)
- [`tif`](https://en.wikipedia.org/wiki/Tagged_Image_File_Format)
- [`bmp`](https://en.wikipedia.org/wiki/BMP_file_format)
- [`jxr`](https://en.wikipedia.org/wiki/JPEG_XR)
- [`psd`](https://en.wikipedia.org/wiki/Adobe_Photoshop#File_format)
- [`ico`](https://en.wikipedia.org/wiki/ICO_(file_format))
- [`bpg`](https://bellard.org/bpg/)
- [`jp2`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jpm`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`jpx`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
- [`heic`](https://nokiatech.github.io/heif/technical.html)
- [`cur`](https://en.wikipedia.org/wiki/ICO_(file_format))
- [`dcm`](https://en.wikipedia.org/wiki/DICOM#Data_format) - DICOM Image File
*SVG isn't included as it requires the whole file to be read, but you can get it [here](https://github.com/sindresorhus/is-svg).*

@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2010-2017 Borewit
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -0,0 +1,95 @@
![Node.js CI](https://github.com/Borewit/peek-readable/workflows/Node.js%20CI/badge.svg)
[![NPM version](https://badge.fury.io/js/peek-readable.svg)](https://npmjs.org/package/peek-readable)
[![npm downloads](http://img.shields.io/npm/dm/peek-readable.svg)](https://npmcharts.com/compare/peek-readable?start=600&interval=30)
[![Coverage Status](https://coveralls.io/repos/github/Borewit/peek-readable/badge.svg?branch=master)](https://coveralls.io/github/Borewit/peek-readable?branch=master)
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/d4b511481b3a4634b6ca5c0724407eb9)](https://www.codacy.com/gh/Borewit/peek-readable/dashboard?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=Borewit/peek-readable&amp;utm_campaign=Badge_Grade)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/Borewit/peek-readable.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/peek-readable/alerts/)
[![Language grade: JavaScript](https://img.shields.io/lgtm/grade/javascript/g/Borewit/peek-readable.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/peek-readable/context:javascript)
[![Known Vulnerabilities](https://snyk.io/test/github/Borewit/peek-readable/badge.svg?targetFile=package.json)](https://snyk.io/test/github/Borewit/peek-readable?targetFile=package.json)
# peek-readable
A promise based asynchronous stream reader, which makes reading from a stream easy.
Allows to read and peek from a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams)
Note that [peek-readable](https://github.com/Borewit/peek-readable) was formerly released as [then-read-stream](https://github.com/Borewit/peek-readable).
## Usage
### Installation
```shell script
npm install --save peek-readable
```
The `peek-readable` contains one class: `StreamReader`, which reads from a [stream.Readable](https://nodejs.org/api/stream.html#stream_class_stream_readable).
### Compatibility
Module: version 5 migrated from [CommonJS](https://en.wikipedia.org/wiki/CommonJS) to [pure ECMAScript Module (ESM)](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c).
JavaScript is compliant with [ECMAScript 2019 (ES10)](https://en.wikipedia.org/wiki/ECMAScript#10th_Edition_%E2%80%93_ECMAScript_2019).
Requires Node.js ≥ 14.16 engine.
## Examples
In the following example we read the first 16 bytes from a stream and store them in our buffer.
Source code of examples can be found [here](test/examples.ts).
```js
import fs from 'node:fs';
import { StreamReader } from 'peek-readable';
(async () => {
const readable = fs.createReadStream('JPEG_example_JPG_RIP_001.jpg');
const streamReader = new StreamReader(readable);
const uint8Array = new Uint8Array(16);
  const bytesRead = await streamReader.read(uint8Array, 0, 16);
// buffer contains 16 bytes, if the end-of-stream has not been reached
})();
```
End-of-stream detection:
```js
(async () => {
const fileReadStream = fs.createReadStream('JPEG_example_JPG_RIP_001.jpg');
const streamReader = new StreamReader(fileReadStream);
const buffer = Buffer.alloc(16); // or use: new Uint8Array(16);
try {
await streamReader.read(buffer, 0, 16);
// buffer contains 16 bytes, if the end-of-stream has not been reached
} catch(error) {
if (error instanceof EndOfStreamError) {
console.log('End-of-stream reached');
}
}
})();
```
With peek you can read ahead:
```js
import fs from 'node:fs';
import { StreamReader } from 'peek-readable';
const fileReadStream = fs.createReadStream('JPEG_example_JPG_RIP_001.jpg');
const streamReader = new StreamReader(fileReadStream);
const buffer = Buffer.alloc(20);
(async () => {
let bytesRead = await streamReader.peek(buffer, 0, 3);
if (bytesRead === 3 && buffer[0] === 0xFF && buffer[1] === 0xD8 && buffer[2] === 0xFF) {
console.log('This is a JPEG file');
} else {
throw Error('Expected a JPEG file');
}
bytesRead = await streamReader.read(buffer, 0, 20); // Read JPEG header
if (bytesRead === 20) {
console.log('Got the JPEG header');
} else {
throw Error('Failed to read JPEG header');
}
})();
```

@ -0,0 +1,6 @@
/**
 * Deferred exposes a Promise together with its resolve and reject callbacks,
 * so the promise can be settled from outside its executor.
 */
export declare class Deferred<T> {
    /** Promise settled via the `resolve` / `reject` members below. */
    promise: Promise<T>;
    /** Fulfills `promise` with the given value. */
    resolve: (value: T) => void;
    /** Rejects `promise` with the given reason. */
    reject: (reason: any) => void;
    constructor();
}

@ -0,0 +1,10 @@
export class Deferred {
constructor() {
this.resolve = () => null;
this.reject = () => null;
this.promise = new Promise((resolve, reject) => {
this.reject = reject;
this.resolve = resolve;
});
}
}

@ -0,0 +1,7 @@
/** Message carried by every EndOfStreamError instance. */
export declare const defaultMessages = "End-Of-Stream";
/**
 * Thrown when a read operation is attempted after the end of the file or
 * stream has been reached.
 */
export declare class EndOfStreamError extends Error {
    constructor();
}

@ -0,0 +1,9 @@
export const defaultMessages = 'End-Of-Stream';
/**
* Thrown on read operation of the end of file or stream has been reached
*/
export class EndOfStreamError extends Error {
constructor() {
super(defaultMessages);
}
}

@ -0,0 +1,47 @@
/// <reference types="node" resolution-mode="require"/>
import { Readable } from 'node:stream';
export { EndOfStreamError } from './EndOfFileStream.js';
export declare class StreamReader {
    /** Wrapped Node.js stream.Readable supplying the data. */
    private s;
    /**
     * Deferred used for a postponed read request (when no data is yet available to read)
     */
    private deferred;
    /** True once the underlying stream has ended, errored or closed. */
    private endOfStream;
    /**
     * Store peeked data
     * @type {Array}
     */
    private peekQueue;
    /**
     * @param s Node.js stream.Readable to read from
     * @throws Error when `s` does not look like a stream.Readable
     */
    constructor(s: Readable);
    /**
     * Read ahead (peek) from stream. Subsequent read or peeks will return the same data
     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
     * @param offset - Offset target
     * @param length - Number of bytes to read
     * @returns Number of bytes peeked
     */
    peek(uint8Array: Uint8Array, offset: number, length: number): Promise<number>;
    /**
     * Read chunk from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset - Offset target
     * @param length - Number of bytes to read
     * @returns Number of bytes read; may be less than `length` at end-of-stream
     * @throws EndOfStreamError when the stream already ended and no peeked data remains
     */
    read(buffer: Uint8Array, offset: number, length: number): Promise<number>;
    /**
     * Read chunk from stream
     * @param buffer Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset Offset target
     * @param length Number of bytes to read
     * @returns Number of bytes read
     */
    private readFromStream;
    /**
     * Process deferred read request
     * @param request Deferred read request
     */
    private readDeferred;
    /** Marks end-of-stream and rejects any pending deferred read. */
    private reject;
}

@ -0,0 +1,129 @@
import { EndOfStreamError } from './EndOfFileStream.js';
import { Deferred } from './Deferred.js';
export { EndOfStreamError } from './EndOfFileStream.js';
// Maximum number of bytes requested from the underlying stream in a single
// read operation; read() splits larger requests into chunks of this size.
const maxStreamReadSize = 1 * 1024 * 1024; // Maximum request length on read-stream operation
export class StreamReader {
    constructor(s) {
        this.s = s;
        /**
         * Deferred used for a postponed read request (when no data is yet available to read)
         */
        this.deferred = null;
        // Set once 'end', 'error' or 'close' fires on the wrapped stream;
        // once the peek queue drains, further reads throw EndOfStreamError.
        this.endOfStream = false;
        /**
         * Store peeked data
         * @type {Array}
         */
        this.peekQueue = [];
        // Duck-type validation: anything exposing read() and once() is accepted.
        if (!s.read || !s.once) {
            throw new Error('Expected an instance of stream.Readable');
        }
        // Any of these events permanently ends the reader; a pending deferred
        // read (if any) is rejected with the corresponding error.
        this.s.once('end', () => this.reject(new EndOfStreamError()));
        this.s.once('error', err => this.reject(err));
        this.s.once('close', () => this.reject(new Error('Stream closed')));
    }
    /**
     * Read ahead (peek) from stream. Subsequent read or peeks will return the same data
     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
     * @param offset - Offset target
     * @param length - Number of bytes to read
     * @returns Number of bytes peeked
     */
    async peek(uint8Array, offset, length) {
        const bytesRead = await this.read(uint8Array, offset, length);
        this.peekQueue.push(uint8Array.subarray(offset, offset + bytesRead)); // Put read data back to peek buffer
        return bytesRead;
    }
    /**
     * Read chunk from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset - Offset target
     * @param length - Number of bytes to read
     * @returns Number of bytes read; may be less than `length` at end-of-stream
     */
    async read(buffer, offset, length) {
        if (length === 0) {
            return 0;
        }
        // Nothing peeked and the stream already ended: nothing left to serve.
        if (this.peekQueue.length === 0 && this.endOfStream) {
            throw new EndOfStreamError();
        }
        let remaining = length;
        let bytesRead = 0;
        // consume peeked data first
        while (this.peekQueue.length > 0 && remaining > 0) {
            const peekData = this.peekQueue.pop(); // Front of queue
            if (!peekData)
                throw new Error('peekData should be defined');
            const lenCopy = Math.min(peekData.length, remaining);
            buffer.set(peekData.subarray(0, lenCopy), offset + bytesRead);
            bytesRead += lenCopy;
            remaining -= lenCopy;
            if (lenCopy < peekData.length) {
                // remainder back to queue
                this.peekQueue.push(peekData.subarray(lenCopy));
            }
        }
        // continue reading from stream if required
        while (remaining > 0 && !this.endOfStream) {
            const reqLen = Math.min(remaining, maxStreamReadSize);
            const chunkLen = await this.readFromStream(buffer, offset + bytesRead, reqLen);
            bytesRead += chunkLen;
            // A short read means the stream could not deliver more right now;
            // stop instead of requesting again.
            if (chunkLen < reqLen)
                break;
            remaining -= chunkLen;
        }
        return bytesRead;
    }
    /**
     * Read chunk from stream
     * @param buffer Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset Offset target
     * @param length Number of bytes to read
     * @returns Number of bytes read
     */
    async readFromStream(buffer, offset, length) {
        // Try a synchronous read first; stream.Readable.read() returns null
        // when no data is currently buffered.
        const readBuffer = this.s.read(length);
        if (readBuffer) {
            buffer.set(readBuffer, offset);
            return readBuffer.length;
        }
        else {
            // No data buffered yet: defer the request until 'readable' fires.
            const request = {
                buffer,
                offset,
                length,
                deferred: new Deferred()
            };
            this.deferred = request.deferred;
            this.s.once('readable', () => {
                this.readDeferred(request);
            });
            return request.deferred.promise;
        }
    }
    /**
     * Process deferred read request
     * @param request Deferred read request
     */
    readDeferred(request) {
        const readBuffer = this.s.read(request.length);
        if (readBuffer) {
            request.buffer.set(readBuffer, request.offset);
            request.deferred.resolve(readBuffer.length);
            this.deferred = null;
        }
        else {
            // Still no data; re-arm and retry on the next 'readable' event.
            this.s.once('readable', () => {
                this.readDeferred(request);
            });
        }
    }
    // Marks the reader as ended and rejects any pending deferred read.
    // Invoked for the 'end', 'error' and 'close' events of the wrapped stream.
    reject(err) {
        this.endOfStream = true;
        if (this.deferred) {
            this.deferred.reject(err);
            this.deferred = null;
        }
    }
}

@ -0,0 +1,2 @@
export { EndOfStreamError } from './EndOfFileStream.js';
export { StreamReader } from './StreamReader.js';

@ -0,0 +1,2 @@
export { EndOfStreamError } from './EndOfFileStream.js';
export { StreamReader } from './StreamReader.js';

@ -0,0 +1,73 @@
{
"name": "peek-readable",
"version": "5.0.0",
"description": "Read and peek from a readable stream",
"author": {
"name": "Borewit",
"url": "https://github.com/Borewit"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
},
"scripts": {
"clean": "del-cli lib/**/*.js lib/**/*.js.map lib/**/*.d.ts test/**/*.js test/**/*.js.map coverage .nyc_output",
"build": "npm run clean && npm run compile",
"compile-src": "tsc -p lib",
"compile-test": "tsc -p test",
"compile": "npm run compile-src && yarn run compile-test",
"eslint": "eslint lib test --ext .ts --ignore-pattern *.d.ts",
"lint-md": "remark -u preset-lint-recommended .",
"lint": "npm run lint-md && npm run eslint",
"test": "mocha",
"test-coverage": "c8 npm run test",
"start": "npm run compile && npm run lint && npm run cover-test"
},
"engines": {
"node": ">=14.16"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Borewit/peek-readable"
},
"license": "MIT",
"type": "module",
"exports": "./lib/index.js",
"types": "lib/index.d.ts",
"bugs": {
"url": "https://github.com/Borewit/peek-readable/issues"
},
"files": [
"lib/**/*.js",
"lib/**/*.d.ts"
],
"devDependencies": {
"@types/chai": "^4.3.1",
"@types/mocha": "^9.1.1",
"@types/node": "^18.6.3",
"@typescript-eslint/eslint-plugin": "^4.33.0",
"@typescript-eslint/parser": "^4.33.0",
"c8": "^7.12.0",
"chai": "^4.3.6",
"coveralls": "^3.1.1",
"del-cli": "^5.0.0",
"eslint": "^7.32.0",
"eslint-config-prettier": "^8.5.0",
"eslint-import-resolver-typescript": "^3.4.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jsdoc": "^39.3.4",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-unicorn": "^40.1.0",
"mocha": "^10.0.0",
"remark-cli": "^11.0.0",
"remark-preset-lint-recommended": "^6.1.2",
"ts-node": "^10.9.1",
"typescript": "^4.7.4"
},
"keywords": [
"readable",
"buffer",
"stream",
"read"
]
}

@ -0,0 +1,71 @@
![Karma CI](https://github.com/Borewit/readable-web-to-node-stream/workflows/Karma%20CI/badge.svg)
[![NPM version](https://badge.fury.io/js/readable-web-to-node-stream.svg)](https://npmjs.org/package/readable-web-to-node-stream)
[![npm downloads](http://img.shields.io/npm/dm/readable-web-to-node-stream.svg)](https://npmcharts.com/compare/readable-web-to-node-stream)
[![dependencies Status](https://david-dm.org/Borewit/readable-web-to-node-stream/status.svg)](https://david-dm.org/Borewit/readable-web-to-node-stream)
[![Known Vulnerabilities](https://snyk.io/test/github/Borewit/readable-web-to-node-stream/badge.svg?targetFile=package.json)](https://snyk.io/test/github/Borewit/readable-web-to-node-stream?targetFile=package.json)
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/d4b511481b3a4634b6ca5c0724407eb9)](https://www.codacy.com/gh/Borewit/peek-readable/dashboard?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=Borewit/peek-readable&amp;utm_campaign=Badge_Grade)
[![Coverage Status](https://coveralls.io/repos/github/Borewit/readable-web-to-node-stream/badge.svg?branch=master)](https://coveralls.io/github/Borewit/readable-web-to-node-stream?branch=master)
[![Minified size](https://badgen.net/bundlephobia/min/readable-web-to-node-stream)](https://bundlephobia.com/result?p=readable-web-to-node-stream)
# readable-web-to-node-stream
Converts a [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader) into a [Node.js readable stream](https://nodejs.org/api/stream.html#stream_readable_streams).
## Installation
Install via [npm](http://npmjs.org/):
```bash
npm install readable-web-to-node-stream
```
or [yarn](https://yarnpkg.com/):
```bash
yarn add readable-web-to-node-stream
```
## Compatibility
Source is written in TypeScript and compiled to ECMAScript 2017 (ES8).
Unit tests are performed on the following browsers:
* Google Chrome 74.0
* Firefox 68.0
* Safari 12.0
* Opera 60.0
## Example
Import readable-web-stream-to-node in JavaScript:
```js
const {ReadableWebToNodeStream} = require('readable-web-to-node-stream');
async function download(url) {
const response = await fetch(url);
const readableWebStream = response.body;
const nodeStream = new ReadableWebToNodeStream(readableWebStream);
}
```
## API
**constructor(stream: ReadableStream): Promise<void>**
`stream: ReadableStream`: the [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader).
**close(): Promise<void>**
Will close the Node readable stream and release the Web-API readable-stream.
**waitForReadToComplete(): Promise<void>**
If there is no unresolved read call to Web-API ReadableStream immediately returns, otherwise it will wait until the read is resolved.
## Licence
(The MIT License)
Copyright (c) 2019 Borewit
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,39 @@
import { Readable } from 'readable-stream';
/**
 * Converts a Web-API stream into Node stream.Readable class
 * Node stream readable: https://nodejs.org/api/stream.html#stream_readable_streams
 * Web API readable-stream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
 */
export declare class ReadableWebToNodeStream extends Readable {
    /** Total number of bytes pushed to the Node stream so far. */
    bytesRead: number;
    /** True once the reader lock has been (or is being) released. */
    released: boolean;
    /**
     * Default web API stream reader
     * https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader
     */
    private reader;
    /** Promise of the currently in-flight `reader.read()`, if any. */
    private pendingRead;
    /**
     *
     * @param stream ReadableStream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
     */
    constructor(stream: ReadableStream);
    /**
     * Implementation of readable._read(size).
     * When readable._read() is called, if data is available from the resource,
     * the implementation should begin pushing that data into the read queue
     * https://nodejs.org/api/stream.html#stream_readable_read_size_1
     */
    _read(): Promise<void>;
    /**
     * If there is no unresolved read call to Web-API ReadableStream immediately returns;
     * otherwise will wait until the read is resolved.
     */
    waitForReadToComplete(): Promise<void>;
    /**
     * Close wrapper
     */
    close(): Promise<void>;
    /** Marks the stream released, waits for a pending read, frees the reader lock. */
    private syncAndRelease;
}

@ -0,0 +1,69 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableWebToNodeStream = void 0;
const readable_stream_1 = require("readable-stream");
/**
 * Converts a Web-API stream into Node stream.Readable class
 * Node stream readable: https://nodejs.org/api/stream.html#stream_readable_streams
 * Web API readable-stream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
 */
class ReadableWebToNodeStream extends readable_stream_1.Readable {
    /**
     *
     * @param stream ReadableStream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
     */
    constructor(stream) {
        super();
        // Total number of bytes pushed to the Node stream so far.
        this.bytesRead = 0;
        // Once true, the stream only signals EOF on subsequent _read() calls.
        this.released = false;
        // Acquires an exclusive lock on the Web stream until releaseLock().
        this.reader = stream.getReader();
    }
    /**
     * Implementation of readable._read(size).
     * When readable._read() is called, if data is available from the resource,
     * the implementation should begin pushing that data into the read queue
     * https://nodejs.org/api/stream.html#stream_readable_read_size_1
     */
    async _read() {
        // Should start pushing data into the queue
        // Read data from the underlying Web-API-readable-stream
        if (this.released) {
            this.push(null); // Signal EOF
            return;
        }
        // NOTE(review): a rejection of reader.read() is not caught here and
        // would surface as an unhandled rejection — confirm intended.
        this.pendingRead = this.reader.read();
        const data = await this.pendingRead;
        // clear the promise before pushing new data to the queue and allow sequential calls to _read()
        delete this.pendingRead;
        if (data.done || this.released) {
            this.push(null); // Signal EOF
        }
        else {
            this.bytesRead += data.value.length;
            this.push(data.value); // Push new data to the queue
        }
    }
    /**
     * If there is no unresolved read call to Web-API ReadableStream immediately returns;
     * otherwise will wait until the read is resolved.
     */
    async waitForReadToComplete() {
        if (this.pendingRead) {
            await this.pendingRead;
        }
    }
    /**
     * Close wrapper
     */
    async close() {
        await this.syncAndRelease();
    }
    // Marks the stream released, waits for any in-flight read to settle, then
    // frees the reader's lock on the underlying Web stream.
    async syncAndRelease() {
        this.released = true;
        await this.waitForReadToComplete();
        // NOTE(review): releaseLock() is synchronous per the Web Streams API;
        // the await is harmless but unnecessary.
        await this.reader.releaseLock();
    }
}
exports.ReadableWebToNodeStream = ReadableWebToNodeStream;
//# sourceMappingURL=index.js.map

@ -0,0 +1,147 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseReadableStream = void 0;
localStorage.debug = 'readable-web-to-node-stream';
const assert = require("assert");
const mmb = require("music-metadata-browser");
const index_1 = require("./index");
/**
 * Fetch the given URL and verify the HTTP response is usable.
 * @param url URL to request
 * @returns Response with a verified OK status and a readable body
 * @throws AssertionError when the response status is not OK or the body is missing
 */
async function httpGetByUrl(url) {
    const response = await fetch(url);
    // Note: the original implementation also collected response headers into a
    // local array that was never used; that dead code has been removed.
    assert.ok(response.ok, `HTTP error status=${response.status}: ${response.statusText}`);
    assert.ok(response.body, 'HTTP-stream');
    return response;
}
/**
 * Parse audio metadata from a Web-API readable stream.
 * Wraps the Web stream in a Node readable stream, feeds it to
 * music-metadata-browser, and closes the wrapper when parsing completes.
 * @param stream Web-API ReadableStream to parse
 * @param fileInfo File information (e.g. size, mimeType) forwarded to the parser
 * @param options Parser options forwarded to music-metadata-browser
 * @returns Parsed metadata result
 */
async function parseReadableStream(stream, fileInfo, options) {
    const ns = new index_1.ReadableWebToNodeStream(stream);
    const res = await mmb.parseNodeStream(ns, fileInfo, options);
    await ns.close();
    return res;
}
exports.parseReadableStream = parseReadableStream;
// Expected metadata for tracks of the "netBloc Vol. 24: tiuqottigeloot"
// compilation, served from the Borewit/test-audio repository.
// All but the first two entries are commented out, which keeps the
// network-bound test run short.
const tiuqottigeloot_vol24_Tracks = [
    {
        url: '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/01%20-%20Diablo%20Swing%20Orchestra%20-%20Heroines.mp3',
        duration: 322.612245,
        metaData: {
            title: 'Heroines',
            artist: 'Diablo Swing Orchestra'
        }
    },
    {
        url: '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/02%20-%20Eclectek%20-%20We%20Are%20Going%20To%20Eclecfunk%20Your%20Ass.mp3',
        duration: 190.093061,
        metaData: {
            title: 'We Are Going to Eclecfunk Your Ass',
            artist: 'Eclectek'
        }
    } /* ,
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/03%20-%20Auto-Pilot%20-%20Seventeen.mp3',
        duration: 214.622041,
        metaData: {
            title: 'Seventeen',
            artist: 'Auto-Pilot'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/04%20-%20Muha%20-%20Microphone.mp3',
        duration: 181.838367,
        metaData: {
            title: 'Microphone',
            artist: 'Muha'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/05%20-%20Just%20Plain%20Ant%20-%20Stumble.mp3',
        duration: 86.047347,
        metaData: {
            title: 'Stumble',
            artist: 'Just Plain Ant'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/06%20-%20Sleaze%20-%20God%20Damn.mp3',
        duration: 226.795102,
        metaData: {
            title: 'God Damn',
            artist: 'Sleaze'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/07%20-%20Juanitos%20-%20Hola%20Hola%20Bossa%20Nova.mp3',
        duration: 207.072653,
        metaData: {
            title: 'Hola Hola Bossa Nova',
            artist: 'Juanitos'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/08%20-%20Entertainment%20For%20The%20Braindead%20-%20Resolutions%20(Chris%20Summer%20Remix).mp3',
        duration: 314.331429,
        metaData: {
            title: 'Resolutions (Chris Summer remix)',
            artist: 'Entertainment for the Braindead'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/09%20-%20Nobara%20Hayakawa%20-%20Trail.mp3',
        duration: 204.042449,
        metaData: {
            title: 'Trail',
            artist: 'Nobara Hayakawa'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/10%20-%20Paper%20Navy%20-%20Tongue%20Tied.mp3',
        duration: 201.116735,
        metaData: {
            title: 'Tongue Tied',
            artist: 'Paper Navy'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/11%20-%2060%20Tigres%20-%20Garage.mp3',
        duration: 245.394286,
        metaData: {
            title: 'Garage',
            artist: '60 Tigres'
        }
    },
    {
        url:
        '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/12%20-%20CM%20aka%20Creative%20-%20The%20Cycle%20(Featuring%20Mista%20Mista).mp3',
        duration: 221.44,
        metaData: {
            title: 'The Cycle (feat. Mista Mista)',
            artist: 'CM aka Creative'
        }
    } */
];
// Integration test: stream each sample track over HTTP and verify the parsed
// metadata matches the expected artist and title.
describe('Parse WebAmp tracks', () => {
    tiuqottigeloot_vol24_Tracks.forEach(track => {
        it(`track ${track.metaData.artist} - ${track.metaData.title}`, async () => {
            const url = 'https://raw.githubusercontent.com/Borewit/test-audio/958e057' + track.url;
            const response = await httpGetByUrl(url);
            const metadata = await parseReadableStream(response.body, {
                size: parseInt(response.headers.get('Content-Length'), 10),
                mimeType: response.headers.get('Content-Type')
            });
            expect(metadata.common.artist).toEqual(track.metaData.artist);
            expect(metadata.common.title).toEqual(track.metaData.title);
        }, 20000); // 20 s timeout: each spec downloads a full MP3 over the network
    });
});
//# sourceMappingURL=index.spec.js.map

@ -0,0 +1,84 @@
{
"name": "readable-web-to-node-stream",
"version": "3.0.2",
"description": "Converts a Web-API readable-stream into a Node readable-stream.",
"main": "lib/index.js",
"files": [
"lib/**/*.js",
"lib/**/*.d.ts"
],
"engines": {
"node": ">=8"
},
"types": "lib/index.d.ts",
"scripts": {
"clean": "del-cli lib/**/*.js lib/**/*.js.map lib/**/*.d.ts coverage",
"compile-lib": "tsc -p lib/tsconfig.json",
"compile-test": "tsc -p lib/tsconfig.spec.json",
"prepublishOnly": "yarn run build",
"build": "npm run compile-lib && npm run compile-test",
"tslint": "tslint 'lib/**/*.ts' --exclude 'lib/**/*.d.ts'",
"eslint": "eslint karma.conf.js",
"lint": "npm run tslint && npm run eslint",
"test": "karma start --single-run",
"karma": "karma start",
"karma-firefox": "karma start --browsers Firefox",
"karma-once": "karma start --browsers Chrome --single-run",
"travis-karma": "karma start --browsers Firefox --single-run --reporters coverage-istanbul,spec",
"browserstack": "karma start --browsers bs_win_chrome,bs_win_firefox,bs_osx_safari --single-run --reporters coverage-istanbul,spec",
"travis-karma-browserstack": "karma start --browsers bs_win_chrome,bs_win_firefox,bs_osx_safari --single-run --reporters coverage-istanbul,spec,BrowserStack",
"post-coveralls": "coveralls < coverage/lcov.info",
"post-codacy": " codacy-coverage < coverage/lcov.info"
},
"keywords": [
"stream.readable",
"web",
"node",
"browser",
"stream",
"covert",
"coverter",
"readable",
"readablestream"
],
"repository": "https://github.com/Borewit/readable-web-to-node-stream.git",
"author": {
"name": "Borewit",
"url": "https://github.com/Borewit"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/Borewit/readable-web-to-node-stream/issues"
},
"dependencies": {
"readable-stream": "^3.6.0"
},
"devDependencies": {
"@types/jasmine": "^3.8.1",
"@types/node": "^16.3.1",
"@types/readable-stream": "^2.3.9",
"coveralls": "^3.1.0",
"del-cli": "^3.0.1",
"eslint": "^7.18.0",
"istanbul-instrumenter-loader": "^3.0.1",
"jasmine-core": "^3.8.0",
"karma": "^6.3.4",
"karma-browserstack-launcher": "^1.6.0",
"karma-chrome-launcher": "^3.1.0",
"karma-coverage-istanbul-reporter": "^3.0.3",
"karma-firefox-launcher": "^2.1.0",
"karma-jasmine": "^4.0.1",
"karma-jasmine-html-reporter": "^1.7.0",
"karma-spec-reporter": "^0.0.32",
"karma-webpack": "^5.0.0",
"music-metadata-browser": "^2.2.7",
"ts-loader": "^8.0.14",
"tslint": "^6.1.3",
"typescript": "^4.3.5",
"webpack": "^4.46.0"
}
}

15
node_modules/strtok3/LICENSE generated vendored

@ -0,0 +1,15 @@
Copyright (c) 2017, Borewit
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

306
node_modules/strtok3/README.md generated vendored

@ -0,0 +1,306 @@
![Node.js CI](https://github.com/Borewit/strtok3/workflows/Node.js%20CI/badge.svg)
[![NPM version](https://badge.fury.io/js/strtok3.svg)](https://npmjs.org/package/strtok3)
[![npm downloads](http://img.shields.io/npm/dm/strtok3.svg)](https://npmcharts.com/compare/strtok3,token-types?start=1200&interval=30)
[![DeepScan grade](https://deepscan.io/api/teams/5165/projects/8526/branches/103329/badge/grade.svg)](https://deepscan.io/dashboard#view=project&tid=5165&pid=8526&bid=103329)
[![Known Vulnerabilities](https://snyk.io/test/github/Borewit/strtok3/badge.svg?targetFile=package.json)](https://snyk.io/test/github/Borewit/strtok3?targetFile=package.json)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/Borewit/strtok3.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/strtok3/alerts/)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/59dd6795e61949fb97066ca52e6097ef)](https://www.codacy.com/app/Borewit/strtok3?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=Borewit/strtok3&amp;utm_campaign=Badge_Grade)
[![Language grade: JavaScript](https://img.shields.io/lgtm/grade/javascript/g/Borewit/strtok3.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/strtok3/context:javascript)
# strtok3
A promise based streaming [*tokenizer*](#tokenizer) for [Node.js](http://nodejs.org) and browsers.
This node module is a successor of [strtok2](https://github.com/Borewit/strtok2).
The `strtok3` contains a few methods to turn different input into a [*tokenizer*](#tokenizer). Designed to
* Support a streaming environment
* Decoding of binary data, strings and numbers in mind
* Read [predefined](https://github.com/Borewit/token-types) or custom tokens.
* Optimized [*tokenizers*](#tokenizer) for reading from [file](#method-strtok3fromfile), [stream](#method-strtok3fromstream) or [buffer](#method-strtok3frombuffer).
It can read from:
* A file (taking a file path as an input)
* A Node.js [stream](https://nodejs.org/api/stream.html).
* A [Buffer](https://nodejs.org/api/buffer.html) or [Uint8Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array)
* HTTP chunked transfer provided by [@tokenizer/http](https://github.com/Borewit/tokenizer-http).
* Chunked [Amazon S3](https://aws.amazon.com/s3) access provided by [@tokenizer/s3](https://github.com/Borewit/tokenizer-s3).
## Installation
```sh
npm install strtok3
```
### Compatibility
Module: version 7 migrated from [CommonJS](https://en.wikipedia.org/wiki/CommonJS) to [pure ECMAScript Module (ESM)](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c).
JavaScript is compliant with [ECMAScript 2019 (ES10)](https://en.wikipedia.org/wiki/ECMAScript#10th_Edition_%E2%80%93_ECMAScript_2019).
Requires Node.js ≥ 14.16 engine.
## API
Use one of the methods to instantiate an [*abstract tokenizer*](#tokenizer):
* [strtok3.fromFile](#method-strtok3fromfile)
* [strtok3.fromStream](#method-strtok3fromstream)
* [strtok3.fromBuffer](#method-strtok3fromBuffer)
* [strtok3.fromUint8Array](#method-strtok3fromUint8Array)
### strtok3 methods
All of the strtok3 methods return a [*tokenizer*](#tokenizer), either directly or via a promise.
#### Method `strtok3.fromFile()`
| Parameter | Type | Description |
|-----------|-----------------------|----------------------------|
| path | Path to file (string) | Path to file to read from |
> __Note__: that [file-information](#file-information) is automatically added.
Returns, via a promise, a [*tokenizer*](#tokenizer) which can be used to parse a file.
```js
import * as strtok3 from 'strtok3';
import * as Token from 'token-types';
(async () => {
const tokenizer = await strtok3.fromFile("somefile.bin");
try {
const myNumber = await tokenizer.readToken(Token.UINT8);
console.log(`My number: ${myNumber}`);
} finally {
tokenizer.close(); // Close the file
}
})();
```
#### Method `strtok3.fromStream()`
Create [*tokenizer*](#tokenizer) from a node.js [readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable).
| Parameter | Optional | Type | Description |
|-----------|-----------|-----------------------------------------------------------------------------|--------------------------|
| stream | no | [Readable](https://nodejs.org/api/stream.html#stream_class_stream_readable) | Stream to read from |
| fileInfo | yes | [IFileInfo](#IFileInfo) | Provide file information |
Returns a [*tokenizer*](#tokenizer), via a Promise, which can be used to parse a buffer.
```js
import strtok3 from 'strtok3';
import * as Token from 'token-types';
strtok3.fromStream(stream).then(tokenizer => {
return tokenizer.readToken(Token.UINT8).then(myUint8Number => {
console.log(`My number: ${myUint8Number}`);
});
});
```
#### Method `strtok3.fromBuffer()`
| Parameter | Optional | Type | Description |
|------------|----------|--------------------------------------------------|----------------------------------------|
| uint8Array | no | [Uint8Array](https://nodejs.org/api/buffer.html) | Uint8Array or Buffer to read from |
| fileInfo | yes | [IFileInfo](#IFileInfo) | Provide file information |
Returns a [*tokenizer*](#tokenizer) which can be used to parse the provided buffer.
```js
import * as strtok3 from 'strtok3';
const tokenizer = strtok3.fromBuffer(buffer);
tokenizer.readToken(Token.UINT8).then(myUint8Number => {
console.log(`My number: ${myUint8Number}`);
});
```
## Tokenizer
The tokenizer allows us to *read* or *peek* from the *tokenizer-stream*. The *tokenizer-stream* is an abstraction of a [stream](https://nodejs.org/api/stream.html), file or [Buffer](https://nodejs.org/api/buffer.html).
It can also be translated in chunked reads, as done in [@tokenizer/http](https://github.com/Borewit/tokenizer-http);
What is the difference with a Node.js stream?
* The *tokenizer-stream* supports jumping / seeking in the *tokenizer-stream* using [`tokenizer.ignore()`](#method-tokenizerignore)
* In addition to *read* methods, it has *peek* methods, to read ahead and check what is coming.
The [tokenizer.position](#attribute-tokenizerposition) keeps track of the read position.
### strtok3 attributes
#### Attribute `tokenizer.fileInfo`
Optional attribute describing the file information, see [IFileInfo](#IFileInfo)
#### Attribute `tokenizer.position`
Pointer to the current position in the [*tokenizer*](#tokenizer) stream.
If a *position* is provided to a *read* or *peek* method, it should be equal to or greater than this value.
### Tokenizer methods
There are two kind of methods:
1. *read* methods: used to read a *token* of [Buffer](https://nodejs.org/api/buffer.html) from the [*tokenizer*](#tokenizer). The position of the *tokenizer-stream* will advance with the size of the token.
2. *peek* methods: same as the read, but it will *not* advance the pointer. It allows to read (peek) ahead.
#### Method `tokenizer.readBuffer()`
Read buffer from stream.
`readBuffer(buffer, options?)`
| Parameter | Type | Description |
|------------|----------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| buffer | [Buffer](https://nodejs.org/api/buffer.html) &#124; Uint8Array | Target buffer to write the data read to |
| options | [IReadChunkOptions](#ireadchunkoptions) | An integer specifying the number of bytes to read |
Return value `Promise<number>` Promise with number of bytes read. The number of bytes read may be less if the *mayBeLess* flag was set.
#### Method `tokenizer.peekBuffer()`
Peek (read ahead) buffer from [*tokenizer*](#tokenizer)
`peekBuffer(buffer, options?)`
| Parameter | Type | Description |
|------------|-----------------------------------------|-----------------------------------------------------|
| buffer | Buffer &#124; Uint8Array | Target buffer to write the data read (peeked) to. |
| options | [IReadChunkOptions](#ireadchunkoptions) | An integer specifying the number of bytes to read. | |
Return value `Promise<number>` Promise with number of bytes read. The number of bytes read may be less if the *mayBeLess* flag was set.
#### Method `tokenizer.readToken()`
Read a *token* from the tokenizer-stream.
`readToken(token, position?)`
| Parameter | Type | Description |
|------------|-------------------------|---------------------------------------------------------------------------------------------------------------------- |
| token | [IGetToken](#IGetToken) | Token to read from the tokenizer-stream. |
| position? | number | Offset where to begin reading within the file. If position is null, data will be read from the current file position. |
Return value `Promise<T>`. Promise with the token value read from the *tokenizer-stream*.
#### Method `tokenizer.peekToken()`
Peek a *token* from the [*tokenizer*](#tokenizer).
`peekToken(token, position?)`
| Parameter | Type | Description |
|------------|----------------------------|-------------------------------------------------------------------------------------------------------------------------|
| token | [IGetToken<T>](#IGetToken) | Token to read from the tokenizer-stream. |
| position? | number | Offset where to begin reading within the file. If position is null, data will be read from the current file position. |
Return value `Promise<T>` Promise with token value peeked from the [*tokenizer*](#tokenizer).
#### Method `tokenizer.readNumber()`
Read a numeric [*token*](#token) from the [*tokenizer*](#tokenizer).
`readNumber(token)`
| Parameter  | Type                            | Description                                        |
|------------|---------------------------------|----------------------------------------------------|
| token      | [IGetToken<number>](#IGetToken) | Numeric token to read from the tokenizer-stream.   |
Return value `Promise<number>` Promise with the number read from the *tokenizer-stream*.
#### Method `tokenizer.ignore()`
Advance the offset pointer with the number of bytes provided.
`ignore(length)`
| Parameter  | Type   | Description                                                           |
|------------|--------|----------------------------------------------------------------------|
| length     | number | Number of bytes to ignore. Will advance the `tokenizer.position`      |
Return value `Promise<number>` Promise with the number of bytes ignored.
#### Method `tokenizer.close()`
Clean up resources, such as closing a file pointer if applicable.
### IReadChunkOptions
Each attribute is optional:
| Attribute | Type | Description |
|-----------|---------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| offset | number | The offset in the buffer to start writing at; if not provided, start at 0 |
| length | number | Requested number of bytes to read. |
| position  | number  | Position where to peek from the file. If position is null, data will be read from the [current file position](#attribute-tokenizerposition). Position may not be less than [tokenizer.position](#attribute-tokenizerposition) |
| mayBeLess | boolean | If set, will not throw an EOF error if less than the requested length could be read. |
Example:
```js
tokenizer.peekBuffer(buffer, {mayBeLess: true});
```
## IFileInfo
File information interface which describes the underlying file, each attribute is optional.
| Attribute | Type | Description |
|-----------|---------|---------------------------------------------------------------------------------------------------|
| size      | number  | File size in bytes                                                                                |
| mimeType  | string  | [MIME-type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types) of file. |
| path      | string  | File path                                                                                         |
| url       | string  | File URL                                                                                          |
## Token
The *token* is basically a description of what to read from the [*tokenizer-stream*](#tokenizer).
A basic set of *token types* can be found here: [*token-types*](https://github.com/Borewit/token-types).
A token is something which implements the following interface:
```ts
export interface IGetToken<T> {
/**
* Length in bytes of encoded value
*/
len: number;
/**
* Decode value from buffer at offset
* @param buf Buffer to read the decoded value from
* @param off Decode offset
*/
get(buf: Buffer, off: number): T;
}
```
The *tokenizer* reads `token.len` bytes from the *tokenizer-stream* into a Buffer.
The `token.get` will be called with the Buffer. `token.get` is responsible for conversion from the buffer to the desired output type.
## Browser compatibility
To exclude fs based dependencies, you can use a submodule-import from 'strtok3/lib/core'.
| function | 'strtok3' | 'strtok3/lib/core' |
| ----------------------| --------------------|---------------------|
| `parseBuffer` | ✓ | ✓ |
| `parseStream` | ✓ | ✓ |
| `fromFile` | ✓ | |
### Working with Web-API readable stream
To convert a [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader) into a [Node.js readable stream](https://nodejs.org/api/stream.html#stream_readable_streams), you can use [readable-web-to-node-stream](https://github.com/Borewit/readable-web-to-node-stream) to convert one in another.
Example submodule-import:
```js
import * as strtok3core from 'strtok3/core'; // Submodule-import to prevent Node.js specific dependencies
import { ReadableWebToNodeStream } from 'readable-web-to-node-stream';
(async () => {
const response = await fetch(url);
const readableWebStream = response.body; // Web-API readable stream
const nodeStream = new ReadableWebToNodeStream(readableWebStream); // convert to Node.js readable stream
const tokenizer = strtok3core.fromStream(nodeStream); // And we now have tokenizer in a web environment
})();
```
## Licence
(The MIT License)
Copyright (c) 2020 Borewit
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,69 @@
import { ITokenizer, IFileInfo, IReadChunkOptions } from './types.js';
import { IGetToken, IToken } from '@tokenizer/token';
/**
 * Read/peek options with all defaults filled in by `normalizeOptions()`.
 */
interface INormalizedReadChunkOptions extends IReadChunkOptions {
    offset: number;
    length: number;
    position: number;
    mayBeLess?: boolean;
}
/**
 * Core tokenizer
 * Base class providing token-level read/peek operations on top of the
 * abstract byte-level `readBuffer()` / `peekBuffer()` supplied by subclasses.
 */
export declare abstract class AbstractTokenizer implements ITokenizer {
    // Information (size, MIME-type, path, URL) about the underlying source
    fileInfo: IFileInfo;
    protected constructor(fileInfo?: IFileInfo);
    /**
     * Tokenizer-stream position
     */
    position: number;
    // Scratch buffer reused by readNumber()/peekNumber()
    private numBuffer;
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
     * @param options - Additional read options
     * @returns Promise with number of bytes read
     */
    abstract readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data peeked from the tokenizer-stream
     * @param options - Peek behaviour options
     * @returns Promise with number of bytes read
     */
    abstract peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Read a token from the tokenizer-stream
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     */
    readToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @returns Promise with token data
     */
    peekToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    readNumber(token: IToken<number>): Promise<number>;
    /**
     * Peek a numeric token from the stream (does not advance the position)
     * @param token - Numeric token
     * @returns Promise with number
     */
    peekNumber(token: IToken<number>): Promise<number>;
    /**
     * Ignore number of bytes, advances the pointer in the underlying tokenizer-stream.
     * @param length - Number of bytes to ignore
     * @return resolves the number of bytes ignored, equals length if available, otherwise the number of bytes available
     */
    ignore(length: number): Promise<number>;
    // Release underlying resources (e.g. close a file descriptor) where applicable
    close(): Promise<void>;
    // Fill defaults into the caller-supplied options; throws when position is behind the current position
    protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): INormalizedReadChunkOptions;
}
export {};

@ -0,0 +1,101 @@
import { EndOfStreamError } from 'peek-readable';
import { Buffer } from 'node:buffer';
/**
* Core tokenizer
*/
export class AbstractTokenizer {
constructor(fileInfo) {
/**
* Tokenizer-stream position
*/
this.position = 0;
this.numBuffer = new Uint8Array(8);
this.fileInfo = fileInfo ? fileInfo : {};
}
/**
* Read a token from the tokenizer-stream
* @param token - The token to read
* @param position - If provided, the desired position in the tokenizer-stream
* @returns Promise with token data
*/
async readToken(token, position = this.position) {
const uint8Array = Buffer.alloc(token.len);
const len = await this.readBuffer(uint8Array, { position });
if (len < token.len)
throw new EndOfStreamError();
return token.get(uint8Array, 0);
}
/**
* Peek a token from the tokenizer-stream.
* @param token - Token to peek from the tokenizer-stream.
* @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
* @returns Promise with token data
*/
async peekToken(token, position = this.position) {
const uint8Array = Buffer.alloc(token.len);
const len = await this.peekBuffer(uint8Array, { position });
if (len < token.len)
throw new EndOfStreamError();
return token.get(uint8Array, 0);
}
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
async readNumber(token) {
const len = await this.readBuffer(this.numBuffer, { length: token.len });
if (len < token.len)
throw new EndOfStreamError();
return token.get(this.numBuffer, 0);
}
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
async peekNumber(token) {
const len = await this.peekBuffer(this.numBuffer, { length: token.len });
if (len < token.len)
throw new EndOfStreamError();
return token.get(this.numBuffer, 0);
}
/**
* Ignore number of bytes, advances the pointer in under tokenizer-stream.
* @param length - Number of bytes to ignore
* @return resolves the number of bytes ignored, equals length if this available, otherwise the number of bytes available
*/
async ignore(length) {
if (this.fileInfo.size !== undefined) {
const bytesLeft = this.fileInfo.size - this.position;
if (length > bytesLeft) {
this.position += bytesLeft;
return bytesLeft;
}
}
this.position += length;
return length;
}
async close() {
// empty
}
normalizeOptions(uint8Array, options) {
if (options && options.position !== undefined && options.position < this.position) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
if (options) {
return {
mayBeLess: options.mayBeLess === true,
offset: options.offset ? options.offset : 0,
length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
position: options.position ? options.position : this.position
};
}
return {
mayBeLess: false,
offset: 0,
length: uint8Array.length,
position: this.position
};
}
}

@ -0,0 +1,26 @@
import { IFileInfo, IReadChunkOptions } from './types.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
/**
 * Tokenizer for in-memory data (Uint8Array / Buffer).
 */
export declare class BufferTokenizer extends AbstractTokenizer {
    // The in-memory data being tokenized
    private uint8Array;
    /**
     * Construct BufferTokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param fileInfo - Pass additional file information to the tokenizer
     */
    constructor(uint8Array: Uint8Array, fileInfo?: IFileInfo);
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data read from the tokenizer
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data peeked from the tokenizer
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    // No resources to release for an in-memory source
    close(): Promise<void>;
}

@ -0,0 +1,51 @@
import { EndOfStreamError } from 'peek-readable';
import { AbstractTokenizer } from './AbstractTokenizer.js';
/**
 * Tokenizer operating on in-memory data (Uint8Array / Buffer).
 */
export class BufferTokenizer extends AbstractTokenizer {
    /**
     * Construct BufferTokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param fileInfo - Pass additional file information to the tokenizer
     */
    constructor(uint8Array, fileInfo) {
        super(fileInfo);
        this.uint8Array = uint8Array;
        // Default the reported size to the buffer length when not supplied
        if (!this.fileInfo.size) {
            this.fileInfo.size = uint8Array.length;
        }
    }
    /**
     * Read data into the supplied buffer, advancing the tokenizer position.
     * @param uint8Array - Target buffer to receive the data
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    async readBuffer(uint8Array, options) {
        const requestedPosition = options ? options.position : undefined;
        if (requestedPosition) {
            if (requestedPosition < this.position) {
                throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
            }
            this.position = requestedPosition;
        }
        const bytesRead = await this.peekBuffer(uint8Array, options);
        this.position += bytesRead;
        return bytesRead;
    }
    /**
     * Peek (read ahead) without advancing the tokenizer position.
     * @param uint8Array - Target buffer to receive the data
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    async peekBuffer(uint8Array, options) {
        const opts = this.normalizeOptions(uint8Array, options);
        const available = this.uint8Array.length - opts.position;
        const count = Math.min(available, opts.length);
        if (count < opts.length && !opts.mayBeLess) {
            throw new EndOfStreamError();
        }
        uint8Array.set(this.uint8Array.subarray(opts.position, opts.position + count), opts.offset);
        return count;
    }
    async close() {
        // No resources to release for an in-memory source
    }
}

@ -0,0 +1,22 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { IFileInfo, IReadChunkOptions } from './types.js';
/**
 * Tokenizer reading from a file via a file descriptor.
 */
export declare class FileTokenizer extends AbstractTokenizer {
    // Open file descriptor the tokenizer reads from
    private fd;
    /**
     * @param fd - Open file descriptor (read mode)
     * @param fileInfo - File information, expected to carry at least path and size
     */
    constructor(fd: number, fileInfo: IFileInfo);
    /**
     * Read buffer from file
     * @param uint8Array - Uint8Array to write result to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek buffer from file
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    // Closes the underlying file descriptor
    close(): Promise<void>;
}
// Open the file at the given path and construct a FileTokenizer for it
export declare function fromFile(sourceFilePath: string): Promise<FileTokenizer>;

@ -0,0 +1,50 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError } from 'peek-readable';
import * as fs from './FsPromise.js';
/**
 * Tokenizer reading from a file via a file descriptor.
 */
export class FileTokenizer extends AbstractTokenizer {
    /**
     * @param fd - Open file descriptor (read mode)
     * @param fileInfo - File information (path, size)
     */
    constructor(fd, fileInfo) {
        super(fileInfo);
        this.fd = fd;
    }
    /**
     * Read from the file, advancing the tokenizer position.
     * @param uint8Array - Uint8Array to write result to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    async readBuffer(uint8Array, options) {
        const opts = this.normalizeOptions(uint8Array, options);
        this.position = opts.position;
        const { bytesRead } = await fs.read(this.fd, uint8Array, opts.offset, opts.length, opts.position);
        this.position += bytesRead;
        const mayBeLess = options ? options.mayBeLess : false;
        if (bytesRead < opts.length && !mayBeLess) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Peek from the file without advancing the tokenizer position.
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    async peekBuffer(uint8Array, options) {
        const opts = this.normalizeOptions(uint8Array, options);
        const { bytesRead } = await fs.read(this.fd, uint8Array, opts.offset, opts.length, opts.position);
        if (bytesRead < opts.length && !opts.mayBeLess) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    async close() {
        // Release the file descriptor
        return fs.close(this.fd);
    }
}
/**
 * Create a FileTokenizer for the file at the given path.
 * @param sourceFilePath - Path of the file to read from
 * @returns Promise resolving to a FileTokenizer with path and size set in its fileInfo
 * @throws Error when the path does not refer to a regular file
 */
export async function fromFile(sourceFilePath) {
    const stat = await fs.stat(sourceFilePath);
    // `isFile` is a method and must be invoked: the previous `!stat.isFile` tested the
    // function reference itself (always truthy), so this guard could never fire.
    if (!stat.isFile()) {
        throw new Error(`File not a file: ${sourceFilePath}`);
    }
    const fd = await fs.open(sourceFilePath, 'r');
    return new FileTokenizer(fd, { path: sourceFilePath, size: stat.size });
}

@ -0,0 +1,19 @@
/**
* Module convert fs functions to promise based functions
*/
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import fs from 'node:fs';
// Result of a single fd-based read operation
export interface IReadResult {
    // Number of bytes actually read
    bytesRead: number;
    // The buffer the data was read into (same object that was passed in)
    buffer: Uint8Array;
}
// Synchronous existence check (alias of fs.existsSync)
export declare const pathExists: typeof fs.existsSync;
// Re-export of fs.createReadStream
export declare const createReadStream: typeof fs.createReadStream;
// Promise-based fs.stat
export declare function stat(path: fs.PathLike): Promise<fs.Stats>;
// Promise-based fs.close for a file descriptor
export declare function close(fd: number): Promise<void>;
// Promise-based fs.open; resolves with the numeric file descriptor
export declare function open(path: fs.PathLike, mode: fs.Mode): Promise<number>;
// Promise-based fd read; resolves with bytesRead and the target buffer
export declare function read(fd: number, buffer: Uint8Array, offset: number, length: number, position: number): Promise<IReadResult>;
// Promise-based fs.writeFile
export declare function writeFile(path: fs.PathLike, data: Buffer | string): Promise<void>;
// Synchronous file write (thin wrapper kept for API symmetry)
export declare function writeFileSync(path: fs.PathLike, data: Buffer | string): void;
// Promise-based fs.readFile; resolves with the file contents as a Buffer
export declare function readFile(path: fs.PathLike): Promise<Buffer>;

@ -0,0 +1,69 @@
/**
* Module convert fs functions to promise based functions
*/
import fs from 'node:fs';
// Direct re-exports of synchronous fs helpers (no promise wrapping needed)
export const pathExists = fs.existsSync;
export const createReadStream = fs.createReadStream;
export async function stat(path) {
return new Promise((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err)
reject(err);
else
resolve(stats);
});
});
}
/**
 * Promise-based fs.close for a file descriptor.
 * @param fd - File descriptor to close
 * @returns Promise resolving once the descriptor is closed
 */
export async function close(fd) {
    return new Promise((resolve, reject) => {
        fs.close(fd, err => (err ? reject(err) : resolve()));
    });
}
export async function open(path, mode) {
return new Promise((resolve, reject) => {
fs.open(path, mode, (err, fd) => {
if (err)
reject(err);
else
resolve(fd);
});
});
}
export async function read(fd, buffer, offset, length, position) {
return new Promise((resolve, reject) => {
fs.read(fd, buffer, offset, length, position, (err, bytesRead, _buffer) => {
if (err)
reject(err);
else
resolve({ bytesRead, buffer: _buffer });
});
});
}
/**
 * Promise-based fs.writeFile.
 * @param path - Path of the file to write
 * @param data - Buffer or string to write
 * @returns Promise resolving once the data has been written
 */
export async function writeFile(path, data) {
    return fs.promises.writeFile(path, data);
}
/**
 * Synchronously write data to a file, replacing it if it already exists.
 * Thin wrapper around fs.writeFileSync kept for API symmetry with writeFile().
 * @param path - Path of the file to write
 * @param data - Buffer or string to write
 */
export function writeFileSync(path, data) {
    fs.writeFileSync(path, data);
}
export async function readFile(path) {
return new Promise((resolve, reject) => {
fs.readFile(path, (err, buffer) => {
if (err)
reject(err);
else
resolve(buffer);
});
});
}

@ -0,0 +1,28 @@
/// <reference types="node" resolution-mode="require"/>
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { Readable } from 'node:stream';
import { IFileInfo, IReadChunkOptions } from './types.js';
/**
 * Tokenizer sourcing its data from a Node.js readable stream.
 */
export declare class ReadStreamTokenizer extends AbstractTokenizer {
    // Buffered read/peek access on top of the raw stream (peek-readable StreamReader)
    private streamReader;
    /**
     * @param stream - Readable stream to tokenize
     * @param fileInfo - Optional file information of the corresponding stream
     */
    constructor(stream: Readable, fileInfo?: IFileInfo);
    /**
     * Get file information, an HTTP-client may implement this doing a HEAD request
     * @return Promise with file information
     */
    getFileInfo(): Promise<IFileInfo>;
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise with number of bytes peeked
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    // Streams cannot seek: ignored bytes are read and discarded in chunks
    ignore(length: number): Promise<number>;
}

@ -0,0 +1,94 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError, StreamReader } from 'peek-readable';
// Upper bound (in bytes) for the scratch buffer used by ignore(); larger skips are chunked
const maxBufferSize = 256000;
/**
 * Tokenizer sourcing its data from a Node.js readable stream,
 * using peek-readable's StreamReader for buffered read/peek access.
 */
export class ReadStreamTokenizer extends AbstractTokenizer {
    constructor(stream, fileInfo) {
        super(fileInfo);
        this.streamReader = new StreamReader(stream);
    }
    /**
     * Get file information, an HTTP-client may implement this doing a HEAD request
     * @return Promise with file information
     */
    async getFileInfo() {
        return this.fileInfo;
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    async readBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        const skipBytes = normOptions.position - this.position;
        if (skipBytes > 0) {
            // Requested position lies ahead of the current position:
            // skip the gap, then retry the same read.
            await this.ignore(skipBytes);
            return this.readBuffer(uint8Array, options);
        }
        else if (skipBytes < 0) {
            // Streams cannot seek backwards
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        if (normOptions.length === 0) {
            return 0;
        }
        const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
        this.position += bytesRead;
        if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise with number of bytes peeked
     */
    async peekBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        let bytesRead = 0;
        if (normOptions.position) {
            const skipBytes = normOptions.position - this.position;
            if (skipBytes > 0) {
                // Peek past the gap: peek (length + skipBytes) into a temporary buffer,
                // then copy only the tail beyond the gap into the caller's buffer.
                const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
                bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
                uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
                return bytesRead - skipBytes;
            }
            else if (skipBytes < 0) {
                throw new Error('Cannot peek from a negative offset in a stream');
            }
        }
        if (normOptions.length > 0) {
            try {
                bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
            }
            catch (err) {
                // Best-effort peek: report 0 bytes instead of failing at end-of-stream
                if (options && options.mayBeLess && err instanceof EndOfStreamError) {
                    return 0;
                }
                throw err;
            }
            if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
                throw new EndOfStreamError();
            }
        }
        return bytesRead;
    }
    async ignore(length) {
        // debug(`ignore ${this.position}...${this.position + length - 1}`);
        // A stream cannot seek, so skipped bytes must actually be read and
        // discarded, in chunks of at most maxBufferSize bytes.
        const bufSize = Math.min(maxBufferSize, length);
        const buf = new Uint8Array(bufSize);
        let totBytesRead = 0;
        while (totBytesRead < length) {
            const remaining = length - totBytesRead;
            const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
            if (bytesRead < 0) {
                return bytesRead;
            }
            totBytesRead += bytesRead;
        }
        return totBytesRead;
    }
}

@ -0,0 +1,23 @@
/// <reference types="node" resolution-mode="require"/>
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { Readable } from 'node:stream';
import { BufferTokenizer } from './BufferTokenizer.js';
import { IFileInfo } from './types.js';
export { EndOfStreamError } from 'peek-readable';
export { ITokenizer, IFileInfo } from './types.js';
export { IToken, IGetToken } from '@tokenizer/token';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Will set fileSize, if the provided fileInfo carries it.
 * @param stream - Read from Node.js Stream.Readable
 * @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
 * @returns ReadStreamTokenizer
 */
export declare function fromStream(stream: Readable, fileInfo?: IFileInfo): ReadStreamTokenizer;
/**
 * Construct BufferTokenizer from given Uint8Array (or Buffer).
 * @param uint8Array - Uint8Array to tokenize
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns BufferTokenizer
 */
export declare function fromBuffer(uint8Array: Uint8Array, fileInfo?: IFileInfo): BufferTokenizer;

23
node_modules/strtok3/lib/core.js generated vendored

@ -0,0 +1,23 @@
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { BufferTokenizer } from './BufferTokenizer.js';
export { EndOfStreamError } from 'peek-readable';
/**
 * Construct a ReadStreamTokenizer from a Node.js readable stream.
 * @param stream - Stream to read from (Node.js Stream.Readable)
 * @param fileInfo - Optional file information, like size and MIME-type of the corresponding stream
 * @returns ReadStreamTokenizer
 */
export function fromStream(stream, fileInfo) {
    const info = fileInfo ? fileInfo : {};
    return new ReadStreamTokenizer(stream, info);
}
/**
 * Construct BufferTokenizer from the given Uint8Array (or Buffer).
 * @param uint8Array - Uint8Array to tokenize
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns BufferTokenizer
 */
export function fromBuffer(uint8Array, fileInfo) {
    return new BufferTokenizer(uint8Array, fileInfo);
}

@ -0,0 +1,15 @@
/// <reference types="node" resolution-mode="require"/>
import { Readable } from 'node:stream';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import * as core from './core.js';
export { fromFile } from './FileTokenizer.js';
export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core.js';
export { IToken, IGetToken } from '@tokenizer/token';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Will stat the file system for path and size when the stream exposes a `.path` property.
 * @param stream - Node.js Stream.Readable
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns Tokenizer
 */
export declare function fromStream(stream: Readable, fileInfo?: core.IFileInfo): Promise<ReadStreamTokenizer>;

20
node_modules/strtok3/lib/index.js generated vendored

@ -0,0 +1,20 @@
import * as fs from './FsPromise.js';
import * as core from './core.js';
export { fromFile } from './FileTokenizer.js';
export { EndOfStreamError, fromBuffer } from './core.js';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * When the stream exposes a `.path` property (e.g. fs.createReadStream),
 * the file's path and size are stat'ed and recorded in the fileInfo.
 * @param stream - Node.js Stream.Readable
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns Tokenizer
 */
export async function fromStream(stream, fileInfo) {
    const info = fileInfo ? fileInfo : {};
    if (stream.path) {
        const stat = await fs.stat(stream.path);
        info.path = stream.path;
        info.size = stat.size;
    }
    return core.fromStream(stream, info);
}

103
node_modules/strtok3/lib/types.d.ts generated vendored

@ -0,0 +1,103 @@
/// <reference types="node" resolution-mode="require"/>
import { IGetToken } from '@tokenizer/token';
/**
 * Describes the underlying source of a tokenizer (file, buffer or stream).
 * All attributes are optional.
 */
export interface IFileInfo {
    /**
     * File size in bytes
     */
    size?: number;
    /**
     * MIME-type of file
     */
    mimeType?: string;
    /**
     * File path
     */
    path?: string;
    /**
     * File URL
     */
    url?: string;
}
/**
 * Options controlling a single read or peek operation.
 * Each attribute is optional.
 */
export interface IReadChunkOptions {
    /**
     * The offset in the buffer to start writing at; default is 0
     */
    offset?: number;
    /**
     * Number of bytes to read.
     */
    length?: number;
    /**
     * Position where to begin reading from the file.
     * By default it is `tokenizer.position`.
     * Position may not be less than `tokenizer.position`.
     */
    position?: number;
    /**
     * If set, will not throw an EOF error if not all of the requested data could be read
     */
    mayBeLess?: boolean;
}
/**
 * The tokenizer allows us to read or peek from the tokenizer-stream.
 * The tokenizer-stream is an abstraction of a stream, file or Buffer.
 */
export interface ITokenizer {
    /**
     * Provide access to information of the underlying information stream or file.
     */
    fileInfo: IFileInfo;
    /**
     * Offset in bytes (= number of bytes read) since beginning of file or stream
     */
    position: number;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
     * @param options - Additional read options
     * @returns Promise with number of bytes read
     */
    readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @param maybeless - If set, will not throw an EOF error if fewer bytes than the requested length could be read.
     */
    peekToken<T>(token: IGetToken<T>, position?: number | null, maybeless?: boolean): Promise<T>;
    /**
     * Read a token from the tokenizer-stream.
     * @param token - Token to read from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     */
    readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
    /**
     * Peek a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    peekNumber(token: IGetToken<number>): Promise<number>;
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    readNumber(token: IGetToken<number>): Promise<number>;
    /**
     * Ignore given number of bytes
     * @param length - Number of bytes ignored
     */
    ignore(length: number): Promise<number>;
    /**
     * Clean up resources.
     * It does not close the stream for StreamReader, but it does close the file-descriptor.
     */
    close(): Promise<void>;
}

@ -0,0 +1 @@
export {};

94
node_modules/strtok3/package.json generated vendored

@ -0,0 +1,94 @@
{
"name": "strtok3",
"version": "7.0.0",
"description": "A promise based streaming tokenizer",
"author": {
"name": "Borewit",
"url": "https://github.com/Borewit"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
},
"scripts": {
"clean": "del-cli lib/**/*.js lib/**/*.js.map lib/**/*.d.ts test/**/*.js test/**/*.js.map",
"compile-src": "tsc -p lib",
"compile-test": "tsc -p test",
"compile": "npm run compile-src && npm run compile-test",
"build": "npm run clean && npm run compile",
"eslint": "eslint lib test --ext .ts --ignore-pattern *.d.ts",
"lint-md": "remark -u preset-lint-recommended .",
"lint": "npm run lint-md && npm run eslint",
"fix": "eslint lib test --ext .ts --ignore-pattern *.d.ts --fix",
"test": "mocha",
"test-coverage": "c8 npm run test",
"send-codacy": "c8 report --reporter=text-lcov | codacy-coverage",
"start": "npm run compile && npm run lint && npm run cover-test"
},
"engines": {
"node": ">=14.16"
},
"repository": {
"type": "git",
"url": "https://github.com/Borewit/strtok3.git"
},
"license": "MIT",
"type": "module",
"exports": {
".": {
"node": "./lib/index.js",
"default": "./lib/core.js"
},
"./core": "./lib/core.js"
},
"types": "lib/index.d.ts",
"files": [
"lib/**/*.js",
"lib/**/*.d.ts"
],
"bugs": {
"url": "https://github.com/Borewit/strtok3/issues"
},
"devDependencies": {
"@types/chai": "^4.3.1",
"@types/debug": "^4.1.7",
"@types/mocha": "^9.1.0",
"@types/node": "^18.6.3",
"@typescript-eslint/eslint-plugin": "^5.32.0",
"@typescript-eslint/parser": "^5.32.0",
"c8": "^7.12.0",
"chai": "^4.3.6",
"del-cli": "^5.0.0",
"eslint": "^8.21.0",
"eslint-config-prettier": "^8.5.0",
"eslint-import-resolver-typescript": "^3.4.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jsdoc": "^39.3.4",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-unicorn": "^43.0.2",
"mocha": "^10.0.0",
"remark-cli": "^11.0.0",
"remark-preset-lint-recommended": "^6.1.2",
"token-types": "^5.0.0",
"ts-node": "^10.9.1",
"typescript": "^4.7.4"
},
"dependencies": {
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.0.0"
},
"keywords": [
"tokenizer",
"reader",
"token",
"async",
"promise",
"parser",
"decoder",
"binary",
"endian",
"uint",
"stream",
"streaming"
]
}

7
node_modules/token-types/LICENSE generated vendored

@ -0,0 +1,7 @@
Copyright 2017 Borewit
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

120
node_modules/token-types/README.md generated vendored

@ -0,0 +1,120 @@
![Node.js CI](https://github.com/Borewit/token-types/workflows/Node.js%20CI/badge.svg)
[![NPM version](https://badge.fury.io/js/token-types.svg)](https://npmjs.org/package/token-types)
[![npm downloads](http://img.shields.io/npm/dm/token-types.svg)](https://npmcharts.com/compare/token-types,strtok3?start=1200&interval=30)
[![coveralls](https://coveralls.io/repos/github/Borewit/token-types/badge.svg?branch=master)](https://coveralls.io/github/Borewit/token-types?branch=master)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/4723ce4613fc49cda8db5eed29f18834)](https://www.codacy.com/app/Borewit/token-types?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=Borewit/token-types&amp;utm_campaign=Badge_Grade)
[![Language grade: JavaScript](https://img.shields.io/lgtm/grade/javascript/g/Borewit/token-types.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/token-types/context:javascript)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/Borewit/token-types.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/Borewit/token-types/alerts/)
[![DeepScan grade](https://deepscan.io/api/teams/5165/projects/6940/branches/61852/badge/grade.svg)](https://deepscan.io/dashboard#view=project&tid=5165&pid=6940&bid=61852)
[![Known Vulnerabilities](https://snyk.io/test/github/Borewit/token-types/badge.svg?targetFile=package.json)](https://snyk.io/test/github/Borewit/token-types?targetFile=package.json)
# token-types
A primitive token library used to read and write from a node `Buffer`.
Although it is possible to use this module directly, it is primarily designed to be used with [strtok3 tokenizer](https://github.com/Borewit/strtok3).
## Compatibility
Module: version 5 migrated from [CommonJS](https://en.wikipedia.org/wiki/CommonJS) to [pure ECMAScript Module (ESM)](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c).
JavaScript is compliant with [ECMAScript 2019 (ES10)](https://en.wikipedia.org/wiki/ECMAScript#10th_Edition_%E2%80%93_ECMAScript_2019).
## Installation
```sh
npm install --save token-types
```
Usually in combination with [strtok3](https://github.com/Borewit/strtok3):
```sh
npm install --save strtok3
```
Using TypeScript you should also install [@tokenizer/token](https://github.com/Borewit/tokenizer-token) as a development
dependency:
```shell
npm install --save-dev @tokenizer/token
```
## Example
```js
import * as strtok3 from 'strtok3';
import * as token from 'token-types';
(async () => {
const tokenizer = await strtok3.fromFile("somefile.bin");
try {
const myNumber = await tokenizer.readToken(token.Float32_BE);
console.log(`My number: ${myNumber}`);
} finally {
tokenizer.close(); // Close the file
}
})();
```
## Tokens
### Numeric tokens
`node-strtok` supports a wide variety of numerical tokens out of the box:
| Token | Number | Bits | Endianness |
|---------------|------------------|------|----------------|
| `UINT8` | Unsigned integer | 8 | n/a |
| `UINT16_BE` | Unsigned integer | 16 | big endian |
| `UINT16_LE` | Unsigned integer | 16 | little endian |
| `UINT24_BE` | Unsigned integer | 24 | big endian |
| `UINT24_LE` | Unsigned integer | 24 | little endian |
| `UINT32_BE` | Unsigned integer | 32 | big endian |
| `UINT32_LE` | Unsigned integer | 32 | little endian |
| `UINT64_BE` | Unsigned integer | 64 | big endian |
| `UINT64_LE`* | Unsigned integer | 64 | little endian |
| `INT8` | Signed integer | 8 | n/a |
| `INT16_BE` | Signed integer | 16 | big endian |
| `INT16_LE` | Signed integer | 16 | little endian |
| `INT24_BE` | Signed integer | 24 | big endian |
| `INT24_LE` | Signed integer | 24 | little endian |
| `INT32_BE` | Signed integer | 32 | big endian |
| `INT32_LE` | Signed integer | 32 | little endian |
| `INT64_BE` | Signed integer | 64 | big endian |
| `INT64_LE`* | Signed integer | 64 | little endian |
| `Float16_BE` | IEEE 754 float | 16 | big endian |
| `Float16_LE` | IEEE 754 float | 16 | little endian |
| `Float32_BE` | IEEE 754 float | 32 | big endian |
| `Float32_LE` | IEEE 754 float | 32 | little endian |
| `Float64_BE` | IEEE 754 float | 64 | big endian |
| `Float64_LE` | IEEE 754 float | 64 | little endian |
| `Float80_BE`* | IEEE 754 float | 80 | big endian |
| `Float80_LE`* | IEEE 754 float | 80 | little endian |
### Other tokens
String types:
* Windows-1252
* ISO-8859-1
*) The tokens exceed the JavaScript IEEE 754 64-bit Floating Point precision, decoding and encoding is best effort based.
### Custom token
Complex tokens can be added, which makes it very suitable for reading binary files or network messages:
```js
ExtendedHeader = {
len: 10,
get: (buf, off) => {
return {
// Extended header size
size: Token.UINT32_BE.get(buf, off),
// Extended Flags
extendedFlags: Token.UINT16_BE.get(buf, off + 4),
// Size of padding
sizeOfPadding: Token.UINT32_BE.get(buf, off + 6),
// CRC data present
crcDataPresent: common.strtokBITSET.get(buf, off + 4, 31)
};
}
};
```

@ -0,0 +1,151 @@
/// <reference types="node" resolution-mode="require"/>
import { IToken, IGetToken } from '@tokenizer/token';
import { Buffer } from 'node:buffer';
/**
* 8-bit unsigned integer
*/
export declare const UINT8: IToken<number>;
/**
* 16-bit unsigned integer, Little Endian byte order
*/
export declare const UINT16_LE: IToken<number>;
/**
* 16-bit unsigned integer, Big Endian byte order
*/
export declare const UINT16_BE: IToken<number>;
/**
* 24-bit unsigned integer, Little Endian byte order
*/
export declare const UINT24_LE: IToken<number>;
/**
* 24-bit unsigned integer, Big Endian byte order
*/
export declare const UINT24_BE: IToken<number>;
/**
* 32-bit unsigned integer, Little Endian byte order
*/
export declare const UINT32_LE: IToken<number>;
/**
* 32-bit unsigned integer, Big Endian byte order
*/
export declare const UINT32_BE: IToken<number>;
/**
* 8-bit signed integer
*/
export declare const INT8: IToken<number>;
/**
* 16-bit signed integer, Big Endian byte order
*/
export declare const INT16_BE: IToken<number>;
/**
* 16-bit signed integer, Little Endian byte order
*/
export declare const INT16_LE: IToken<number>;
/**
* 24-bit signed integer, Little Endian byte order
*/
export declare const INT24_LE: IToken<number>;
/**
* 24-bit signed integer, Big Endian byte order
*/
export declare const INT24_BE: IToken<number>;
/**
* 32-bit signed integer, Big Endian byte order
*/
export declare const INT32_BE: IToken<number>;
/**
 * 32-bit signed integer, Little Endian byte order
 */
export declare const INT32_LE: IToken<number>;
/**
* 64-bit unsigned integer, Little Endian byte order
*/
export declare const UINT64_LE: IToken<bigint>;
/**
* 64-bit signed integer, Little Endian byte order
*/
export declare const INT64_LE: IToken<bigint>;
/**
* 64-bit unsigned integer, Big Endian byte order
*/
export declare const UINT64_BE: IToken<bigint>;
/**
* 64-bit signed integer, Big Endian byte order
*/
export declare const INT64_BE: IToken<bigint>;
/**
* IEEE 754 16-bit (half precision) float, big endian
*/
export declare const Float16_BE: IToken<number>;
/**
* IEEE 754 16-bit (half precision) float, little endian
*/
export declare const Float16_LE: IToken<number>;
/**
* IEEE 754 32-bit (single precision) float, big endian
*/
export declare const Float32_BE: IToken<number>;
/**
* IEEE 754 32-bit (single precision) float, little endian
*/
export declare const Float32_LE: IToken<number>;
/**
* IEEE 754 64-bit (double precision) float, big endian
*/
export declare const Float64_BE: IToken<number>;
/**
* IEEE 754 64-bit (double precision) float, little endian
*/
export declare const Float64_LE: IToken<number>;
/**
* IEEE 754 80-bit (extended precision) float, big endian
*/
export declare const Float80_BE: IToken<number>;
/**
* IEEE 754 80-bit (extended precision) float, little endian
*/
export declare const Float80_LE: IToken<number>;
/**
* Ignore a given number of bytes
*/
export declare class IgnoreType implements IGetToken<void> {
len: number;
/**
* @param len number of bytes to ignore
*/
constructor(len: number);
get(array: Uint8Array, off: number): void;
}
export declare class Uint8ArrayType implements IGetToken<Uint8Array> {
len: number;
constructor(len: number);
get(array: Uint8Array, offset: number): Uint8Array;
}
export declare class BufferType implements IGetToken<Uint8Array, Buffer> {
len: number;
constructor(len: number);
get(uint8Array: Uint8Array, off: number): Buffer;
}
/**
* Consume a fixed number of bytes from the stream and return a string with a specified encoding.
*/
export declare class StringType implements IGetToken<string, Buffer> {
len: number;
encoding: BufferEncoding;
constructor(len: number, encoding: BufferEncoding);
get(uint8Array: Uint8Array, offset: number): string;
}
/**
* ANSI Latin 1 String
* Using windows-1252 / ISO 8859-1 decoding
*/
export declare class AnsiStringType implements IGetToken<string> {
len: number;
private static windows1252;
private static decode;
private static inRange;
private static codePointToString;
private static singleByteDecoder;
constructor(len: number);
get(buffer: Buffer, offset?: number): string;
}

@ -0,0 +1,449 @@
import * as ieee754 from 'ieee754';
import { Buffer } from 'node:buffer';
// Primitive types
function dv(array) {
return new DataView(array.buffer, array.byteOffset);
}
/**
* 8-bit unsigned integer
*/
export const UINT8 = {
len: 1,
get(array, offset) {
return dv(array).getUint8(offset);
},
put(array, offset, value) {
dv(array).setUint8(offset, value);
return offset + 1;
}
};
/**
* 16-bit unsigned integer, Little Endian byte order
*/
export const UINT16_LE = {
len: 2,
get(array, offset) {
return dv(array).getUint16(offset, true);
},
put(array, offset, value) {
dv(array).setUint16(offset, value, true);
return offset + 2;
}
};
/**
* 16-bit unsigned integer, Big Endian byte order
*/
export const UINT16_BE = {
len: 2,
get(array, offset) {
return dv(array).getUint16(offset);
},
put(array, offset, value) {
dv(array).setUint16(offset, value);
return offset + 2;
}
};
/**
* 24-bit unsigned integer, Little Endian byte order
*/
export const UINT24_LE = {
len: 3,
get(array, offset) {
const dataView = dv(array);
return dataView.getUint8(offset) + (dataView.getUint16(offset + 1, true) << 8);
},
put(array, offset, value) {
const dataView = dv(array);
dataView.setUint8(offset, value & 0xff);
dataView.setUint16(offset + 1, value >> 8, true);
return offset + 3;
}
};
/**
* 24-bit unsigned integer, Big Endian byte order
*/
export const UINT24_BE = {
len: 3,
get(array, offset) {
const dataView = dv(array);
return (dataView.getUint16(offset) << 8) + dataView.getUint8(offset + 2);
},
put(array, offset, value) {
const dataView = dv(array);
dataView.setUint16(offset, value >> 8);
dataView.setUint8(offset + 2, value & 0xff);
return offset + 3;
}
};
/**
* 32-bit unsigned integer, Little Endian byte order
*/
export const UINT32_LE = {
len: 4,
get(array, offset) {
return dv(array).getUint32(offset, true);
},
put(array, offset, value) {
dv(array).setUint32(offset, value, true);
return offset + 4;
}
};
/**
* 32-bit unsigned integer, Big Endian byte order
*/
export const UINT32_BE = {
len: 4,
get(array, offset) {
return dv(array).getUint32(offset);
},
put(array, offset, value) {
dv(array).setUint32(offset, value);
return offset + 4;
}
};
/**
* 8-bit signed integer
*/
export const INT8 = {
len: 1,
get(array, offset) {
return dv(array).getInt8(offset);
},
put(array, offset, value) {
dv(array).setInt8(offset, value);
return offset + 1;
}
};
/**
* 16-bit signed integer, Big Endian byte order
*/
export const INT16_BE = {
len: 2,
get(array, offset) {
return dv(array).getInt16(offset);
},
put(array, offset, value) {
dv(array).setInt16(offset, value);
return offset + 2;
}
};
/**
* 16-bit signed integer, Little Endian byte order
*/
export const INT16_LE = {
len: 2,
get(array, offset) {
return dv(array).getInt16(offset, true);
},
put(array, offset, value) {
dv(array).setInt16(offset, value, true);
return offset + 2;
}
};
/**
* 24-bit signed integer, Little Endian byte order
*/
export const INT24_LE = {
len: 3,
get(array, offset) {
const unsigned = UINT24_LE.get(array, offset);
return unsigned > 0x7fffff ? unsigned - 0x1000000 : unsigned;
},
put(array, offset, value) {
const dataView = dv(array);
dataView.setUint8(offset, value & 0xff);
dataView.setUint16(offset + 1, value >> 8, true);
return offset + 3;
}
};
/**
* 24-bit signed integer, Big Endian byte order
*/
export const INT24_BE = {
len: 3,
get(array, offset) {
const unsigned = UINT24_BE.get(array, offset);
return unsigned > 0x7fffff ? unsigned - 0x1000000 : unsigned;
},
put(array, offset, value) {
const dataView = dv(array);
dataView.setUint16(offset, value >> 8);
dataView.setUint8(offset + 2, value & 0xff);
return offset + 3;
}
};
/**
* 32-bit signed integer, Big Endian byte order
*/
export const INT32_BE = {
len: 4,
get(array, offset) {
return dv(array).getInt32(offset);
},
put(array, offset, value) {
dv(array).setInt32(offset, value);
return offset + 4;
}
};
/**
 * 32-bit signed integer, Little Endian byte order
 */
export const INT32_LE = {
    len: 4,
    get(array, offset) {
        return dv(array).getInt32(offset, true);
    },
    put(array, offset, value) {
        dv(array).setInt32(offset, value, true);
        return offset + 4;
    }
};
/**
* 64-bit unsigned integer, Little Endian byte order
*/
export const UINT64_LE = {
len: 8,
get(array, offset) {
return dv(array).getBigUint64(offset, true);
},
put(array, offset, value) {
dv(array).setBigUint64(offset, value, true);
return offset + 8;
}
};
/**
* 64-bit signed integer, Little Endian byte order
*/
export const INT64_LE = {
len: 8,
get(array, offset) {
return dv(array).getBigInt64(offset, true);
},
put(array, offset, value) {
dv(array).setBigInt64(offset, value, true);
return offset + 8;
}
};
/**
* 64-bit unsigned integer, Big Endian byte order
*/
export const UINT64_BE = {
len: 8,
get(array, offset) {
return dv(array).getBigUint64(offset);
},
put(array, offset, value) {
dv(array).setBigUint64(offset, value);
return offset + 8;
}
};
/**
* 64-bit signed integer, Big Endian byte order
*/
export const INT64_BE = {
len: 8,
get(array, offset) {
return dv(array).getBigInt64(offset);
},
put(array, offset, value) {
dv(array).setBigInt64(offset, value);
return offset + 8;
}
};
/**
* IEEE 754 16-bit (half precision) float, big endian
*/
export const Float16_BE = {
len: 2,
get(dataView, offset) {
return ieee754.read(dataView, offset, false, 10, this.len);
},
put(dataView, offset, value) {
ieee754.write(dataView, value, offset, false, 10, this.len);
return offset + this.len;
}
};
/**
* IEEE 754 16-bit (half precision) float, little endian
*/
export const Float16_LE = {
len: 2,
get(array, offset) {
return ieee754.read(array, offset, true, 10, this.len);
},
put(array, offset, value) {
ieee754.write(array, value, offset, true, 10, this.len);
return offset + this.len;
}
};
/**
* IEEE 754 32-bit (single precision) float, big endian
*/
export const Float32_BE = {
len: 4,
get(array, offset) {
return dv(array).getFloat32(offset);
},
put(array, offset, value) {
dv(array).setFloat32(offset, value);
return offset + 4;
}
};
/**
* IEEE 754 32-bit (single precision) float, little endian
*/
export const Float32_LE = {
len: 4,
get(array, offset) {
return dv(array).getFloat32(offset, true);
},
put(array, offset, value) {
dv(array).setFloat32(offset, value, true);
return offset + 4;
}
};
/**
* IEEE 754 64-bit (double precision) float, big endian
*/
export const Float64_BE = {
len: 8,
get(array, offset) {
return dv(array).getFloat64(offset);
},
put(array, offset, value) {
dv(array).setFloat64(offset, value);
return offset + 8;
}
};
/**
* IEEE 754 64-bit (double precision) float, little endian
*/
export const Float64_LE = {
len: 8,
get(array, offset) {
return dv(array).getFloat64(offset, true);
},
put(array, offset, value) {
dv(array).setFloat64(offset, value, true);
return offset + 8;
}
};
/**
* IEEE 754 80-bit (extended precision) float, big endian
*/
export const Float80_BE = {
len: 10,
get(array, offset) {
return ieee754.read(array, offset, false, 63, this.len);
},
put(array, offset, value) {
ieee754.write(array, value, offset, false, 63, this.len);
return offset + this.len;
}
};
/**
* IEEE 754 80-bit (extended precision) float, little endian
*/
export const Float80_LE = {
len: 10,
get(array, offset) {
return ieee754.read(array, offset, true, 63, this.len);
},
put(array, offset, value) {
ieee754.write(array, value, offset, true, 63, this.len);
return offset + this.len;
}
};
/**
 * Token occupying a fixed number of bytes whose content is discarded.
 */
export class IgnoreType {
    /**
     * @param len - Number of bytes to ignore
     */
    constructor(len) {
        this.len = len;
    }
    // ToDo: don't read, but skip data
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    get(array, off) { }
}
/**
 * Token reading a fixed-length byte range as a Uint8Array view
 * (shares the underlying memory, no copy).
 */
export class Uint8ArrayType {
    constructor(len) {
        this.len = len;
    }
    get(array, offset) {
        const end = offset + this.len;
        return array.subarray(offset, end);
    }
}
/**
 * Token reading a fixed-length byte range as a freshly copied Buffer.
 */
export class BufferType {
    constructor(len) {
        this.len = len;
    }
    get(uint8Array, off) {
        const view = uint8Array.subarray(off, off + this.len);
        return Buffer.from(view);
    }
}
/**
 * Consume a fixed number of bytes from the stream and return a string with a specified encoding.
 */
export class StringType {
    constructor(len, encoding) {
        this.len = len;
        this.encoding = encoding;
    }
    get(uint8Array, offset) {
        const asBuffer = Buffer.from(uint8Array);
        return asBuffer.toString(this.encoding, offset, offset + this.len);
    }
}
/**
 * ANSI Latin 1 String
 * Using windows-1252 / ISO 8859-1 decoding
 */
export class AnsiStringType {
    // Code points for bytes 0x80-0xFF of the Windows-1252 code page
    // (bytes 0x00-0x7F map 1:1 to ASCII and bypass this table).
    static windows1252 = [8364, 129, 8218, 402, 8222, 8230, 8224, 8225, 710, 8240, 352,
        8249, 338, 141, 381, 143, 144, 8216, 8217, 8220, 8221, 8226, 8211, 8212, 732,
        8482, 353, 8250, 339, 157, 382, 376, 160, 161, 162, 163, 164, 165, 166, 167, 168,
        169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
        185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200,
        201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
        217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
        233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
        248, 249, 250, 251, 252, 253, 254, 255];
    /**
     * @param len - Number of bytes (= number of characters) to decode
     */
    constructor(len) {
        this.len = len;
    }
    // Decode bytes [offset, until) into a string, one byte per code point.
    static decode(buffer, offset, until) {
        let str = '';
        for (let i = offset; i < until; ++i) {
            str += AnsiStringType.codePointToString(AnsiStringType.singleByteDecoder(buffer[i]));
        }
        return str;
    }
    static inRange(a, min, max) {
        return min <= a && a <= max;
    }
    // Convert a Unicode code point to a JS string (emits a surrogate pair
    // for code points above the BMP).
    static codePointToString(cp) {
        if (cp <= 0xFFFF) {
            return String.fromCharCode(cp);
        }
        else {
            cp -= 0x10000;
            return String.fromCharCode((cp >> 10) + 0xD800, (cp & 0x3FF) + 0xDC00);
        }
    }
    // Map a single byte to its Unicode code point: ASCII passes through,
    // 0x80-0xFF go through the windows-1252 table.
    static singleByteDecoder(bite) {
        if (AnsiStringType.inRange(bite, 0x00, 0x7F)) {
            return bite;
        }
        const codePoint = AnsiStringType.windows1252[bite - 0x80];
        if (codePoint === null) {
            throw Error('invalid encoding'); // fixed message typo: was 'invaliding encoding'
        }
        return codePoint;
    }
    get(buffer, offset = 0) {
        return AnsiStringType.decode(buffer, offset, offset + this.len);
    }
}

@ -0,0 +1,87 @@
{
"name": "token-types",
"version": "5.0.1",
"description": "Common token types for decoding and encoding numeric and string values",
"author": {
"name": "Borewit",
"url": "https://github.com/Borewit"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
},
"scripts": {
"clean": "del-cli lib/**/*.js lib/***.js.map *.d.ts test/**/*.d.ts test/**/*.js test/**/*.js.map .nyc_output",
"build": "npm run compile",
"compile-src": "tsc --p lib",
"compile-test": "tsc --p test",
"compile": "npm run compile-src && npm run compile-test",
"eslint": "eslint lib test --ext .ts --ignore-pattern *.d.ts",
"lint-ts": "tslint lib/index.ts --exclude '*.d.ts' 'test/**/*.ts' --exclude 'test/**/*.d.ts'",
"lint-md": "remark -u preset-lint-recommended .",
"lint": "npm run lint-md && npm run eslint",
"test": "mocha",
"test-coverage": "c8 npm run test",
"send-codacy": "c8 report --reports-dir=./.coverage --reporter=text-lcov | codacy-coverage"
},
"engines": {
"node": ">=14.16"
},
"repository": {
"type": "git",
"url": "https://github.com/Borewit/token-types"
},
"files": [
"lib/index.js",
"lib/index.d.ts"
],
"license": "MIT",
"type": "module",
"exports": "./lib/index.js",
"types": "lib/index.d.ts",
"bugs": {
"url": "https://github.com/Borewit/token-types/issues"
},
"devDependencies": {
"@types/chai": "^4.3.1",
"@types/mocha": "^9.1.0",
"@types/node": "^18.6.3",
"@typescript-eslint/eslint-plugin": "^5.32.0",
"@typescript-eslint/parser": "^5.32.0",
"c8": "^7.12.0",
"chai": "^4.3.6",
"del-cli": "^5.0.0",
"eslint": "^8.9.0",
"eslint-config-prettier": "^8.5.0",
"eslint-import-resolver-typescript": "^3.4.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jsdoc": "^39.3.4",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-unicorn": "^43.0.2",
"mocha": "^10.0.0",
"remark-cli": "^11.0.0",
"remark-preset-lint-recommended": "^6.1.2",
"source-map-support": "^0.5.21",
"ts-node": "^10.9.1",
"typescript": "^4.7.4"
},
"dependencies": {
"@tokenizer/token": "^0.3.0",
"ieee754": "^1.2.1"
},
"remarkConfig": {
"plugins": [
"preset-lint-recommended"
]
},
"keywords": [
"token",
"integer",
"unsigned",
"numeric",
"float",
"IEEE",
"754",
"strtok3"
]
}

156
package-lock.json generated

@ -36,10 +36,12 @@
"fastify-multipart": "^5.4.0",
"fastify-static": "^4.7.0",
"fastify-swagger": "^5.2.0",
"file-type": "^18.5.0",
"form-data": "^2.3.3",
"fs": "^0.0.1-security",
"generate-password": "^1.6.0",
"gridfile": "^1.1.3",
"image-type": "^5.2.0",
"libphonenumber-js": "^1.9.6",
"moment": "^2.29.4",
"moment-timezone": "^0.5.43",
@ -3309,6 +3311,11 @@
"url": "https://github.com/sponsors/ueberdosis"
}
},
"node_modules/@tokenizer/token": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz",
"integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="
},
"node_modules/@tootallnate/once": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
@ -5767,6 +5774,22 @@
"xtend": "^4.0.0"
}
},
"node_modules/file-type": {
"version": "18.5.0",
"resolved": "https://registry.npmjs.org/file-type/-/file-type-18.5.0.tgz",
"integrity": "sha512-yvpl5U868+V6PqXHMmsESpg6unQ5GfnPssl4dxdJudBrr9qy7Fddt7EVX1VLlddFfe8Gj9N7goCZH22FXuSQXQ==",
"dependencies": {
"readable-web-to-node-stream": "^3.0.2",
"strtok3": "^7.0.0",
"token-types": "^5.0.1"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"url": "https://github.com/sindresorhus/file-type?sponsor=1"
}
},
"node_modules/file-uri-to-path": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-2.0.0.tgz",
@ -6629,6 +6652,20 @@
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
},
"node_modules/image-type": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/image-type/-/image-type-5.2.0.tgz",
"integrity": "sha512-f0+6qHeGfyEh1HhFGPUWZb+Dqqm6raKeeAR6Opt01wBBIQL32/1wpZkPQm8gcliB/Ws6oiX2ofFYXB57+CV0iQ==",
"dependencies": {
"file-type": "^18.1.0"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/immediate": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
@ -8351,6 +8388,18 @@
"through": "~2.3"
}
},
"node_modules/peek-readable": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz",
"integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==",
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
@ -9079,6 +9128,21 @@
"node": ">= 6"
}
},
"node_modules/readable-web-to-node-stream": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz",
"integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==",
"dependencies": {
"readable-stream": "^3.6.0"
},
"engines": {
"node": ">=8"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/readdir-glob": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz",
@ -9962,6 +10026,25 @@
"node": ">=0.10.0"
}
},
"node_modules/strtok3": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz",
"integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==",
"dependencies": {
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.0.0"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
@ -10281,6 +10364,22 @@
"node": ">=0.6"
}
},
"node_modules/token-types": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz",
"integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==",
"dependencies": {
"@tokenizer/token": "^0.3.0",
"ieee754": "^1.2.1"
},
"engines": {
"node": ">=14.16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/touch": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",
@ -13490,6 +13589,11 @@
"@tiptap/extension-text": "^2.0.0-beta.209"
}
},
"@tokenizer/token": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz",
"integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="
},
"@tootallnate/once": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
@ -15486,6 +15590,16 @@
"xtend": "^4.0.0"
}
},
"file-type": {
"version": "18.5.0",
"resolved": "https://registry.npmjs.org/file-type/-/file-type-18.5.0.tgz",
"integrity": "sha512-yvpl5U868+V6PqXHMmsESpg6unQ5GfnPssl4dxdJudBrr9qy7Fddt7EVX1VLlddFfe8Gj9N7goCZH22FXuSQXQ==",
"requires": {
"readable-web-to-node-stream": "^3.0.2",
"strtok3": "^7.0.0",
"token-types": "^5.0.1"
}
},
"file-uri-to-path": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-2.0.0.tgz",
@ -16151,6 +16265,14 @@
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
},
"image-type": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/image-type/-/image-type-5.2.0.tgz",
"integrity": "sha512-f0+6qHeGfyEh1HhFGPUWZb+Dqqm6raKeeAR6Opt01wBBIQL32/1wpZkPQm8gcliB/Ws6oiX2ofFYXB57+CV0iQ==",
"requires": {
"file-type": "^18.1.0"
}
},
"immediate": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
@ -17486,6 +17608,11 @@
"through": "~2.3"
}
},
"peek-readable": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz",
"integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A=="
},
"performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
@ -18031,6 +18158,14 @@
"util-deprecate": "^1.0.1"
}
},
"readable-web-to-node-stream": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz",
"integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==",
"requires": {
"readable-stream": "^3.6.0"
}
},
"readdir-glob": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz",
@ -18735,6 +18870,18 @@
"is-utf8": "^0.2.0"
}
},
<<<<<<< HEAD
=======
"strtok3": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz",
"integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==",
"requires": {
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.0.0"
}
},
>>>>>>> 127251e2ebfc5c15d56a359b7f12754359034ee7
"stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
@ -18994,6 +19141,15 @@
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="
},
"token-types": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz",
"integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==",
"requires": {
"@tokenizer/token": "^0.3.0",
"ieee754": "^1.2.1"
}
},
"touch": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",

@ -37,10 +37,12 @@
"fastify-multipart": "^5.4.0",
"fastify-static": "^4.7.0",
"fastify-swagger": "^5.2.0",
"file-type": "^18.5.0",
"form-data": "^2.3.3",
"fs": "^0.0.1-security",
"generate-password": "^1.6.0",
"gridfile": "^1.1.3",
"image-type": "^5.2.0",
"libphonenumber-js": "^1.9.6",
"moment": "^2.29.4",
"moment-timezone": "^0.5.43",

@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "arminta-tank",
"private_key_id": "b97cfb067a68e88197dd4b578de8baefe739ef82",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCvtJCiabxU2rI+\nhEkjVEu/0nHv+vyU3O2x/3K2sJXMVHvwmRpncOfLXsveyIOi69wUkdTMQ2ghCQBA\nwYzcX0eQ7Txq8AuuksHTanGP4ZZgHuS2YjlQe67BYbMsA/B7DwcSKypYXmw2C8aF\nraXYO8xq1lltB2x6d8CEBttBFay3mHH2M2H+a+0emFuOp0IgurNZ69sWblx9GKpM\nx4D+gxGPT400PO/TDt799Z2hXRa5262bA4MV8v6jzq4Q8KN8+Systbhv+1a83AWd\n+M21ffMC4HmvNG4JqAefoBAl4uQpvHKVo4QNZvV+bP99nZ4Zj95fED0vTC+GptZ5\no8wG5p4BAgMBAAECggEACqU8Ud3JhYPcUqu3xXBkGES5JuDAXMnTrDIZnnpdkhQg\nj/3Ltu6VGaoWPKrOCHEFPBkPkTpoiL9bjMDZTqkGbFBGVcWSVJ7zR8KSS7UPlAZt\nJaR2++KJfpGTfihFMcSB4Xq8aZ8ba+0b16GzK5IobwTqhT/C1ZrzruAeOL/WH4eu\nxmp5PeXdUVaP6eF+5fzaBFxAo2uNSyhe8ckFXiYO/tmmUBriEc/NDs8/mbpkqMM8\nCcIaVt20eelue6pYXh/3PEPcYFGSISbY1gzQXinvH8keT0YckG+VcKYl+LKaLcz0\nYVw6oJf/0b4UUOH/tpdVDxzSslYap8DRPBhwEkjTUQKBgQD3kZolBkipD7fimEOg\neq/U6CQ5NhBGAMyMztlGZpOHVPYXrf5IVSEuhJpb/vfwaz2PzJuJvlf7Zh8B89dL\nKCLbHKSRA6pDfcNCMmco7FjV+pDqWgZN6emf7QoSdwcjLzU7lcFz3EhCRSji8q7O\nc/eMuXlk1o+ci/QUnFrWP7xGcQKBgQC1sG4tL5PEu2mzuH9YO03kSzMpD14mjtKS\n8U9wgG6jW6DGdxnp8kkSRIJ1GNPQ2dQe9Y1PMX3K6ab9zl1yK7C4UzX8o1n0oLrN\nt97WsWAej8WdazoOJjSlwvK6EL0tet49skulUOaVGXandYkxOmomp8K9LWVQst6h\nrqDH7Ho4kQKBgQDhFq7xrs7td28XRJ1oEWRAzXfx1GfjXeKcu4zs52KHDwZY7Ude\n+Jectc+qiR1gqABm9/8Ks9dGqcjTe6JpoOpgEaDGgHCTwIqBa1ZD7O+wTYroqEVH\nHb/eOiSIOVRx9wANVtF2x/ei2UKdZUzuRJ30gsYOms4j5VNcUfR9C6Hg4QKBgAu0\naASlMgHUiQS4+As5FtWEOrGEee5F+S7aOOHohDgl6P+gIJ0/rJFutHDSvqKPxeiQ\nrR/S8mWmE8n6B28ZfXMCeVHjPCJUUHR8SfDO0IBnhrWTeNP8hhu/qV9vQKCzb6nN\n2K23voMQduJm4gW326rN1+/UAtjmDQ2hcF4wz9bhAoGBAJVeHGgDg+JV0nGBIbTG\nEYxNR4FsJlOZWpMvZAbtQDMiysBJeQpYjsMfDKJE55q2BbXosd4QG5sGGO2g+DTb\nQNy/OuY7hBjbq5kQDxCTQoaQGQQHBr5Ya3H/NbnUUA4vyCHsyKtUU3H2Yy/8JW5F\nTNwSI7WOrXbCFCtpbqKwQT3U\n-----END PRIVATE KEY-----\n",
"client_email": "515216557013-compute@developer.gserviceaccount.com",
"client_id": "101537158222215816036",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/515216557013-compute%40developer.gserviceaccount.com",
"universe_domain": "googleapis.com"
}

@ -11,6 +11,16 @@ const cron = require('node-cron');
const moment = require('moment');
async function deleteOldRecords() {
const SEVEN_DAYS_IN_MILLISECONDS = 7 * 24 * 60 * 60 * 1000;
const sevenDaysAgo = new Date(Date.now() - SEVEN_DAYS_IN_MILLISECONDS);
await MotorData.deleteMany({ startTime: { $lt: sevenDaysAgo } });
}
exports.addTanks = async (req, reply) => {
try {
@ -250,32 +260,51 @@ exports.getTanklevels = async (req, reply) => {
};
const intervals = {};
let sump_water_levels=[];
let supplier_tanks = [];
exports.motorAction = async (req, reply) => {
try {
let start_time,stop_time
7
const customerId = req.params.customerId;
const action = req.body.action
const receiver_tank = req.body.to
const receiver_tank_info = await Tank.findOne({ customerId ,tankName:receiver_tank,tankLocation:(req.body.to_type).toLowerCase()});
const receiver_capacity = parseInt((receiver_tank_info.capacity).replace(/,/g, ''), 10)
const desired_water_percentage = parseInt((req.body.percentage).replace(/,/g, ''), 10)
const supplier_tank = req.body.from
const supplier_tank_type = (req.body.from_type).toLowerCase()
const receiver_type = (req.body.to_type).toLowerCase()
console.log(supplier_tank)
const interval_variable = supplier_tank+receiver_tank
// console.log(supplier_tank)
// const suplr_tank_info1 = await Tank.findOne({ customerId ,tankName:supplier_tank,tankLocation:supplier_tank_type});
const interval_variable = supplier_tank+receiver_tank
let currentTank = supplier_tanks.find(tank => tank.supplier_tank === supplier_tank);
let currentSump = sump_water_levels.find(tank => tank.supplier_tank === supplier_tank);
if(action === "start"){
start_time = new Date().toLocaleString('en-US', {timeZone: 'Asia/Kolkata'})
if (!currentTank) {
currentTank = {
supplier_tank: supplier_tank,
start_time: new Date().toLocaleString('en-US', { timeZone: 'Asia/Kolkata' })
};
supplier_tanks.push(currentTank);
}
// start_time = new Date().toLocaleString('en-US', {timeZone: 'Asia/Kolkata'})
console.log(supplier_tanks)
// const stop_at = req.body.stop_at
if(supplier_tank_type==="sump" && receiver_type === "overhead"){
await Tank.findOneAndUpdate({customerId, tankName: receiver_tank,tankLocation:receiver_type}, { $set: { motor_status: "1" } });
const supplier_tank_info1 = await Tank.findOne({ customerId ,tankName:supplier_tank,tankLocation:supplier_tank_type});
@ -283,7 +312,23 @@ exports.motorAction = async (req, reply) => {
//const initial_update = parseInt(supplier_tank_info1.waterlevel.replace(/,/g, ''), 10)-200;
// await Tank.findOneAndUpdate({customerId, tankName: supplier_tank,tankLocation:supplier_tank_type}, { $set: { waterlevel: initial_update } });
const supplier_tank_info = await Tank.findOne({ customerId ,tankName:supplier_tank,tankLocation:supplier_tank_type});
const sump_water_level= parseInt(supplier_tank_info.waterlevel.replace(/,/g, ''), 10)
const receiver_tank_info2 = await Tank.findOne({ customerId ,tankName:receiver_tank,tankLocation:(req.body.to_type).toLowerCase()});
const water_added_from_midnight = parseInt((receiver_tank_info2.total_water_added_from_midnight).replace(/,/g, ''), 10)
if (!currentSump) {
currentSump = {
supplier_tank: supplier_tank,
supplier_initial_waterlevel:sump_water_level,
receiver_tank_total_water_added_from_midnight:water_added_from_midnight
};
sump_water_levels.push(currentSump);
}
console.log(sump_water_levels)
const overheadTank = await Tank.findOne({ customerId, tankName: receiver_tank, tankLocation: receiver_type });
const connection = overheadTank.connections.inputConnections.find((conn) => conn.inputConnections === supplier_tank);
@ -412,8 +457,6 @@ exports.motorAction = async (req, reply) => {
// }
if(supplier_tank_type==="sump" && receiver_type === "sump"){
const receiver_capacity = parseInt(receiver_tank_info.capacity.replace(/,/g, ''), 10)
@ -563,10 +606,47 @@ exports.motorAction = async (req, reply) => {
}
else if (action === "stop") {
stop_time = new Date().toLocaleString('en-US', {timeZone: 'Asia/Kolkata'})
//stop_time = new Date().toLocaleString('en-US', {timeZone: 'Asia/Kolkata'})
clearInterval(intervals[interval_variable]); // Clear the interval for this tank
delete intervals[interval_variable];
const stopTime = new Date().toLocaleString('en-US', { timeZone: 'Asia/Kolkata' });
// console.log(currentTank.start_time)
const startTime = currentTank.start_time;
// const duration = calculateDuration(startTime, stopTime);
// Store the duration or perform any required operations
supplier_tanks = supplier_tanks.filter(tank => tank.supplier_tank !== supplier_tank);
// console.log(supplier_tanks)
// storing data of how amny water supplied from sump to overhead to calculate the consumption
const suplr_tank_info2 = await Tank.findOne({ customerId ,tankName:supplier_tank,tankLocation:supplier_tank_type});
let water_added_from_midnight1=0
if (supplier_tank_type === "sump") {
// const rcvr_info2 = await Tank.findOne({ customerId ,tankName:receiver_tank,tankLocation:receiver_type});
console.log(currentSump.receiver_tank_total_water_added_from_midnight,"5")
water_added_from_midnight1=currentSump.receiver_tank_total_water_added_from_midnight
console.log(water_added_from_midnight1)
}
const sump_water_level1 =currentSump.supplier_initial_waterlevel
console.log(sump_water_level1,"1")
// console.log(water_added_from_midnight)
const sump_final_water_level= parseInt(suplr_tank_info2.waterlevel.replace(/,/g, ''), 10)
console.log(sump_final_water_level,"2")
sump_water_levels = sump_water_levels.filter(tank => tank.supplier_tank !== supplier_tank);
const quantity_delivered = sump_water_level1-sump_final_water_level
if (supplier_tank_type === "sump") {
final_added_water=water_added_from_midnight1+quantity_delivered
await Tank.findOneAndUpdate({customerId, tankName: receiver_tank,tankLocation:receiver_type}, { $set: { total_water_added_from_midnight: final_added_water } })
}
const overheadTank = await Tank.findOne({ customerId, tankName: receiver_tank, tankLocation: receiver_type });
const connection = overheadTank.connections.inputConnections.find((conn) => conn.inputConnections === supplier_tank);
@ -579,27 +659,29 @@ exports.motorAction = async (req, reply) => {
const tankToUpdate = await Tank.findOne({ customerId, tankName: receiver_tank, tankLocation: receiver_type });
// Check if all objects in inputConnections have motor_status === "0"
const allMotorStatusZero = tankToUpdate.connections.inputConnections.every(connection => connection.motor_status === "0");
const allMotorStatusZero = tankToUpdate.connections.inputConnections.every(connection => connection.motor_status === "0");
if (allMotorStatusZero) {
if (allMotorStatusZero) {
console.log(allMotorStatusZero)
// Update the motor_status field to "0" for the tank
await Tank.findOneAndUpdate(
{ customerId, tankName: receiver_tank, tankLocation: receiver_type },
{ $set: { motor_status: "0" } }
await Tank.findOneAndUpdate(
{ customerId, tankName: receiver_tank, tankLocation: receiver_type },
{ $set: { motor_status: "0" } }
);
}
motorData = {
//saving the motor run time and data
motorData = {
customerId:customerId,
supplierTank : supplier_tank,
supplier_type: supplier_tank_type,
receiverTank: receiver_tank,
receiver_type: receiver_type,
// startTime: startTime,
stopTime: req.body.stopTime,
startTime: startTime,
stopTime: stopTime,
quantity_delivered:quantity_delivered
@ -615,10 +697,11 @@ exports.motorAction = async (req, reply) => {
motorData.receiverTank = receiver_tank;
motorData.supplier_type = supplier_type;
motorData.receiver_type = receiver_type;
//motorData.startTime = startTime;
motorData.stopTime = stop_time;
motorData.startTime = startTime;
motorData.stopTime = stopTime;
motorData.quantity_delivered = quantity_delivered;
}
const motor_data = await motorData.save();
console.log(motor_data)
@ -628,7 +711,7 @@ exports.motorAction = async (req, reply) => {
// reply.send({ status_code: 200, "start time": start_time, data: motor_data});
console.log(start_time)
// console.log(start_time)
// return motor_data
@ -638,7 +721,7 @@ exports.motorAction = async (req, reply) => {
// await Tank.findOneAndUpdate({customerId, tankName: receiver_tank,tankLocation:(req.body.to_type).toLowerCase()}, { $set: { motor_status: "0" } });
reply.send({ status_code: 200, "stop time": stop_time,data: motor_data});
reply.send({ status_code: 200, "stop time": stopTime,data: motor_data});
} else {
throw new Error("Invalid action");
}
@ -1065,3 +1148,26 @@ exports.updatewaterlevelsatmidnight = async (req, reply) => {
throw boom.boomify(err);
}
};
exports.deletemotordatarecordsbefore7days = async (req, reply) => {
try {
// Schedule the task to run every day at 10 seconds past the minute
cron.schedule('0 0 * * *', async () => {
try {
// Run the deletion task once a day
setInterval(async () => {
await deleteOldRecords();
}, 24 * 60 * 60 * 1000); // 24 hours in milliseconds
} catch (error) {
console.error('Error occurred:', error);
}
});
} catch (err) {
throw boom.boomify(err);
}
};

@ -1229,55 +1229,333 @@ exports.getconnectedCustomers = async (req, reply) => {
// };
const multer = require('multer');
const fs = require('fs');
// const multer = require('multer');
// const fs = require('fs');
// // Multer storage configuration
// const storage = multer.diskStorage({
// destination: function (req, file, cb) {
// // Specify the destination folder for storing uploaded files
// cb(null, './uploads');
// },
// filename: function (req, file, cb) {
// // Generate a unique filename for the uploaded file
// cb(null, file.originalname);
// },
// });
// // Multer upload configuration
// const upload = multer({ storage: storage }).single('picture');
// Multer storage configuration
const storage = multer.diskStorage({
destination: function (req, file, cb) {
// Specify the destination folder for storing uploaded files
cb(null, './uploads');
},
filename: function (req, file, cb) {
// Generate a unique filename for the uploaded file
cb(null, file.originalname);
},
// // Handler for uploading profile picture
// exports.uploadProfilePicture = async (req, res) => {
// try {
// upload(req, res, async (err) => {
// if (err) {
// return res.status(400).send({ error: 'Failed to upload profile picture' });
// }
// const supplierId = req.params.supplierId;
// const picture = req.file.filename;
// let profilePicture = await ProfilePictureSupplier.findOne({ supplierId });
// if (!profilePicture) {
// profilePicture = new ProfilePictureSupplier({
// supplierId,
// picture,
// });
// } else {
// profilePicture.picture = picture;
// }
// await profilePicture.save();
// res.send({ message: 'Profile picture uploaded successfully' });
// });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// };
// const { Storage } = require('@google-cloud/storage');
// const storage = new Storage({
// keyFilename :`/home/bhaskar/Downloads/arminta-tank-0f492875da39.json`,
// projectId : `arminta-tank`
// });
// const bucketName = 'armintaprofile_picture';
// exports.uploadProfilePicture = async (req, res) => {
// try {
// const supplierId = req.params.supplierId;
// const file = req.body.picture;
// // Check if the required properties are present
// if (!file) {
// return res.status(400).send({ error: 'No picture file provided' });
// }
// // Upload the file to the GCS bucket
// const bucket = storage.bucket(bucketName);
// const fileName = `${supplierId}/${file.originalname}`;
// const fileOptions = {
// metadata: {
// contentType: 'multipart/form-data',
// },
// };
// await bucket.upload(file.path, {
// destination: `armintaprofile_picture/${fileName}`,
// ...fileOptions,
// });
// const imageUrl = `https://storage.googleapis.com/${bucketName}/${fileName}`;
// console.log(imageUrl);
// let profilePicture = await ProfilePictureSupplier.findOne({ supplierId });
// console.log("profilePicture", profilePicture);
// if (!profilePicture) {
// profilePicture = new ProfilePictureSupplier({
// supplierId,
// picture: imageUrl,
// });
// } else {
// profilePicture.picture = imageUrl;
// }
// await profilePicture.save();
// // Delete the temporary file after uploading to GCS
// fs.unlinkSync(file.path);
// res.send({ message: 'Profile picture uploaded successfully' });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// },
// const { Storage } = require('@google-cloud/storage');
// const storage = new Storage({
// keyFilename: '/home/bhaskar/Downloads/arminta-tank-0f492875da39.json',
// projectId: 'arminta-tank',
// });
// const bucketName = 'armintaprofile_picture';
// exports.uploadProfilePicture = async (req, res) => {
// try {
// const supplierId = req.params.supplierId;
// // Check if the required properties are present
// if (!req.files || !req.files.picture) {
// return res.status(400).send({ error: 'No picture file provided' });
// }
// const file = req.files.picture;
// console.log("file", file)
// // Upload the file to the GCS bucket
// const bucket = storage.bucket(bucketName);
// const fileName = `${supplierId}/${file.name}`;
// const fileOptions = {
// metadata: {
// contentType: file.mimetype,
// },
// };
// await bucket.upload(file.tempFilePath, {
// destination: fileName,
// ...fileOptions,
// });
// const imageUrl = `https://storage.googleapis.com/${bucketName}/${fileName}`;
// console.log(imageUrl);
// let profilePicture = await ProfilePictureSupplier.findOne({ supplierId });
// console.log('profilePicture', profilePicture);
// if (!profilePicture) {
// profilePicture = new ProfilePictureSupplier({
// supplierId,
// picture: imageUrl,
// });
// } else {
// profilePicture.picture = imageUrl;
// }
// await profilePicture.save();
// // Delete the temporary file after uploading to GCS
// fs.unlinkSync(file.tempFilePath);
// res.send({ message: 'Profile picture uploaded successfully' });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// };
const { Storage } = require('@google-cloud/storage');
const fs = require('fs');
const storage = new Storage({
keyFilename: '/home/bhaskar/Downloads/arminta-tank-3c665db761bc.json',
projectId: 'arminta-tank',
});
// Multer upload configuration
const upload = multer({ storage: storage }).single('picture');
const bucketName = 'armintaprofile_picture';
// Handler for uploading profile picture
exports.uploadProfilePicture = async (req, res) => {
try {
upload(req, res, async (err) => {
if (err) {
return res.status(400).send({ error: 'Failed to upload profile picture' });
}
// exports.uploadProfilePicture = async (req, res) => {
// try {
// const supplierId = req.body.supplierId;
const supplierId = req.params.supplierId;
const picture = req.file.filename; // Assuming the file field in the request is named 'picture'
// // Check if the required properties are present
// if (!req.files || !req.files.picture) {
// return res.status(400).send({ error: 'No picture file provided' });
// }
let profilePicture = await ProfilePictureSupplier.findOne({ supplierId });
// const file = `/home/bhaskar/Desktop/download.jpeg`;
// console.log("file", file)
// console.log("supplierId", supplierId)
// // Upload the file to the GCS bucket
// const bucket = storage.bucket(bucketName);
// const fileName = `${supplierId}/${file.name}`;
// const fileOptions = {
// metadata: {
// contentType: file.mimetype,
// },
// };
// await bucket.upload(file.tempFilePath, {
// destination: fileName,
// ...fileOptions,
// });
if (!profilePicture) {
profilePicture = new ProfilePictureSupplier({
supplierId,
picture,
});
} else {
profilePicture.picture = picture;
}
// const imageUrl = `https://storage.googleapis.com/${bucketName}/${fileName}`;
await profilePicture.save();
// let profilePicture = await profilePictureSupplier.findOne({ supplierId });
res.send({ message: 'Profile picture uploaded successfully' });
});
} catch (error) {
res.status(500).send({ error: error.message });
}
};
// if (!profilePicture) {
// profilePicture = new profilePictureSupplier({
// supplierId,
// picture: imageUrl,
// });
// } else {
// profilePicture.picture = imageUrl;
// }
// await profilePicture.save();
// // Delete the temporary file after uploading to GCS
// fs.unlinkSync(file.tempFilePath);
// res.send({ message: 'Profile picture uploaded successfully' });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// };
// exports.uploadProfilePicture = async (req, res) => {
// try {
// upload(req, res, async (err) => {
// if (err) {
// return res.status(400).send({ error: 'Failed to upload profile picture' });
// }
// const supplierId = req.params.supplierId;
// const picture = req.file; // Assuming the file field in the request is named 'picture'
// // Upload the file to the GCS bucket
// const bucket = storage.bucket(bucketName);
// const fileName = `${supplierId}/${file.originalname}`;
// const fileOptions = {
// metadata: {
// contentType: file.mimetype,
// },
// };
// await bucket.upload(filepath, {
// destination: `armintaprofile_picture/${picture}`,
// ...fileOptions,
// });
// const imageUrl = `https://storage.googleapis.com/${bucketName}/${fileName}`;
// console.log(imageUrl)
// let profilePicture = await ProfilePictureSupplier.findOne({ supplierId });
// console.log("profilePicture", profilePicture)
// if (!profilePicture) {
// profilePicture = new ProfilePictureSupplier({
// supplierId,
// picture: imageUrl,
// });
// } else {
// profilePicture.picture = imageUrl;
// }
// await profilePicture.save();
// // Delete the temporary file after uploading to GCS
// fs.unlinkSync(file.path);
// res.send({ message: 'Profile picture uploaded successfully' });
// });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// };
// exports.uploadProfilePicture = async (req, res) => {
// try {
// upload(req, res, async (err) => {
// if (err) {
// return res.status(400).send({ error: 'Failed to upload profile picture' });
// }
// const supplierId = req.params.supplierId;
// const picture = req.file.filename; // Assuming the file field in the request is named 'picture'
// // Upload the picture to the GCP bucket
// const bucketName = 'armintaprofile_picture';
// const bucket = storage.bucket(bucketName);
// const uploadPath = `armintaprofile_picture/${picture}`;
// await bucket.upload(req.file.path, {
// destination: uploadPath,
// });
// console.log("bucket",bucket)
// console.log(uploadPath, "uploadPath")
// // Get the public URL of the uploaded picture
// const file = bucket.file(uploadPath);
// const [metadata] = await file.getMetadata();
// const pictureUrl = metadata.mediaLink;
// let profilePicture = await profilePictureSupplier.findOne({ supplierId });
// if (!profilePicture) {
// profilePicture = new profilePictureSupplier({
// supplierId,
// picture: pictureUrl,
// });
// } else {
// profilePicture.picture = pictureUrl;
// }
// await profilePicture.save();
// res.send({ message: 'Profile picture uploaded successfully' });
// });
// } catch (error) {
// res.status(500).send({ error: error.message });
// }
// };
// Route for fetching profile picture data
fastify.get('/api/users/profile-picture-supplier/:supplierId', async (req, res) => {

@ -325,7 +325,9 @@ fastify.get('/testtemp', (req, reply) => {
reply.view('layouts/main', {});
});
const multipart = require('fastify-multipart');
fastify.register(multipart);
//fastify-auth plugin is required so we can define routes in seperate files and verify jwt supplied in preHandlers for each request.
//const multer = require("fastify-multer");
fastify.register(require("fastify-auth"));
@ -350,6 +352,202 @@ fastify.register(require("./routes/adminRoute"));
fastify.register(require("./routes/forTestingRoute"));
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const { Supplier, profilePictureSupplier } = require("./models/supplier");
const gc = new Storage({
keyFilename : path.join(__dirname, "../src/arminta-tank-keyFile.json"),
})
const bucketName = 'arminta_profile_pictures';
// Function to upload an image to the bucket
// async function uploadImageToBucket(imagePath) {
// const bucket = gc.bucket(bucketName);
// const imageFile = bucket.file(path.basename(imagePath));
// try {
// await imageFile.save(imagePath);
// console.log('Image uploaded successfully.');
// } catch (error) {
// console.error('Failed to upload image:', error);
// }
// }
// async function uploadImageToBucket(imagePath, supplierId) {
// const bucket = gc.bucket(bucketName);
// const imageFile = bucket.file(path.basename(imagePath));
// try {
// await imageFile.save(imagePath);
// console.log('Image uploaded successfully.');
// // Assuming you have a Supplier model/schema defined
// const supplier = await Supplier.findOne({ supplierId });
// if (!supplier) {
// console.error('Supplier not found.');
// return;
// }
// // Update the supplier document with the image URL
// supplier.imageUrl = `https://storage.googleapis.com/${bucketName}/${imageFile.name}`;
// await supplier.save();
// console.log('Image URL saved in the supplier document.');
// } catch (error) {
// console.error('Failed to upload image or save the URL:', error);
// }
// }
// const imagePath = '/home/bhaskar/Desktop/download.jpeg';
// const supplierId = 'AWSSSUR67'; // Replace with the actual supplier ID
// uploadImageToBucket(imagePath, supplierId)
// .then(() => {
// console.log('Image upload and update completed.');
// })
// .catch((error) => {
// console.error('Error occurred during image upload and update:', error);
// });
// const imagePath = '/home/bhaskar/Desktop/download.jpeg';
// uploadImageToBucket(imagePath);
// const imagePath = '/home/bhaskar/Desktop/download.jpeg';
// uploadImageToBucket(imagePath);
const acceptedFormats = ['image/jpeg', 'image/png'];
function truncateString(str, maxLength) {
if (str.length <= maxLength) {
return str;
}
return str.slice(0, maxLength);
}
fastify.post('/api/upload-image/:supplierId', {
schema: {
tags: ['Supplier'],
description: 'Upload Supplier profile picture',
params: {
type: 'object',
properties: {
supplierId: { type: 'string', description: 'ID of the supplier' },
},
},
body: {
type: 'object',
properties: {
imagePath: { type: 'string', description: 'Path of the image to upload' },
},
},
response: {
200: {
type: 'object',
properties: {
message: { type: 'string', description: 'Upload and update completed message' },
profilePicture: {
type: 'object',
properties: {
supplierId: { type: 'string', description: 'ID of the supplier' },
picture: { type: 'string', description: 'URL of the profile picture' },
},
},
},
},
404: {
type: 'object',
properties: {
error: { type: 'string', description: 'Supplier not found error message' },
},
},
500: {
type: 'object',
properties: {
error: { type: 'string', description: 'Failed to upload image error message' },
},
},
},
},
handler: async (request, reply) => {
const { supplierId } = request.params;
const imagePath = request.body.imagePath;
const bucket = gc.bucket(bucketName);
const imageFile = bucket.file(path.basename(imagePath));
try {
// Generate a unique filename
const uniqueFilename = `${Date.now()}_${path.basename(imagePath)}`;
// Truncate the unique filename to a specified length
const maxFilenameLength = 50; // Specify the maximum length for the filename
const truncatedFilename = truncateString(uniqueFilename, maxFilenameLength);
const uploadPath = path.join(path.dirname(imagePath), truncatedFilename);
await imageFile.save(uploadPath);
console.log('Image uploaded successfully.');
// await imageFile.save(imagePath);
// console.log('Image uploaded successfully.');
const imageUrl = `https://storage.googleapis.com/${bucketName}/${imageFile.name}`;
console.log("image---", imageUrl)
// Find the supplier document
const supplier = await Supplier.findOne({ supplierId });
if (!supplier) {
console.error('Supplier not found.');
reply.code(404).send({ error: 'Supplier not found' });
return;
}
// Create or update the profile picture for the supplier
let profilePicture = await profilePictureSupplier.findOne({ supplierId: supplierId });
console.log("profilePicture", profilePicture)
if (!profilePicture) {
profilePicture = new profilePictureSupplier({
supplierId: supplierId,
picture: imageUrl,
});
} else {
profilePicture.picture = imageUrl;
}
await profilePicture.save();
console.log("profilePicture", profilePicture.picture)
console.log('Image URL saved in the profile picture document.');
reply.send({
message: 'Image upload and update completed.',
profilePicture: {
supplierId: profilePicture.supplierId,
picture: profilePicture.picture,
},
});
} catch (error) {
console.error('Failed to upload image or save the URL:', error);
reply.code(500).send({ error: 'Failed to upload image' });
}
},
});
// fastify.get("/", (req, reply) => {
// reply.view("/templates/index.ejs", { text: "text" });

@ -132,34 +132,54 @@ const supplierSchema = new mongoose.Schema(
fcmId: { type: String, default: null },
});
// const profilePictureSupplierSchema = new Schema({
// // supplierId: {
// // type: String,unique: true,
// // required: true
// // },
// // // picture: {
// // // type: String,
// // // required: true
// // // }
// // picture: {
// // type: String,
// // required: true,
// // validate: {
// // validator: function (value) {
// // const supportedFormats = ['jpg', 'jpeg', 'png'];
// // const fileExtension = value.split('.').pop().toLowerCase();
// // return supportedFormats.includes(fileExtension);
// // },
// // message: 'Picture must be a JPEG, PNG or JPG image'
// // }
// // }
// supplierId: {
// type: String,
// unique: true,
// required: true
// },
// picture: {
// type: Buffer,
// required: true,
// validate: {
// validator: function (value) {
// const supportedFormats = ['jpg', 'jpeg', 'png'];
// const fileExtension = value.split('.').pop().toLowerCase();
// return supportedFormats.includes(fileExtension);
// },
// message: 'Picture must be a JPEG, PNG, or JPG image'
// }
// }
// });
const profilePictureSupplierSchema = new Schema({
// supplierId: {
// type: String,unique: true,
// required: true
// },
// // picture: {
// // type: String,
// // required: true
// // }
// picture: {
// type: String,
// required: true,
// validate: {
// validator: function (value) {
// const supportedFormats = ['jpg', 'jpeg', 'png'];
// const fileExtension = value.split('.').pop().toLowerCase();
// return supportedFormats.includes(fileExtension);
// },
// message: 'Picture must be a JPEG, PNG or JPG image'
// }
// }
supplierId: {
type: String,
unique: true,
required: true
},
picture: {
type: Buffer, // Store the file as binary data
type: String, // Change the type to String
required: true,
validate: {
validator: function (value) {

@ -43,6 +43,7 @@ const tanksSchema = new mongoose.Schema({
tankLocation: { type: String, default: null },
motor_status: { type: String, default: "0" },
waterlevel_at_midnight:{ type: String,default:"0" },
total_water_added_from_midnight:{ type: String,default:"0" },
connections: {
source: { type: String },
inputConnections: [
@ -74,7 +75,7 @@ const motordataSchema = new mongoose.Schema({
stopTime: { type: String, default: null },
supplier_type: { type: String, default: null },
receiver_type: { type: String, default: null },
quantity_delivered:{ type: String, default: null },
});

@ -577,59 +577,289 @@ module.exports = function (fastify, opts, next) {
// });
// Route registration: upload a profile picture for a given supplier.
// All multipart handling is delegated to validationHandler.uploadProfilePicture.
// Shared response-body shape for the two error status codes below.
const uploadErrorBody = {
  type: 'object',
  properties: {
    error: { type: 'string' },
  },
};
fastify.route({
  method: 'POST',
  url: '/api/users/profile-picture-supplier/:supplierId',
  schema: {
    tags: ['Supplier'],
    description: 'Upload a profile picture for a supplier',
    summary: 'Upload a profile picture for a supplier',
    // Path parameter identifying which supplier the picture belongs to.
    params: {
      type: 'object',
      properties: {
        supplierId: {
          type: 'string',
          description: 'Supplier ID',
        },
      },
    },
    consumes: ['multipart/form-data'],
    // NOTE(review): body validation on multipart requests only takes effect when
    // the multipart plugin attaches fields to the body — confirm against the handler.
    body: {
      type: 'object',
      properties: {
        picture: {
          type: 'string',
          description: 'Profile picture file',
        },
      },
      required: ['picture'],
    },
    response: {
      200: {
        description: 'Profile picture uploaded successfully',
        type: 'object',
        properties: {
          message: { type: 'string' },
        },
      },
      400: {
        description: 'Failed to upload profile picture',
        ...uploadErrorBody,
      },
      500: {
        description: 'Internal server error',
        ...uploadErrorBody,
      },
    },
  },
  handler: validationHandler.uploadProfilePicture,
});
// fastify.route({
// method: 'POST',
// url: '/api/users/profile-picture-supplier',
// schema: {
// tags: ['Supplier'],
// description: 'Upload a profile picture for a supplier',
// summary: 'Upload a profile picture for a supplier',
// params: {
// type: 'object',
// properties: {
// supplierId: {
// type: 'string',
// description: 'Supplier ID',
// },
// },
// },
// consumes: ['multipart/form-data'],
// body: {
// type: 'object',
// properties: {
// picture: {
// type: 'string',
// description: 'Profile picture file',
// },
// },
// required: ['picture'],
// },
// response: {
// 200: {
// description: 'Profile picture uploaded successfully',
// type: 'object',
// properties: {
// message: { type: 'string' },
// },
// },
// 400: {
// description: 'Failed to upload profile picture',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// 500: {
// description: 'Internal server error',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// },
// },
// handler: validationHandler.uploadProfilePicture,
// });
// fastify.route({
// method: 'POST',
// url: '/api/users/profile-picture-supplier',
// schema: {
// tags: ['Supplier'],
// description: 'Upload a profile picture for a supplier',
// summary: 'Upload a profile picture for a supplier',
// params: {
// type: 'object',
// properties: {
// supplierId: {
// type: 'string',
// description: 'Supplier ID',
// },
// },
// },
// consumes: ['multipart/form-data'],
// body: {
// type: 'object',
// properties: {
// picture: {
// type: 'string',
// properties: {
// data: { type: 'string' },
// encoding: { type: 'string' },
// filename: { type: 'string' },
// },
// required: ['data', 'encoding', 'filename'], // Add required properties
// },
// },
// required: ['picture'],
// },
// response: {
// 200: {
// description: 'Profile picture uploaded successfully',
// type: 'object',
// properties: {
// message: { type: 'string' },
// },
// },
// 400: {
// description: 'Failed to upload profile picture',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// 500: {
// description: 'Internal server error',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// },
// },
// handler: validationHandler.uploadProfilePicture,
// });
// fastify.route({
// method: 'POST',
// url: '/api/users/profile-picture-supplier',
// schema: {
// tags: ['Supplier'],
// description: 'Upload a profile picture for a supplier',
// summary: 'Upload a profile picture for a supplier',
// params: {
// type: 'object',
// properties: {
// supplierId: {
// type: 'string',
// description: 'Supplier ID',
// },
// },
// },
// consumes: ['multipart/form-data'],
// body: {
// type: 'object',
// properties: {
// picture: {
// type: 'string',
// description: 'Profile picture file',
// // format: 'binary', // Specify the format as binary for file upload
// },
// },
// required: ['picture'],
// },
// response: {
// 200: {
// description: 'Profile picture uploaded successfully',
// type: 'object',
// properties: {
// message: { type: 'string' },
// },
// },
// 400: {
// description: 'Failed to upload profile picture',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// 500: {
// description: 'Internal server error',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// },
// },
// handler: validationHandler.uploadProfilePicture,
// });
// fastify.route({
// method: 'POST',
// url: '/api/users/profile-picture-supplier',
// schema: {
// tags: ['Supplier'],
// description: 'Upload a profile picture for a supplier',
// summary: 'Upload a profile picture for a supplier',
// params: {
// type: 'object',
// properties: {
// supplierId: {
// type: 'string',
// description: 'Supplier ID',
// },
// },
// },
// consumes: ['multipart/form-data'],
// body: {
// type: 'object',
// properties: {
// file: {
// type: 'string',
// description: 'Profile picture file',
// },
// },
// required: ['picture'],
// },
// response: {
// 200: {
// description: 'Profile picture uploaded successfully',
// type: 'object',
// properties: {
// message: { type: 'string' },
// },
// },
// 400: {
// description: 'Failed to upload profile picture',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// 500: {
// description: 'Internal server error',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// },
// },
// handler: validationHandler.uploadProfilePicture,
// });
// const { Storage } = require('@google-cloud/storage');
// const storage = new Storage({
// keyFilename :`/home/bhaskar/Downloads/arminta-tank-0f492875da39.json`,
// projectId : `arminta-tank`
// });
// const bucketName = 'armintaprofile_picture';
// fastify.route({
// method: 'POST',
// url: '/api/users/profile-picture-supplier',
// schema: {
// tags: ['Supplier'],
// description: 'Upload a profile picture for a supplier',
// summary: 'Upload a profile picture for a supplier',
// // params: {
// // type: 'object',
// // properties: {
// // supplierId: {
// // type: 'string',
// // description: 'Supplier ID',
// // },
// // },
// // },
// consumes: ['multipart/form-data'],
// body: {
// type: 'object',
// properties: {
// supplierId: {
// type: 'string',
// description: 'Supplier ID',
// },
// },
// required: ['picture'],
// },
// response: {
// 200: {
// description: 'Profile picture uploaded successfully',
// type: 'object',
// properties: {
// message: { type: 'string' },
// },
// },
// 400: {
// description: 'Failed to upload profile picture',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// 500: {
// description: 'Internal server error',
// type: 'object',
// properties: {
// error: { type: 'string' },
// },
// },
// },
// },
// handler: validationHandler.uploadProfilePicture
// });

@ -244,7 +244,7 @@ module.exports = function (fastify, opts, next) {
action: { type: "string" },
percentage: { type: "string",default: "100" },
startTime:{ type: "string" },
stopTime:{ type: "string" },
stop_at:{type:"number"}
},
@ -523,7 +523,26 @@ module.exports = function (fastify, opts, next) {
preHandler: fastify.auth([fastify.authenticate]),
handler: tanksController.updatewaterlevelsatmidnight,
});
// Route registration: purge motorsdata records older than 7 days.
// Auth-protected; the actual deletion logic lives in tanksController.
fastify.get("/api/deletemotordatarecordsbefore7days", {
  schema: {
    tags: ["Tank"],
    description: "This is for deleting the data before 7 days in motorsdata",
    summary: "This is for deleting the data before 7 days in motorsdata",
    // Explicit JSON Schema for the query string. The previous bare shorthand
    // ({ customerId: { type: 'string' } }) is deprecated in Fastify v4+;
    // this expanded form validates identically.
    querystring: {
      type: "object",
      properties: {
        customerId: { type: "string" },
      },
    },
    security: [
      {
        basicAuth: [],
      },
    ],
  },
  // Require an authenticated caller before running the purge.
  preHandler: fastify.auth([fastify.authenticate]),
  handler: tanksController.deletemotordatarecordsbefore7days,
});
next();
}

Loading…
Cancel
Save