Dia antes primera install
This commit is contained in:
1
node_modules/.bin/cake
generated
vendored
Symbolic link
1
node_modules/.bin/cake
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../coffee-script/bin/cake
|
||||
1
node_modules/.bin/coffee
generated
vendored
Symbolic link
1
node_modules/.bin/coffee
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../coffee-script/bin/coffee
|
||||
1
node_modules/.bin/color-support
generated
vendored
Symbolic link
1
node_modules/.bin/color-support
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../color-support/bin.js
|
||||
1
node_modules/.bin/convert-excel-to-json
generated
vendored
Symbolic link
1
node_modules/.bin/convert-excel-to-json
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../convert-excel-to-json/bin/cli.js
|
||||
1
node_modules/.bin/crc32
generated
vendored
Symbolic link
1
node_modules/.bin/crc32
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../crc-32/bin/crc32.njs
|
||||
1
node_modules/.bin/inspect-function
generated
vendored
Symbolic link
1
node_modules/.bin/inspect-function
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../inspect-function/bin/magicli.js
|
||||
1
node_modules/.bin/inspect-parameters-declaration
generated
vendored
Symbolic link
1
node_modules/.bin/inspect-parameters-declaration
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../inspect-parameters-declaration/bin/cli.js
|
||||
1
node_modules/.bin/knex
generated
vendored
Symbolic link
1
node_modules/.bin/knex
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../knex/lib/bin/cli.js
|
||||
1
node_modules/.bin/mime
generated
vendored
Symbolic link
1
node_modules/.bin/mime
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../mime/cli.js
|
||||
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../mkdirp/bin/cmd.js
|
||||
1
node_modules/.bin/node-gyp
generated
vendored
Symbolic link
1
node_modules/.bin/node-gyp
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../node-gyp/bin/node-gyp.js
|
||||
1
node_modules/.bin/node-gyp-build
generated
vendored
Symbolic link
1
node_modules/.bin/node-gyp-build
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../node-gyp-build/bin.js
|
||||
1
node_modules/.bin/node-gyp-build-optional
generated
vendored
Symbolic link
1
node_modules/.bin/node-gyp-build-optional
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../node-gyp-build/optional.js
|
||||
1
node_modules/.bin/node-gyp-build-test
generated
vendored
Symbolic link
1
node_modules/.bin/node-gyp-build-test
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../node-gyp-build/build-test.js
|
||||
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../which/bin/node-which
|
||||
1
node_modules/.bin/nodezip
generated
vendored
Symbolic link
1
node_modules/.bin/nodezip
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../node-zip/bin/nodezip
|
||||
1
node_modules/.bin/nopt
generated
vendored
Symbolic link
1
node_modules/.bin/nopt
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../nopt/bin/nopt.js
|
||||
1
node_modules/.bin/object-to-arguments
generated
vendored
Symbolic link
1
node_modules/.bin/object-to-arguments
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../object-to-arguments/bin/cli.js
|
||||
1
node_modules/.bin/prebuild-install
generated
vendored
Symbolic link
1
node_modules/.bin/prebuild-install
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../prebuild-install/bin.js
|
||||
1
node_modules/.bin/printj
generated
vendored
Symbolic link
1
node_modules/.bin/printj
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../printj/bin/printj.njs
|
||||
1
node_modules/.bin/rc
generated
vendored
Symbolic link
1
node_modules/.bin/rc
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../rc/cli.js
|
||||
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../rimraf/bin.js
|
||||
1
node_modules/.bin/semver
generated
vendored
Symbolic link
1
node_modules/.bin/semver
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../semver/bin/semver.js
|
||||
1
node_modules/.bin/stringify-parameters
generated
vendored
Symbolic link
1
node_modules/.bin/stringify-parameters
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../stringify-parameters/bin/cli.js
|
||||
1
node_modules/.bin/user-home
generated
vendored
Symbolic link
1
node_modules/.bin/user-home
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../user-home/cli.js
|
||||
1
node_modules/.bin/xlsx
generated
vendored
Symbolic link
1
node_modules/.bin/xlsx
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../xlsx/bin/xlsx.njs
|
||||
3400
node_modules/.package-lock.json
generated
vendored
Normal file
3400
node_modules/.package-lock.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
10
node_modules/@gar/promisify/LICENSE.md
generated
vendored
Normal file
10
node_modules/@gar/promisify/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright © 2020-2022 Michael Garvin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
65
node_modules/@gar/promisify/README.md
generated
vendored
Normal file
65
node_modules/@gar/promisify/README.md
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
# @gar/promisify
|
||||
|
||||
### Promisify an entire object or class instance
|
||||
|
||||
This module leverages es6 Proxy and Reflect to promisify every function in an
|
||||
object or class instance.
|
||||
|
||||
It assumes the callback that the function is expecting is the last
|
||||
parameter, and that it is an error-first callback with only one value,
|
||||
i.e. `(err, value) => ...`. This mirrors node's `util.promisify` method.
|
||||
|
||||
In order that you can use it as a one-stop-shop for all your promisify
|
||||
needs, you can also pass it a function. That function will be
|
||||
promisified as normal using node's built-in `util.promisify` method.
|
||||
|
||||
[node's custom promisified
|
||||
functions](https://nodejs.org/api/util.html#util_custom_promisified_functions)
|
||||
will also be mirrored, further allowing this to be a drop-in replacement
|
||||
for the built-in `util.promisify`.
|
||||
|
||||
### Examples
|
||||
|
||||
Promisify an entire object
|
||||
|
||||
```javascript
|
||||
|
||||
const promisify = require('@gar/promisify')
|
||||
|
||||
class Foo {
|
||||
constructor (attr) {
|
||||
this.attr = attr
|
||||
}
|
||||
|
||||
double (input, cb) {
|
||||
cb(null, input * 2)
|
||||
}
|
||||
|
||||
const foo = new Foo('baz')
|
||||
const promisified = promisify(foo)
|
||||
|
||||
console.log(promisified.attr)
|
||||
console.log(await promisified.double(1024))
|
||||
```
|
||||
|
||||
Promisify a function
|
||||
|
||||
```javascript
|
||||
|
||||
const promisify = require('@gar/promisify')
|
||||
|
||||
function foo (a, cb) {
|
||||
if (a !== 'bad') {
|
||||
return cb(null, 'ok')
|
||||
}
|
||||
return cb('not ok')
|
||||
}
|
||||
|
||||
const promisified = promisify(foo)
|
||||
|
||||
// This will resolve to 'ok'
|
||||
promisified('good')
|
||||
|
||||
// this will reject
|
||||
promisified('bad')
|
||||
```
|
||||
36
node_modules/@gar/promisify/index.js
generated
vendored
Normal file
36
node_modules/@gar/promisify/index.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('util')
|
||||
|
||||
const handler = {
|
||||
get: function (target, prop, receiver) {
|
||||
if (typeof target[prop] !== 'function') {
|
||||
return target[prop]
|
||||
}
|
||||
if (target[prop][promisify.custom]) {
|
||||
return function () {
|
||||
return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments)
|
||||
}
|
||||
}
|
||||
return function () {
|
||||
return new Promise((resolve, reject) => {
|
||||
Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) {
|
||||
if (err) {
|
||||
return reject(err)
|
||||
}
|
||||
resolve(result)
|
||||
}])
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function (thingToPromisify) {
|
||||
if (typeof thingToPromisify === 'function') {
|
||||
return promisify(thingToPromisify)
|
||||
}
|
||||
if (typeof thingToPromisify === 'object') {
|
||||
return new Proxy(thingToPromisify, handler)
|
||||
}
|
||||
throw new TypeError('Can only promisify functions or objects')
|
||||
}
|
||||
32
node_modules/@gar/promisify/package.json
generated
vendored
Normal file
32
node_modules/@gar/promisify/package.json
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "@gar/promisify",
|
||||
"version": "1.1.3",
|
||||
"description": "Promisify an entire class or object",
|
||||
"main": "index.js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/wraithgar/gar-promisify.git"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"lint:fix": "standard --fix",
|
||||
"test": "lab -a @hapi/code -t 100",
|
||||
"posttest": "npm run lint"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"promisify",
|
||||
"all",
|
||||
"class",
|
||||
"object"
|
||||
],
|
||||
"author": "Gar <gar+npm@danger.computer>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@hapi/code": "^8.0.1",
|
||||
"@hapi/lab": "^24.1.0",
|
||||
"standard": "^16.0.3"
|
||||
}
|
||||
}
|
||||
20
node_modules/@npmcli/fs/LICENSE.md
generated
vendored
Normal file
20
node_modules/@npmcli/fs/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
|
||||
|
||||
ISC License
|
||||
|
||||
Copyright npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this
|
||||
software for any purpose with or without fee is hereby
|
||||
granted, provided that the above copyright notice and this
|
||||
permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
|
||||
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
|
||||
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
||||
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
|
||||
USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
60
node_modules/@npmcli/fs/README.md
generated
vendored
Normal file
60
node_modules/@npmcli/fs/README.md
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
# @npmcli/fs
|
||||
|
||||
polyfills, and extensions, of the core `fs` module.
|
||||
|
||||
## Features
|
||||
|
||||
- all exposed functions return promises
|
||||
- `fs.rm` polyfill for node versions < 14.14.0
|
||||
- `fs.mkdir` polyfill adding support for the `recursive` and `force` options in node versions < 10.12.0
|
||||
- `fs.copyFile` extended to accept an `owner` option
|
||||
- `fs.mkdir` extended to accept an `owner` option
|
||||
- `fs.mkdtemp` extended to accept an `owner` option
|
||||
- `fs.writeFile` extended to accept an `owner` option
|
||||
- `fs.withTempDir` added
|
||||
- `fs.cp` polyfill for node < 16.7.0
|
||||
|
||||
## The `owner` option
|
||||
|
||||
The `copyFile`, `mkdir`, `mkdtemp`, `writeFile`, and `withTempDir` functions
|
||||
all accept a new `owner` property in their options. It can be used in two ways:
|
||||
|
||||
- `{ owner: { uid: 100, gid: 100 } }` - set the `uid` and `gid` explicitly
|
||||
- `{ owner: 100 }` - use one value, will set both `uid` and `gid` the same
|
||||
|
||||
The special string `'inherit'` may be passed instead of a number, which will
|
||||
cause this module to automatically determine the correct `uid` and/or `gid`
|
||||
from the nearest existing parent directory of the target.
|
||||
|
||||
## `fs.withTempDir(root, fn, options) -> Promise`
|
||||
|
||||
### Parameters
|
||||
|
||||
- `root`: the directory in which to create the temporary directory
|
||||
- `fn`: a function that will be called with the path to the temporary directory
|
||||
- `options`
|
||||
- `tmpPrefix`: a prefix to be used in the generated directory name
|
||||
|
||||
### Usage
|
||||
|
||||
The `withTempDir` function creates a temporary directory, runs the provided
|
||||
function (`fn`), then removes the temporary directory and resolves or rejects
|
||||
based on the result of `fn`.
|
||||
|
||||
```js
|
||||
const fs = require('@npmcli/fs')
|
||||
const os = require('os')
|
||||
|
||||
// this function will be called with the full path to the temporary directory
|
||||
// it is called with `await` behind the scenes, so can be async if desired.
|
||||
const myFunction = async (tempPath) => {
|
||||
return 'done!'
|
||||
}
|
||||
|
||||
const main = async () => {
|
||||
const result = await fs.withTempDir(os.tmpdir(), myFunction)
|
||||
// result === 'done!'
|
||||
}
|
||||
|
||||
main()
|
||||
```
|
||||
17
node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js
generated
vendored
Normal file
17
node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
const url = require('url')
|
||||
|
||||
const node = require('../node.js')
|
||||
const polyfill = require('./polyfill.js')
|
||||
|
||||
const useNative = node.satisfies('>=10.12.0')
|
||||
|
||||
const fileURLToPath = (path) => {
|
||||
// the polyfill is tested separately from this module, no need to hack
|
||||
// process.version to try to trigger it just for coverage
|
||||
// istanbul ignore next
|
||||
return useNative
|
||||
? url.fileURLToPath(path)
|
||||
: polyfill(path)
|
||||
}
|
||||
|
||||
module.exports = fileURLToPath
|
||||
121
node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js
generated
vendored
Normal file
121
node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
const { URL, domainToUnicode } = require('url')
|
||||
|
||||
const CHAR_LOWERCASE_A = 97
|
||||
const CHAR_LOWERCASE_Z = 122
|
||||
|
||||
const isWindows = process.platform === 'win32'
|
||||
|
||||
class ERR_INVALID_FILE_URL_HOST extends TypeError {
|
||||
constructor (platform) {
|
||||
super(`File URL host must be "localhost" or empty on ${platform}`)
|
||||
this.code = 'ERR_INVALID_FILE_URL_HOST'
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
class ERR_INVALID_FILE_URL_PATH extends TypeError {
|
||||
constructor (msg) {
|
||||
super(`File URL path ${msg}`)
|
||||
this.code = 'ERR_INVALID_FILE_URL_PATH'
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
class ERR_INVALID_ARG_TYPE extends TypeError {
|
||||
constructor (name, actual) {
|
||||
super(`The "${name}" argument must be one of type string or an instance ` +
|
||||
`of URL. Received type ${typeof actual} ${actual}`)
|
||||
this.code = 'ERR_INVALID_ARG_TYPE'
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
class ERR_INVALID_URL_SCHEME extends TypeError {
|
||||
constructor (expected) {
|
||||
super(`The URL must be of scheme ${expected}`)
|
||||
this.code = 'ERR_INVALID_URL_SCHEME'
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
const isURLInstance = (input) => {
|
||||
return input != null && input.href && input.origin
|
||||
}
|
||||
|
||||
const getPathFromURLWin32 = (url) => {
|
||||
const hostname = url.hostname
|
||||
let pathname = url.pathname
|
||||
for (let n = 0; n < pathname.length; n++) {
|
||||
if (pathname[n] === '%') {
|
||||
const third = pathname.codePointAt(n + 2) | 0x20
|
||||
if ((pathname[n + 1] === '2' && third === 102) ||
|
||||
(pathname[n + 1] === '5' && third === 99)) {
|
||||
throw new ERR_INVALID_FILE_URL_PATH('must not include encoded \\ or / characters')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pathname = pathname.replace(/\//g, '\\')
|
||||
pathname = decodeURIComponent(pathname)
|
||||
if (hostname !== '') {
|
||||
return `\\\\${domainToUnicode(hostname)}${pathname}`
|
||||
}
|
||||
|
||||
const letter = pathname.codePointAt(1) | 0x20
|
||||
const sep = pathname[2]
|
||||
if (letter < CHAR_LOWERCASE_A || letter > CHAR_LOWERCASE_Z ||
|
||||
(sep !== ':')) {
|
||||
throw new ERR_INVALID_FILE_URL_PATH('must be absolute')
|
||||
}
|
||||
|
||||
return pathname.slice(1)
|
||||
}
|
||||
|
||||
const getPathFromURLPosix = (url) => {
|
||||
if (url.hostname !== '') {
|
||||
throw new ERR_INVALID_FILE_URL_HOST(process.platform)
|
||||
}
|
||||
|
||||
const pathname = url.pathname
|
||||
|
||||
for (let n = 0; n < pathname.length; n++) {
|
||||
if (pathname[n] === '%') {
|
||||
const third = pathname.codePointAt(n + 2) | 0x20
|
||||
if (pathname[n + 1] === '2' && third === 102) {
|
||||
throw new ERR_INVALID_FILE_URL_PATH('must not include encoded / characters')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return decodeURIComponent(pathname)
|
||||
}
|
||||
|
||||
const fileURLToPath = (path) => {
|
||||
if (typeof path === 'string') {
|
||||
path = new URL(path)
|
||||
} else if (!isURLInstance(path)) {
|
||||
throw new ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], path)
|
||||
}
|
||||
|
||||
if (path.protocol !== 'file:') {
|
||||
throw new ERR_INVALID_URL_SCHEME('file')
|
||||
}
|
||||
|
||||
return isWindows
|
||||
? getPathFromURLWin32(path)
|
||||
: getPathFromURLPosix(path)
|
||||
}
|
||||
|
||||
module.exports = fileURLToPath
|
||||
20
node_modules/@npmcli/fs/lib/common/get-options.js
generated
vendored
Normal file
20
node_modules/@npmcli/fs/lib/common/get-options.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
// given an input that may or may not be an object, return an object that has
|
||||
// a copy of every defined property listed in 'copy'. if the input is not an
|
||||
// object, assign it to the property named by 'wrap'
|
||||
const getOptions = (input, { copy, wrap }) => {
|
||||
const result = {}
|
||||
|
||||
if (input && typeof input === 'object') {
|
||||
for (const prop of copy) {
|
||||
if (input[prop] !== undefined) {
|
||||
result[prop] = input[prop]
|
||||
}
|
||||
}
|
||||
} else {
|
||||
result[wrap] = input
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = getOptions
|
||||
9
node_modules/@npmcli/fs/lib/common/node.js
generated
vendored
Normal file
9
node_modules/@npmcli/fs/lib/common/node.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
const semver = require('semver')
|
||||
|
||||
const satisfies = (range) => {
|
||||
return semver.satisfies(process.version, range, { includePrerelease: true })
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
satisfies,
|
||||
}
|
||||
92
node_modules/@npmcli/fs/lib/common/owner.js
generated
vendored
Normal file
92
node_modules/@npmcli/fs/lib/common/owner.js
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
const { dirname, resolve } = require('path')
|
||||
|
||||
const fileURLToPath = require('./file-url-to-path/index.js')
|
||||
const fs = require('../fs.js')
|
||||
|
||||
// given a path, find the owner of the nearest parent
|
||||
const find = async (path) => {
|
||||
// if we have no getuid, permissions are irrelevant on this platform
|
||||
if (!process.getuid) {
|
||||
return {}
|
||||
}
|
||||
|
||||
// fs methods accept URL objects with a scheme of file: so we need to unwrap
|
||||
// those into an actual path string before we can resolve it
|
||||
const resolved = path != null && path.href && path.origin
|
||||
? resolve(fileURLToPath(path))
|
||||
: resolve(path)
|
||||
|
||||
let stat
|
||||
|
||||
try {
|
||||
stat = await fs.lstat(resolved)
|
||||
} finally {
|
||||
// if we got a stat, return its contents
|
||||
if (stat) {
|
||||
return { uid: stat.uid, gid: stat.gid }
|
||||
}
|
||||
|
||||
// try the parent directory
|
||||
if (resolved !== dirname(resolved)) {
|
||||
return find(dirname(resolved))
|
||||
}
|
||||
|
||||
// no more parents, never got a stat, just return an empty object
|
||||
return {}
|
||||
}
|
||||
}
|
||||
|
||||
// given a path, uid, and gid update the ownership of the path if necessary
|
||||
const update = async (path, uid, gid) => {
|
||||
// nothing to update, just exit
|
||||
if (uid === undefined && gid === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
// see if the permissions are already the same, if they are we don't
|
||||
// need to do anything, so return early
|
||||
const stat = await fs.stat(path)
|
||||
if (uid === stat.uid && gid === stat.gid) {
|
||||
return
|
||||
}
|
||||
} catch (err) {}
|
||||
|
||||
try {
|
||||
await fs.chown(path, uid, gid)
|
||||
} catch (err) {}
|
||||
}
|
||||
|
||||
// accepts a `path` and the `owner` property of an options object and normalizes
|
||||
// it into an object with numerical `uid` and `gid`
|
||||
const validate = async (path, input) => {
|
||||
let uid
|
||||
let gid
|
||||
|
||||
if (typeof input === 'string' || typeof input === 'number') {
|
||||
uid = input
|
||||
gid = input
|
||||
} else if (input && typeof input === 'object') {
|
||||
uid = input.uid
|
||||
gid = input.gid
|
||||
}
|
||||
|
||||
if (uid === 'inherit' || gid === 'inherit') {
|
||||
const owner = await find(path)
|
||||
if (uid === 'inherit') {
|
||||
uid = owner.uid
|
||||
}
|
||||
|
||||
if (gid === 'inherit') {
|
||||
gid = owner.gid
|
||||
}
|
||||
}
|
||||
|
||||
return { uid, gid }
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
find,
|
||||
update,
|
||||
validate,
|
||||
}
|
||||
22
node_modules/@npmcli/fs/lib/copy-file.js
generated
vendored
Normal file
22
node_modules/@npmcli/fs/lib/copy-file.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
const fs = require('./fs.js')
|
||||
const getOptions = require('./common/get-options.js')
|
||||
const owner = require('./common/owner.js')
|
||||
|
||||
const copyFile = async (src, dest, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['mode', 'owner'],
|
||||
wrap: 'mode',
|
||||
})
|
||||
|
||||
const { uid, gid } = await owner.validate(dest, options.owner)
|
||||
|
||||
// the node core method as of 16.5.0 does not support the mode being in an
|
||||
// object, so we have to pass the mode value directly
|
||||
const result = await fs.copyFile(src, dest, options.mode)
|
||||
|
||||
await owner.update(dest, uid, gid)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = copyFile
|
||||
15
node_modules/@npmcli/fs/lib/cp/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/fs/lib/cp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2011-2017 JP Richardson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
|
||||
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
|
||||
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
22
node_modules/@npmcli/fs/lib/cp/index.js
generated
vendored
Normal file
22
node_modules/@npmcli/fs/lib/cp/index.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
const fs = require('../fs.js')
|
||||
const getOptions = require('../common/get-options.js')
|
||||
const node = require('../common/node.js')
|
||||
const polyfill = require('./polyfill.js')
|
||||
|
||||
// node 16.7.0 added fs.cp
|
||||
const useNative = node.satisfies('>=16.7.0')
|
||||
|
||||
const cp = async (src, dest, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
|
||||
})
|
||||
|
||||
// the polyfill is tested separately from this module, no need to hack
|
||||
// process.version to try to trigger it just for coverage
|
||||
// istanbul ignore next
|
||||
return useNative
|
||||
? fs.cp(src, dest, options)
|
||||
: polyfill(src, dest, options)
|
||||
}
|
||||
|
||||
module.exports = cp
|
||||
428
node_modules/@npmcli/fs/lib/cp/polyfill.js
generated
vendored
Normal file
428
node_modules/@npmcli/fs/lib/cp/polyfill.js
generated
vendored
Normal file
@@ -0,0 +1,428 @@
|
||||
// this file is a modified version of the code in node 17.2.0
|
||||
// which is, in turn, a modified version of the fs-extra module on npm
|
||||
// node core changes:
|
||||
// - Use of the assert module has been replaced with core's error system.
|
||||
// - All code related to the glob dependency has been removed.
|
||||
// - Bring your own custom fs module is not currently supported.
|
||||
// - Some basic code cleanup.
|
||||
// changes here:
|
||||
// - remove all callback related code
|
||||
// - drop sync support
|
||||
// - change assertions back to non-internal methods (see options.js)
|
||||
// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ERR_FS_CP_DIR_TO_NON_DIR,
|
||||
ERR_FS_CP_EEXIST,
|
||||
ERR_FS_CP_EINVAL,
|
||||
ERR_FS_CP_FIFO_PIPE,
|
||||
ERR_FS_CP_NON_DIR_TO_DIR,
|
||||
ERR_FS_CP_SOCKET,
|
||||
ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
|
||||
ERR_FS_CP_UNKNOWN,
|
||||
ERR_FS_EISDIR,
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
} = require('../errors.js')
|
||||
const {
|
||||
constants: {
|
||||
errno: {
|
||||
EEXIST,
|
||||
EISDIR,
|
||||
EINVAL,
|
||||
ENOTDIR,
|
||||
},
|
||||
},
|
||||
} = require('os')
|
||||
const {
|
||||
chmod,
|
||||
copyFile,
|
||||
lstat,
|
||||
mkdir,
|
||||
readdir,
|
||||
readlink,
|
||||
stat,
|
||||
symlink,
|
||||
unlink,
|
||||
utimes,
|
||||
} = require('../fs.js')
|
||||
const {
|
||||
dirname,
|
||||
isAbsolute,
|
||||
join,
|
||||
parse,
|
||||
resolve,
|
||||
sep,
|
||||
toNamespacedPath,
|
||||
} = require('path')
|
||||
const { fileURLToPath } = require('url')
|
||||
|
||||
const defaultOptions = {
|
||||
dereference: false,
|
||||
errorOnExist: false,
|
||||
filter: undefined,
|
||||
force: true,
|
||||
preserveTimestamps: false,
|
||||
recursive: false,
|
||||
}
|
||||
|
||||
async function cp (src, dest, opts) {
|
||||
if (opts != null && typeof opts !== 'object') {
|
||||
throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
|
||||
}
|
||||
return cpFn(
|
||||
toNamespacedPath(getValidatedPath(src)),
|
||||
toNamespacedPath(getValidatedPath(dest)),
|
||||
{ ...defaultOptions, ...opts })
|
||||
}
|
||||
|
||||
function getValidatedPath (fileURLOrPath) {
|
||||
const path = fileURLOrPath != null && fileURLOrPath.href
|
||||
&& fileURLOrPath.origin
|
||||
? fileURLToPath(fileURLOrPath)
|
||||
: fileURLOrPath
|
||||
return path
|
||||
}
|
||||
|
||||
async function cpFn (src, dest, opts) {
|
||||
// Warn about using preserveTimestamps on 32-bit node
|
||||
// istanbul ignore next
|
||||
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
||||
const warning = 'Using the preserveTimestamps option in 32-bit ' +
|
||||
'node is not recommended'
|
||||
process.emitWarning(warning, 'TimestampPrecisionWarning')
|
||||
}
|
||||
const stats = await checkPaths(src, dest, opts)
|
||||
const { srcStat, destStat } = stats
|
||||
await checkParentPaths(src, srcStat, dest)
|
||||
if (opts.filter) {
|
||||
return handleFilter(checkParentDir, destStat, src, dest, opts)
|
||||
}
|
||||
return checkParentDir(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
async function checkPaths (src, dest, opts) {
|
||||
const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
|
||||
if (destStat) {
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: 'src and dest cannot be the same',
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
||||
throw new ERR_FS_CP_DIR_TO_NON_DIR({
|
||||
message: `cannot overwrite directory ${src} ` +
|
||||
`with non-directory ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EISDIR,
|
||||
})
|
||||
}
|
||||
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
||||
throw new ERR_FS_CP_NON_DIR_TO_DIR({
|
||||
message: `cannot overwrite non-directory ${src} ` +
|
||||
`with directory ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: ENOTDIR,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: `cannot copy ${src} to a subdirectory of self ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
return { srcStat, destStat }
|
||||
}
|
||||
|
||||
function areIdentical (srcStat, destStat) {
|
||||
return destStat.ino && destStat.dev && destStat.ino === srcStat.ino &&
|
||||
destStat.dev === srcStat.dev
|
||||
}
|
||||
|
||||
function getStats (src, dest, opts) {
|
||||
const statFunc = opts.dereference ?
|
||||
(file) => stat(file, { bigint: true }) :
|
||||
(file) => lstat(file, { bigint: true })
|
||||
return Promise.all([
|
||||
statFunc(src),
|
||||
statFunc(dest).catch((err) => {
|
||||
// istanbul ignore next: unsure how to cover.
|
||||
if (err.code === 'ENOENT') {
|
||||
return null
|
||||
}
|
||||
// istanbul ignore next: unsure how to cover.
|
||||
throw err
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
||||
async function checkParentDir (destStat, src, dest, opts) {
|
||||
const destParent = dirname(dest)
|
||||
const dirExists = await pathExists(destParent)
|
||||
if (dirExists) {
|
||||
return getStatsForCopy(destStat, src, dest, opts)
|
||||
}
|
||||
await mkdir(destParent, { recursive: true })
|
||||
return getStatsForCopy(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
function pathExists (dest) {
|
||||
return stat(dest).then(
|
||||
() => true,
|
||||
// istanbul ignore next: not sure when this would occur
|
||||
(err) => (err.code === 'ENOENT' ? false : Promise.reject(err)))
|
||||
}
|
||||
|
||||
// Recursively check if dest parent is a subdirectory of src.
|
||||
// It works for all file types including symlinks since it
|
||||
// checks the src and dest inodes. It starts from the deepest
|
||||
// parent and stops once it reaches the src parent or the root path.
|
||||
async function checkParentPaths (src, srcStat, dest) {
|
||||
const srcParent = resolve(dirname(src))
|
||||
const destParent = resolve(dirname(dest))
|
||||
if (destParent === srcParent || destParent === parse(destParent).root) {
|
||||
return
|
||||
}
|
||||
let destStat
|
||||
try {
|
||||
destStat = await stat(destParent, { bigint: true })
|
||||
} catch (err) {
|
||||
// istanbul ignore else: not sure when this would occur
|
||||
if (err.code === 'ENOENT') {
|
||||
return
|
||||
}
|
||||
// istanbul ignore next: not sure when this would occur
|
||||
throw err
|
||||
}
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: `cannot copy ${src} to a subdirectory of self ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
return checkParentPaths(src, srcStat, destParent)
|
||||
}
|
||||
|
||||
const normalizePathToArray = (path) =>
|
||||
resolve(path).split(sep).filter(Boolean)
|
||||
|
||||
// Return true if dest is a subdir of src, otherwise false.
|
||||
// It only checks the path strings.
|
||||
function isSrcSubdir (src, dest) {
|
||||
const srcArr = normalizePathToArray(src)
|
||||
const destArr = normalizePathToArray(dest)
|
||||
return srcArr.every((cur, i) => destArr[i] === cur)
|
||||
}
|
||||
|
||||
async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
|
||||
const include = await opts.filter(src, dest)
|
||||
if (include) {
|
||||
return onInclude(destStat, src, dest, opts, cb)
|
||||
}
|
||||
}
|
||||
|
||||
function startCopy (destStat, src, dest, opts) {
|
||||
if (opts.filter) {
|
||||
return handleFilter(getStatsForCopy, destStat, src, dest, opts)
|
||||
}
|
||||
return getStatsForCopy(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
async function getStatsForCopy (destStat, src, dest, opts) {
|
||||
const statFn = opts.dereference ? stat : lstat
|
||||
const srcStat = await statFn(src)
|
||||
// istanbul ignore else: can't portably test FIFO
|
||||
if (srcStat.isDirectory() && opts.recursive) {
|
||||
return onDir(srcStat, destStat, src, dest, opts)
|
||||
} else if (srcStat.isDirectory()) {
|
||||
throw new ERR_FS_EISDIR({
|
||||
message: `${src} is a directory (not copied)`,
|
||||
path: src,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
} else if (srcStat.isFile() ||
|
||||
srcStat.isCharacterDevice() ||
|
||||
srcStat.isBlockDevice()) {
|
||||
return onFile(srcStat, destStat, src, dest, opts)
|
||||
} else if (srcStat.isSymbolicLink()) {
|
||||
return onLink(destStat, src, dest)
|
||||
} else if (srcStat.isSocket()) {
|
||||
throw new ERR_FS_CP_SOCKET({
|
||||
message: `cannot copy a socket file: ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
} else if (srcStat.isFIFO()) {
|
||||
throw new ERR_FS_CP_FIFO_PIPE({
|
||||
message: `cannot copy a FIFO pipe: ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
// istanbul ignore next: should be unreachable
|
||||
throw new ERR_FS_CP_UNKNOWN({
|
||||
message: `cannot copy an unknown file type: ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
|
||||
function onFile (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) {
|
||||
return _copyFile(srcStat, src, dest, opts)
|
||||
}
|
||||
return mayCopyFile(srcStat, src, dest, opts)
|
||||
}
|
||||
|
||||
async function mayCopyFile (srcStat, src, dest, opts) {
|
||||
if (opts.force) {
|
||||
await unlink(dest)
|
||||
return _copyFile(srcStat, src, dest, opts)
|
||||
} else if (opts.errorOnExist) {
|
||||
throw new ERR_FS_CP_EEXIST({
|
||||
message: `${dest} already exists`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EEXIST,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function _copyFile (srcStat, src, dest, opts) {
|
||||
await copyFile(src, dest)
|
||||
if (opts.preserveTimestamps) {
|
||||
return handleTimestampsAndMode(srcStat.mode, src, dest)
|
||||
}
|
||||
return setDestMode(dest, srcStat.mode)
|
||||
}
|
||||
|
||||
async function handleTimestampsAndMode (srcMode, src, dest) {
|
||||
// Make sure the file is writable before setting the timestamp
|
||||
// otherwise open fails with EPERM when invoked with 'r+'
|
||||
// (through utimes call)
|
||||
if (fileIsNotWritable(srcMode)) {
|
||||
await makeFileWritable(dest, srcMode)
|
||||
return setDestTimestampsAndMode(srcMode, src, dest)
|
||||
}
|
||||
return setDestTimestampsAndMode(srcMode, src, dest)
|
||||
}
|
||||
|
||||
function fileIsNotWritable (srcMode) {
|
||||
return (srcMode & 0o200) === 0
|
||||
}
|
||||
|
||||
function makeFileWritable (dest, srcMode) {
|
||||
return setDestMode(dest, srcMode | 0o200)
|
||||
}
|
||||
|
||||
async function setDestTimestampsAndMode (srcMode, src, dest) {
|
||||
await setDestTimestamps(src, dest)
|
||||
return setDestMode(dest, srcMode)
|
||||
}
|
||||
|
||||
function setDestMode (dest, srcMode) {
|
||||
return chmod(dest, srcMode)
|
||||
}
|
||||
|
||||
async function setDestTimestamps (src, dest) {
|
||||
// The initial srcStat.atime cannot be trusted
|
||||
// because it is modified by the read(2) system call
|
||||
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
||||
const updatedSrcStat = await stat(src)
|
||||
return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
|
||||
}
|
||||
|
||||
function onDir (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) {
|
||||
return mkDirAndCopy(srcStat.mode, src, dest, opts)
|
||||
}
|
||||
return copyDir(src, dest, opts)
|
||||
}
|
||||
|
||||
async function mkDirAndCopy (srcMode, src, dest, opts) {
|
||||
await mkdir(dest)
|
||||
await copyDir(src, dest, opts)
|
||||
return setDestMode(dest, srcMode)
|
||||
}
|
||||
|
||||
async function copyDir (src, dest, opts) {
|
||||
const dir = await readdir(src)
|
||||
for (let i = 0; i < dir.length; i++) {
|
||||
const item = dir[i]
|
||||
const srcItem = join(src, item)
|
||||
const destItem = join(dest, item)
|
||||
const { destStat } = await checkPaths(srcItem, destItem, opts)
|
||||
await startCopy(destStat, srcItem, destItem, opts)
|
||||
}
|
||||
}
|
||||
|
||||
async function onLink (destStat, src, dest) {
|
||||
let resolvedSrc = await readlink(src)
|
||||
if (!isAbsolute(resolvedSrc)) {
|
||||
resolvedSrc = resolve(dirname(src), resolvedSrc)
|
||||
}
|
||||
if (!destStat) {
|
||||
return symlink(resolvedSrc, dest)
|
||||
}
|
||||
let resolvedDest
|
||||
try {
|
||||
resolvedDest = await readlink(dest)
|
||||
} catch (err) {
|
||||
// Dest exists and is a regular file or directory,
|
||||
// Windows may throw UNKNOWN error. If dest already exists,
|
||||
// fs throws error anyway, so no need to guard against it here.
|
||||
// istanbul ignore next: can only test on windows
|
||||
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
|
||||
return symlink(resolvedSrc, dest)
|
||||
}
|
||||
// istanbul ignore next: should not be possible
|
||||
throw err
|
||||
}
|
||||
if (!isAbsolute(resolvedDest)) {
|
||||
resolvedDest = resolve(dirname(dest), resolvedDest)
|
||||
}
|
||||
if (isSrcSubdir(resolvedSrc, resolvedDest)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
|
||||
`${resolvedDest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
// Do not copy if src is a subdir of dest since unlinking
|
||||
// dest in this case would result in removing src contents
|
||||
// and therefore a broken symlink would be created.
|
||||
const srcStat = await stat(src)
|
||||
if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
|
||||
throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
|
||||
message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
return copyLink(resolvedSrc, dest)
|
||||
}
|
||||
|
||||
async function copyLink (resolvedSrc, dest) {
|
||||
await unlink(dest)
|
||||
return symlink(resolvedSrc, dest)
|
||||
}
|
||||
|
||||
module.exports = cp
|
||||
129
node_modules/@npmcli/fs/lib/errors.js
generated
vendored
Normal file
129
node_modules/@npmcli/fs/lib/errors.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
'use strict'
|
||||
const { inspect } = require('util')
|
||||
|
||||
// adapted from node's internal/errors
|
||||
// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
|
||||
|
||||
// close copy of node's internal SystemError class.
|
||||
class SystemError {
|
||||
constructor (code, prefix, context) {
|
||||
// XXX context.code is undefined in all constructors used in cp/polyfill
|
||||
// that may be a bug copied from node, maybe the constructor should use
|
||||
// `code` not `errno`? nodejs/node#41104
|
||||
let message = `${prefix}: ${context.syscall} returned ` +
|
||||
`${context.code} (${context.message})`
|
||||
|
||||
if (context.path !== undefined) {
|
||||
message += ` ${context.path}`
|
||||
}
|
||||
if (context.dest !== undefined) {
|
||||
message += ` => ${context.dest}`
|
||||
}
|
||||
|
||||
this.code = code
|
||||
Object.defineProperties(this, {
|
||||
name: {
|
||||
value: 'SystemError',
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
},
|
||||
message: {
|
||||
value: message,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
},
|
||||
info: {
|
||||
value: context,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
},
|
||||
errno: {
|
||||
get () {
|
||||
return context.errno
|
||||
},
|
||||
set (value) {
|
||||
context.errno = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
},
|
||||
syscall: {
|
||||
get () {
|
||||
return context.syscall
|
||||
},
|
||||
set (value) {
|
||||
context.syscall = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (context.path !== undefined) {
|
||||
Object.defineProperty(this, 'path', {
|
||||
get () {
|
||||
return context.path
|
||||
},
|
||||
set (value) {
|
||||
context.path = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
})
|
||||
}
|
||||
|
||||
if (context.dest !== undefined) {
|
||||
Object.defineProperty(this, 'dest', {
|
||||
get () {
|
||||
return context.dest
|
||||
},
|
||||
set (value) {
|
||||
context.dest = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
|
||||
[Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
|
||||
return inspect(this, {
|
||||
...ctx,
|
||||
getters: true,
|
||||
customInspect: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function E (code, message) {
|
||||
module.exports[code] = class NodeError extends SystemError {
|
||||
constructor (ctx) {
|
||||
super(code, message, ctx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
|
||||
E('ERR_FS_CP_EEXIST', 'Target already exists')
|
||||
E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
|
||||
E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
|
||||
E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
|
||||
E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
|
||||
E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
|
||||
E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
|
||||
E('ERR_FS_EISDIR', 'Path is a directory')
|
||||
|
||||
module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
|
||||
constructor (name, expected, actual) {
|
||||
super()
|
||||
this.code = 'ERR_INVALID_ARG_TYPE'
|
||||
this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
|
||||
}
|
||||
}
|
||||
8
node_modules/@npmcli/fs/lib/fs.js
generated
vendored
Normal file
8
node_modules/@npmcli/fs/lib/fs.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
const fs = require('fs')
|
||||
const promisify = require('@gar/promisify')
|
||||
|
||||
// this module returns the core fs module wrapped in a proxy that promisifies
|
||||
// method calls within the getter. we keep it in a separate module so that the
|
||||
// overridden methods have a consistent way to get to promisified fs methods
|
||||
// without creating a circular dependency
|
||||
module.exports = promisify(fs)
|
||||
10
node_modules/@npmcli/fs/lib/index.js
generated
vendored
Normal file
10
node_modules/@npmcli/fs/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
module.exports = {
|
||||
...require('./fs.js'),
|
||||
copyFile: require('./copy-file.js'),
|
||||
cp: require('./cp/index.js'),
|
||||
mkdir: require('./mkdir/index.js'),
|
||||
mkdtemp: require('./mkdtemp.js'),
|
||||
rm: require('./rm/index.js'),
|
||||
withTempDir: require('./with-temp-dir.js'),
|
||||
writeFile: require('./write-file.js'),
|
||||
}
|
||||
32
node_modules/@npmcli/fs/lib/mkdir/index.js
generated
vendored
Normal file
32
node_modules/@npmcli/fs/lib/mkdir/index.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
const fs = require('../fs.js')
|
||||
const getOptions = require('../common/get-options.js')
|
||||
const node = require('../common/node.js')
|
||||
const owner = require('../common/owner.js')
|
||||
|
||||
const polyfill = require('./polyfill.js')
|
||||
|
||||
// node 10.12.0 added the options parameter, which allows recursive and mode
|
||||
// properties to be passed
|
||||
const useNative = node.satisfies('>=10.12.0')
|
||||
|
||||
// extends mkdir with the ability to specify an owner of the new dir
|
||||
const mkdir = async (path, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['mode', 'recursive', 'owner'],
|
||||
wrap: 'mode',
|
||||
})
|
||||
const { uid, gid } = await owner.validate(path, options.owner)
|
||||
|
||||
// the polyfill is tested separately from this module, no need to hack
|
||||
// process.version to try to trigger it just for coverage
|
||||
// istanbul ignore next
|
||||
const result = useNative
|
||||
? await fs.mkdir(path, options)
|
||||
: await polyfill(path, options)
|
||||
|
||||
await owner.update(path, uid, gid)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = mkdir
|
||||
81
node_modules/@npmcli/fs/lib/mkdir/polyfill.js
generated
vendored
Normal file
81
node_modules/@npmcli/fs/lib/mkdir/polyfill.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
const { dirname } = require('path')
|
||||
|
||||
const fileURLToPath = require('../common/file-url-to-path/index.js')
|
||||
const fs = require('../fs.js')
|
||||
|
||||
const defaultOptions = {
|
||||
mode: 0o777,
|
||||
recursive: false,
|
||||
}
|
||||
|
||||
const mkdir = async (path, opts) => {
|
||||
const options = { ...defaultOptions, ...opts }
|
||||
|
||||
// if we're not in recursive mode, just call the real mkdir with the path and
|
||||
// the mode option only
|
||||
if (!options.recursive) {
|
||||
return fs.mkdir(path, options.mode)
|
||||
}
|
||||
|
||||
const makeDirectory = async (dir, mode) => {
|
||||
// we can't use dirname directly since these functions support URL
|
||||
// objects with the file: protocol as the path input, so first we get a
|
||||
// string path, then we can call dirname on that
|
||||
const parent = dir != null && dir.href && dir.origin
|
||||
? dirname(fileURLToPath(dir))
|
||||
: dirname(dir)
|
||||
|
||||
// if the parent is the dir itself, try to create it. anything but EISDIR
|
||||
// should be rethrown
|
||||
if (parent === dir) {
|
||||
try {
|
||||
await fs.mkdir(dir, opts)
|
||||
} catch (err) {
|
||||
if (err.code !== 'EISDIR') {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.mkdir(dir, mode)
|
||||
return dir
|
||||
} catch (err) {
|
||||
// ENOENT means the parent wasn't there, so create that
|
||||
if (err.code === 'ENOENT') {
|
||||
const made = await makeDirectory(parent, mode)
|
||||
await makeDirectory(dir, mode)
|
||||
// return the shallowest path we created, i.e. the result of creating
|
||||
// the parent
|
||||
return made
|
||||
}
|
||||
|
||||
// an EEXIST means there's already something there
|
||||
// an EROFS means we have a read-only filesystem and can't create a dir
|
||||
// any other error is fatal and we should give up now
|
||||
if (err.code !== 'EEXIST' && err.code !== 'EROFS') {
|
||||
throw err
|
||||
}
|
||||
|
||||
// stat the directory, if the result is a directory, then we successfully
|
||||
// created this one so return its path. otherwise, we reject with the
|
||||
// original error by ignoring the error in the catch
|
||||
try {
|
||||
const stat = await fs.stat(dir)
|
||||
if (stat.isDirectory()) {
|
||||
// if it already existed, we didn't create anything so return
|
||||
// undefined
|
||||
return undefined
|
||||
}
|
||||
} catch (_) {}
|
||||
|
||||
// if the thing that's there isn't a directory, then just re-throw
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
return makeDirectory(path, options.mode)
|
||||
}
|
||||
|
||||
module.exports = mkdir
|
||||
28
node_modules/@npmcli/fs/lib/mkdtemp.js
generated
vendored
Normal file
28
node_modules/@npmcli/fs/lib/mkdtemp.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
const { dirname, sep } = require('path')
|
||||
|
||||
const fs = require('./fs.js')
|
||||
const getOptions = require('./common/get-options.js')
|
||||
const owner = require('./common/owner.js')
|
||||
|
||||
const mkdtemp = async (prefix, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['encoding', 'owner'],
|
||||
wrap: 'encoding',
|
||||
})
|
||||
|
||||
// mkdtemp relies on the trailing path separator to indicate if it should
|
||||
// create a directory inside of the prefix. if that's the case then the root
|
||||
// we infer ownership from is the prefix itself, otherwise it's the dirname
|
||||
// /tmp -> /tmpABCDEF, infers from /
|
||||
// /tmp/ -> /tmp/ABCDEF, infers from /tmp
|
||||
const root = prefix.endsWith(sep) ? prefix : dirname(prefix)
|
||||
const { uid, gid } = await owner.validate(root, options.owner)
|
||||
|
||||
const result = await fs.mkdtemp(prefix, options)
|
||||
|
||||
await owner.update(result, uid, gid)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = mkdtemp
|
||||
22
node_modules/@npmcli/fs/lib/rm/index.js
generated
vendored
Normal file
22
node_modules/@npmcli/fs/lib/rm/index.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
const fs = require('../fs.js')
|
||||
const getOptions = require('../common/get-options.js')
|
||||
const node = require('../common/node.js')
|
||||
const polyfill = require('./polyfill.js')
|
||||
|
||||
// node 14.14.0 added fs.rm, which allows both the force and recursive options
|
||||
const useNative = node.satisfies('>=14.14.0')
|
||||
|
||||
const rm = async (path, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['retryDelay', 'maxRetries', 'recursive', 'force'],
|
||||
})
|
||||
|
||||
// the polyfill is tested separately from this module, no need to hack
|
||||
// process.version to try to trigger it just for coverage
|
||||
// istanbul ignore next
|
||||
return useNative
|
||||
? fs.rm(path, options)
|
||||
: polyfill(path, options)
|
||||
}
|
||||
|
||||
module.exports = rm
|
||||
239
node_modules/@npmcli/fs/lib/rm/polyfill.js
generated
vendored
Normal file
239
node_modules/@npmcli/fs/lib/rm/polyfill.js
generated
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
// this file is a modified version of the code in node core >=14.14.0
|
||||
// which is, in turn, a modified version of the rimraf module on npm
|
||||
// node core changes:
|
||||
// - Use of the assert module has been replaced with core's error system.
|
||||
// - All code related to the glob dependency has been removed.
|
||||
// - Bring your own custom fs module is not currently supported.
|
||||
// - Some basic code cleanup.
|
||||
// changes here:
|
||||
// - remove all callback related code
|
||||
// - drop sync support
|
||||
// - change assertions back to non-internal methods (see options.js)
|
||||
// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
|
||||
const errnos = require('os').constants.errno
|
||||
const { join } = require('path')
|
||||
const fs = require('../fs.js')
|
||||
|
||||
// error codes that mean we need to remove contents
|
||||
const notEmptyCodes = new Set([
|
||||
'ENOTEMPTY',
|
||||
'EEXIST',
|
||||
'EPERM',
|
||||
])
|
||||
|
||||
// error codes we can retry later
|
||||
const retryCodes = new Set([
|
||||
'EBUSY',
|
||||
'EMFILE',
|
||||
'ENFILE',
|
||||
'ENOTEMPTY',
|
||||
'EPERM',
|
||||
])
|
||||
|
||||
const isWindows = process.platform === 'win32'
|
||||
|
||||
const defaultOptions = {
|
||||
retryDelay: 100,
|
||||
maxRetries: 0,
|
||||
recursive: false,
|
||||
force: false,
|
||||
}
|
||||
|
||||
// this is drastically simplified, but should be roughly equivalent to what
|
||||
// node core throws
|
||||
class ERR_FS_EISDIR extends Error {
|
||||
constructor (path) {
|
||||
super()
|
||||
this.info = {
|
||||
code: 'EISDIR',
|
||||
message: 'is a directory',
|
||||
path,
|
||||
syscall: 'rm',
|
||||
errno: errnos.EISDIR,
|
||||
}
|
||||
this.name = 'SystemError'
|
||||
this.code = 'ERR_FS_EISDIR'
|
||||
this.errno = errnos.EISDIR
|
||||
this.syscall = 'rm'
|
||||
this.path = path
|
||||
this.message = `Path is a directory: ${this.syscall} returned ` +
|
||||
`${this.info.code} (is a directory) ${path}`
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
class ENOTDIR extends Error {
|
||||
constructor (path) {
|
||||
super()
|
||||
this.name = 'Error'
|
||||
this.code = 'ENOTDIR'
|
||||
this.errno = errnos.ENOTDIR
|
||||
this.syscall = 'rmdir'
|
||||
this.path = path
|
||||
this.message = `not a directory, ${this.syscall} '${this.path}'`
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name}: ${this.code}: ${this.message}`
|
||||
}
|
||||
}
|
||||
|
||||
// force is passed separately here because we respect it for the first entry
|
||||
// into rimraf only, any further calls that are spawned as a result (i.e. to
|
||||
// delete content within the target) will ignore ENOENT errors
|
||||
const rimraf = async (path, options, isTop = false) => {
|
||||
const force = isTop ? options.force : true
|
||||
const stat = await fs.lstat(path)
|
||||
.catch((err) => {
|
||||
// we only ignore ENOENT if we're forcing this call
|
||||
if (err.code === 'ENOENT' && force) {
|
||||
return
|
||||
}
|
||||
|
||||
if (isWindows && err.code === 'EPERM') {
|
||||
return fixEPERM(path, options, err, isTop)
|
||||
}
|
||||
|
||||
throw err
|
||||
})
|
||||
|
||||
// no stat object here means either lstat threw an ENOENT, or lstat threw
|
||||
// an EPERM and the fixPERM function took care of things. either way, we're
|
||||
// already done, so return early
|
||||
if (!stat) {
|
||||
return
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
return rmdir(path, options, null, isTop)
|
||||
}
|
||||
|
||||
return fs.unlink(path)
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT' && force) {
|
||||
return
|
||||
}
|
||||
|
||||
if (err.code === 'EISDIR') {
|
||||
return rmdir(path, options, err, isTop)
|
||||
}
|
||||
|
||||
if (err.code === 'EPERM') {
|
||||
// in windows, we handle this through fixEPERM which will also try to
|
||||
// delete things again. everywhere else since deleting the target as a
|
||||
// file didn't work we go ahead and try to delete it as a directory
|
||||
return isWindows
|
||||
? fixEPERM(path, options, err, isTop)
|
||||
: rmdir(path, options, err, isTop)
|
||||
}
|
||||
|
||||
throw err
|
||||
})
|
||||
}
|
||||
|
||||
const fixEPERM = async (path, options, originalErr, isTop) => {
|
||||
const force = isTop ? options.force : true
|
||||
const targetMissing = await fs.chmod(path, 0o666)
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT' && force) {
|
||||
return true
|
||||
}
|
||||
|
||||
throw originalErr
|
||||
})
|
||||
|
||||
// got an ENOENT above, return now. no file = no problem
|
||||
if (targetMissing) {
|
||||
return
|
||||
}
|
||||
|
||||
// this function does its own lstat rather than calling rimraf again to avoid
|
||||
// infinite recursion for a repeating EPERM
|
||||
const stat = await fs.lstat(path)
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT' && force) {
|
||||
return
|
||||
}
|
||||
|
||||
throw originalErr
|
||||
})
|
||||
|
||||
if (!stat) {
|
||||
return
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
return rmdir(path, options, originalErr, isTop)
|
||||
}
|
||||
|
||||
return fs.unlink(path)
|
||||
}
|
||||
|
||||
const rmdir = async (path, options, originalErr, isTop) => {
|
||||
if (!options.recursive && isTop) {
|
||||
throw originalErr || new ERR_FS_EISDIR(path)
|
||||
}
|
||||
const force = isTop ? options.force : true
|
||||
|
||||
return fs.rmdir(path)
|
||||
.catch(async (err) => {
|
||||
// in Windows, calling rmdir on a file path will fail with ENOENT rather
|
||||
// than ENOTDIR. to determine if that's what happened, we have to do
|
||||
// another lstat on the path. if the path isn't actually gone, we throw
|
||||
// away the ENOENT and replace it with our own ENOTDIR
|
||||
if (isWindows && err.code === 'ENOENT') {
|
||||
const stillExists = await fs.lstat(path).then(() => true, () => false)
|
||||
if (stillExists) {
|
||||
err = new ENOTDIR(path)
|
||||
}
|
||||
}
|
||||
|
||||
// not there, not a problem
|
||||
if (err.code === 'ENOENT' && force) {
|
||||
return
|
||||
}
|
||||
|
||||
// we may not have originalErr if lstat tells us our target is a
|
||||
// directory but that changes before we actually remove it, so
|
||||
// only throw it here if it's set
|
||||
if (originalErr && err.code === 'ENOTDIR') {
|
||||
throw originalErr
|
||||
}
|
||||
|
||||
// the directory isn't empty, remove the contents and try again
|
||||
if (notEmptyCodes.has(err.code)) {
|
||||
const files = await fs.readdir(path)
|
||||
await Promise.all(files.map((file) => {
|
||||
const target = join(path, file)
|
||||
return rimraf(target, options)
|
||||
}))
|
||||
return fs.rmdir(path)
|
||||
}
|
||||
|
||||
throw err
|
||||
})
|
||||
}
|
||||
|
||||
const rm = async (path, opts) => {
|
||||
const options = { ...defaultOptions, ...opts }
|
||||
let retries = 0
|
||||
|
||||
const errHandler = async (err) => {
|
||||
if (retryCodes.has(err.code) && ++retries < options.maxRetries) {
|
||||
const delay = retries * options.retryDelay
|
||||
await promiseTimeout(delay)
|
||||
return rimraf(path, options, true).catch(errHandler)
|
||||
}
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
return rimraf(path, options, true).catch(errHandler)
|
||||
}
|
||||
|
||||
const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms))
|
||||
|
||||
module.exports = rm
|
||||
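The `rm` implementation above is the module's entire export. A minimal usage sketch, assuming the package index re-exports it as `rm` (the target paths and retry values are illustrative):

```js
const { rm } = require('@npmcli/fs')

async function main () {
  // recursive is required to delete a non-empty directory; force suppresses
  // ENOENT when the target is already gone; retries cover transient errors
  await rm('./build-output', { recursive: true, force: true, maxRetries: 3, retryDelay: 100 })

  // without recursive, pointing rm at a directory rejects with the
  // ERR_FS_EISDIR error class defined above
  await rm('./some-directory').catch(err => console.error(err.code)) // 'ERR_FS_EISDIR'
}

main()
```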
39
node_modules/@npmcli/fs/lib/with-temp-dir.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
const { join, sep } = require('path')
|
||||
|
||||
const getOptions = require('./common/get-options.js')
|
||||
const mkdir = require('./mkdir/index.js')
|
||||
const mkdtemp = require('./mkdtemp.js')
|
||||
const rm = require('./rm/index.js')
|
||||
|
||||
// create a temp directory, ensure its permissions match its parent, then call
|
||||
// the supplied function passing it the path to the directory. clean up after
|
||||
// the function finishes, whether it throws or not
|
||||
const withTempDir = async (root, fn, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['tmpPrefix'],
|
||||
})
|
||||
// create the directory, and fix its ownership
|
||||
await mkdir(root, { recursive: true, owner: 'inherit' })
|
||||
|
||||
const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' })
|
||||
let err
|
||||
let result
|
||||
|
||||
try {
|
||||
result = await fn(target)
|
||||
} catch (_err) {
|
||||
err = _err
|
||||
}
|
||||
|
||||
try {
|
||||
await rm(target, { force: true, recursive: true })
|
||||
} catch (err) {}
|
||||
|
||||
if (err) {
|
||||
throw err
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = withTempDir
|
||||
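A usage sketch for `withTempDir`, assuming the package index re-exports it; the root path, prefix and callback body are illustrative:

```js
const { withTempDir } = require('@npmcli/fs')
const { promises: fs } = require('fs')
const { join } = require('path')

async function main () {
  const summary = await withTempDir('/tmp', async (dir) => {
    // `dir` exists only for the lifetime of this callback
    await fs.writeFile(join(dir, 'scratch.txt'), 'temporary data')
    return `worked in ${dir}`
  }, { tmpPrefix: 'example-' })

  // by the time withTempDir resolves, the temp directory has been removed
  console.log(summary)
}

main().catch(console.error)
```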
19
node_modules/@npmcli/fs/lib/write-file.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
const fs = require('./fs.js')
|
||||
const getOptions = require('./common/get-options.js')
|
||||
const owner = require('./common/owner.js')
|
||||
|
||||
const writeFile = async (file, data, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['encoding', 'mode', 'flag', 'signal', 'owner'],
|
||||
wrap: 'encoding',
|
||||
})
|
||||
const { uid, gid } = await owner.validate(file, options.owner)
|
||||
|
||||
const result = await fs.writeFile(file, data, options)
|
||||
|
||||
await owner.update(file, uid, gid)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = writeFile
|
||||
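A sketch of the owner-aware `writeFile`, assuming the package index re-exports it and that `owner: 'inherit'` is accepted here the same way with-temp-dir.js uses it above; the path and contents are illustrative:

```js
const { writeFile } = require('@npmcli/fs')

async function main () {
  await writeFile('/var/cache/app/state.json', JSON.stringify({ ok: true }), {
    encoding: 'utf8',
    mode: 0o644,
    owner: 'inherit', // uid/gid are validated first, then applied after the write
  })
}

main().catch(console.error)
```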
38
node_modules/@npmcli/fs/package.json
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"name": "@npmcli/fs",
|
||||
"version": "1.1.1",
|
||||
"description": "filesystem utilities for the npm cli",
|
||||
"main": "lib/index.js",
|
||||
"files": [
|
||||
"bin",
|
||||
"lib"
|
||||
],
|
||||
"scripts": {
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags",
|
||||
"snap": "tap",
|
||||
"test": "tap",
|
||||
"npmclilint": "npmcli-lint",
|
||||
"lint": "eslint '**/*.js'",
|
||||
"lintfix": "npm run lint -- --fix",
|
||||
"posttest": "npm run lint",
|
||||
"postsnap": "npm run lintfix --",
|
||||
"postlint": "npm-template-check"
|
||||
},
|
||||
"keywords": [
|
||||
"npm",
|
||||
"oss"
|
||||
],
|
||||
"author": "GitHub Inc.",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@npmcli/template-oss": "^2.3.1",
|
||||
"tap": "^15.0.9"
|
||||
},
|
||||
"dependencies": {
|
||||
"@gar/promisify": "^1.0.1",
|
||||
"semver": "^7.3.5"
|
||||
},
|
||||
"templateVersion": "2.3.1"
|
||||
}
|
||||
22
node_modules/@npmcli/move-file/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
Copyright (c) npm, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the "Software"),
|
||||
to deal in the Software without restriction, including without limitation
|
||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
69
node_modules/@npmcli/move-file/README.md
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
# @npmcli/move-file
|
||||
|
||||
A fork of [move-file](https://github.com/sindresorhus/move-file) with
|
||||
compatibility with all node 10.x versions.
|
||||
|
||||
> Move a file (or directory)
|
||||
|
||||
The built-in
|
||||
[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback)
|
||||
is just a JavaScript wrapper for the C `rename(2)` function, which doesn't
|
||||
support moving files across partitions or devices. This module is what you
|
||||
would have expected `fs.rename()` to be.
|
||||
|
||||
## Highlights
|
||||
|
||||
- Promise API.
|
||||
- Supports moving a file across partitions and devices.
|
||||
- Optionally prevent overwriting an existing file.
|
||||
- Creates non-existent destination directories for you.
|
||||
- Support for Node versions that lack built-in recursive `fs.mkdir()`.
|
||||
- Automatically recurses when source is a directory.
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install @npmcli/move-file
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const moveFile = require('@npmcli/move-file');
|
||||
|
||||
(async () => {
|
||||
await moveFile('source/unicorn.png', 'destination/unicorn.png');
|
||||
console.log('The file has been moved');
|
||||
})();
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### moveFile(source, destination, options?)
|
||||
|
||||
Returns a `Promise` that resolves when the file has been moved.
|
||||
|
||||
### moveFile.sync(source, destination, options?)
|
||||
|
||||
#### source
|
||||
|
||||
Type: `string`
|
||||
|
||||
File, or directory, you want to move.
|
||||
|
||||
#### destination
|
||||
|
||||
Type: `string`
|
||||
|
||||
Where you want the file or directory moved.
|
||||
|
||||
#### options
|
||||
|
||||
Type: `object`
|
||||
|
||||
##### overwrite
|
||||
|
||||
Type: `boolean`\
|
||||
Default: `true`
|
||||
|
||||
Overwrite existing destination file(s).
|
||||
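A sketch complementing the async example above, covering `overwrite: false` and the synchronous form; file names are illustrative:

```js
const moveFile = require('@npmcli/move-file')

async function main () {
  try {
    await moveFile('source/unicorn.png', 'destination/unicorn.png', { overwrite: false })
  } catch (err) {
    // thrown when destination/unicorn.png already exists
    console.error(err.message)
  }

  // the synchronous form takes the same arguments
  moveFile.sync('source/rainbow.png', 'destination/rainbow.png')
}

main()
```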
162
node_modules/@npmcli/move-file/index.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
const { dirname, join, resolve, relative, isAbsolute } = require('path')
|
||||
const rimraf_ = require('rimraf')
|
||||
const { promisify } = require('util')
|
||||
const {
|
||||
access: access_,
|
||||
accessSync,
|
||||
copyFile: copyFile_,
|
||||
copyFileSync,
|
||||
unlink: unlink_,
|
||||
unlinkSync,
|
||||
readdir: readdir_,
|
||||
readdirSync,
|
||||
rename: rename_,
|
||||
renameSync,
|
||||
stat: stat_,
|
||||
statSync,
|
||||
lstat: lstat_,
|
||||
lstatSync,
|
||||
symlink: symlink_,
|
||||
symlinkSync,
|
||||
readlink: readlink_,
|
||||
readlinkSync
|
||||
} = require('fs')
|
||||
|
||||
const access = promisify(access_)
|
||||
const copyFile = promisify(copyFile_)
|
||||
const unlink = promisify(unlink_)
|
||||
const readdir = promisify(readdir_)
|
||||
const rename = promisify(rename_)
|
||||
const stat = promisify(stat_)
|
||||
const lstat = promisify(lstat_)
|
||||
const symlink = promisify(symlink_)
|
||||
const readlink = promisify(readlink_)
|
||||
const rimraf = promisify(rimraf_)
|
||||
const rimrafSync = rimraf_.sync
|
||||
|
||||
const mkdirp = require('mkdirp')
|
||||
|
||||
const pathExists = async path => {
|
||||
try {
|
||||
await access(path)
|
||||
return true
|
||||
} catch (er) {
|
||||
return er.code !== 'ENOENT'
|
||||
}
|
||||
}
|
||||
|
||||
const pathExistsSync = path => {
|
||||
try {
|
||||
accessSync(path)
|
||||
return true
|
||||
} catch (er) {
|
||||
return er.code !== 'ENOENT'
|
||||
}
|
||||
}
|
||||
|
||||
const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
|
||||
if (!source || !destination) {
|
||||
throw new TypeError('`source` and `destination` file required')
|
||||
}
|
||||
|
||||
options = {
|
||||
overwrite: true,
|
||||
...options
|
||||
}
|
||||
|
||||
if (!options.overwrite && await pathExists(destination)) {
|
||||
throw new Error(`The destination file exists: ${destination}`)
|
||||
}
|
||||
|
||||
await mkdirp(dirname(destination))
|
||||
|
||||
try {
|
||||
await rename(source, destination)
|
||||
} catch (error) {
|
||||
if (error.code === 'EXDEV' || error.code === 'EPERM') {
|
||||
const sourceStat = await lstat(source)
|
||||
if (sourceStat.isDirectory()) {
|
||||
const files = await readdir(source)
|
||||
await Promise.all(files.map((file) => moveFile(join(source, file), join(destination, file), options, false, symlinks)))
|
||||
} else if (sourceStat.isSymbolicLink()) {
|
||||
symlinks.push({ source, destination })
|
||||
} else {
|
||||
await copyFile(source, destination)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (root) {
|
||||
await Promise.all(symlinks.map(async ({ source, destination }) => {
|
||||
let target = await readlink(source)
|
||||
// junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination
|
||||
if (isAbsolute(target))
|
||||
target = resolve(destination, relative(source, target))
|
||||
// try to determine what the actual file is so we can create the correct type of symlink in windows
|
||||
let targetStat
|
||||
try {
|
||||
targetStat = await stat(resolve(dirname(source), target))
|
||||
} catch (err) {}
|
||||
await symlink(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file')
|
||||
}))
|
||||
await rimraf(source)
|
||||
}
|
||||
}
|
||||
|
||||
const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => {
|
||||
if (!source || !destination) {
|
||||
throw new TypeError('`source` and `destination` file required')
|
||||
}
|
||||
|
||||
options = {
|
||||
overwrite: true,
|
||||
...options
|
||||
}
|
||||
|
||||
if (!options.overwrite && pathExistsSync(destination)) {
|
||||
throw new Error(`The destination file exists: ${destination}`)
|
||||
}
|
||||
|
||||
mkdirp.sync(dirname(destination))
|
||||
|
||||
try {
|
||||
renameSync(source, destination)
|
||||
} catch (error) {
|
||||
if (error.code === 'EXDEV' || error.code === 'EPERM') {
|
||||
const sourceStat = lstatSync(source)
|
||||
if (sourceStat.isDirectory()) {
|
||||
const files = readdirSync(source)
|
||||
for (const file of files) {
|
||||
moveFileSync(join(source, file), join(destination, file), options, false, symlinks)
|
||||
}
|
||||
} else if (sourceStat.isSymbolicLink()) {
|
||||
symlinks.push({ source, destination })
|
||||
} else {
|
||||
copyFileSync(source, destination)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (root) {
|
||||
for (const { source, destination } of symlinks) {
|
||||
let target = readlinkSync(source)
|
||||
// junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination
|
||||
if (isAbsolute(target))
|
||||
target = resolve(destination, relative(source, target))
|
||||
// try to determine what the actual file is so we can create the correct type of symlink in windows
|
||||
let targetStat
|
||||
try {
|
||||
targetStat = statSync(resolve(dirname(source), target))
|
||||
} catch (err) {}
|
||||
symlinkSync(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file')
|
||||
}
|
||||
rimrafSync(source)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = moveFile
|
||||
module.exports.sync = moveFileSync
|
||||
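A sketch of the fallback path implemented above: when `rename` fails with EXDEV (or EPERM on Windows), directories are walked recursively, regular files are copied, symlinks are recreated at the destination, and the source is removed last. Paths are illustrative:

```js
const moveFile = require('@npmcli/move-file')

async function main () {
  // moving across filesystems exercises the copy + symlink + rimraf fallback
  await moveFile('./cache', '/mnt/other-volume/cache')
}

main().catch(console.error)
```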
34
node_modules/@npmcli/move-file/package.json
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "@npmcli/move-file",
|
||||
"version": "1.1.2",
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"description": "move a file (fork of move-file)",
|
||||
"dependencies": {
|
||||
"mkdirp": "^1.0.4",
|
||||
"rimraf": "^3.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"require-inject": "^1.4.4",
|
||||
"tap": "^14.10.7"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/npm/move-file"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
}
|
||||
6
node_modules/@serialport/binding-mock/.releaserc
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"branches": [
|
||||
"main",
|
||||
"next"
|
||||
]
|
||||
}
|
||||
21
node_modules/@serialport/binding-mock/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Francis Gulotta
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
9
node_modules/@serialport/binding-mock/README.md
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
# @serialport/binding-mock

```ts
import { MockBinding } from '@serialport/binding-mock'

// create a mock port, open it, then write through the returned port binding
MockBinding.createPort('/dev/fakePort', { echo: true })
const port = await MockBinding.open({ path: '/dev/fakePort', baudRate: 9600 }) // baudRate value is illustrative
await port.write(Buffer.from('data'))
```
|
||||
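Because a port created with `echo: true` echoes writes back, a follow-up read can verify the round trip. A sketch; the port path, baud rate and buffer size are illustrative:

```js
const { MockBinding } = require('@serialport/binding-mock')

async function roundTrip () {
  MockBinding.createPort('/dev/echoPort', { echo: true })
  const port = await MockBinding.open({ path: '/dev/echoPort', baudRate: 9600 })

  await port.write(Buffer.from('ping'))

  // the echo is emitted on the next tick; read resolves once it arrives
  const buffer = Buffer.alloc(16)
  const { bytesRead } = await port.read(buffer, 0, buffer.length)
  console.log(buffer.slice(0, bytesRead).toString()) // 'ping'

  await port.close()
}

roundTrip().catch(console.error)
```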
271
node_modules/@serialport/binding-mock/dist/index-esm.mjs
generated
vendored
Normal file
@@ -0,0 +1,271 @@
|
||||
import debugFactory from 'debug';
|
||||
|
||||
const debug = debugFactory('serialport/binding-mock');
|
||||
let ports = {};
|
||||
let serialNumber = 0;
|
||||
function resolveNextTick() {
|
||||
return new Promise(resolve => process.nextTick(() => resolve()));
|
||||
}
|
||||
class CanceledError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.canceled = true;
|
||||
}
|
||||
}
|
||||
const MockBinding = {
|
||||
reset() {
|
||||
ports = {};
|
||||
serialNumber = 0;
|
||||
},
|
||||
// Create a mock port
|
||||
createPort(path, options = {}) {
|
||||
serialNumber++;
|
||||
const optWithDefaults = Object.assign({ echo: false, record: false, manufacturer: 'The J5 Robotics Company', vendorId: undefined, productId: undefined, maxReadSize: 1024 }, options);
|
||||
ports[path] = {
|
||||
data: Buffer.alloc(0),
|
||||
echo: optWithDefaults.echo,
|
||||
record: optWithDefaults.record,
|
||||
readyData: optWithDefaults.readyData,
|
||||
maxReadSize: optWithDefaults.maxReadSize,
|
||||
info: {
|
||||
path,
|
||||
manufacturer: optWithDefaults.manufacturer,
|
||||
serialNumber: `${serialNumber}`,
|
||||
pnpId: undefined,
|
||||
locationId: undefined,
|
||||
vendorId: optWithDefaults.vendorId,
|
||||
productId: optWithDefaults.productId,
|
||||
},
|
||||
};
|
||||
debug(serialNumber, 'created port', JSON.stringify({ path, opt: options }));
|
||||
},
|
||||
async list() {
|
||||
debug(null, 'list');
|
||||
return Object.values(ports).map(port => port.info);
|
||||
},
|
||||
async open(options) {
|
||||
var _a;
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
if (!options.path) {
|
||||
throw new TypeError('"path" is not a valid port');
|
||||
}
|
||||
if (!options.baudRate) {
|
||||
throw new TypeError('"baudRate" is not a valid baudRate');
|
||||
}
|
||||
const openOptions = Object.assign({ dataBits: 8, lock: true, stopBits: 1, parity: 'none', rtscts: false, xon: false, xoff: false, xany: false, hupcl: true }, options);
|
||||
const { path } = openOptions;
|
||||
debug(null, `open: opening path ${path}`);
|
||||
const port = ports[path];
|
||||
await resolveNextTick();
|
||||
if (!port) {
|
||||
throw new Error(`Port does not exist - please call MockBinding.createPort('${path}') first`);
|
||||
}
|
||||
const serialNumber = port.info.serialNumber;
|
||||
if ((_a = port.openOpt) === null || _a === void 0 ? void 0 : _a.lock) {
|
||||
debug(serialNumber, 'open: Port is locked cannot open');
|
||||
throw new Error('Port is locked cannot open');
|
||||
}
|
||||
debug(serialNumber, `open: opened path ${path}`);
|
||||
port.openOpt = Object.assign({}, openOptions);
|
||||
return new MockPortBinding(port, openOptions);
|
||||
},
|
||||
};
|
||||
/**
|
||||
* Mock bindings for pretend serialport access
|
||||
*/
|
||||
class MockPortBinding {
|
||||
constructor(port, openOptions) {
|
||||
this.port = port;
|
||||
this.openOptions = openOptions;
|
||||
this.pendingRead = null;
|
||||
this.isOpen = true;
|
||||
this.lastWrite = null;
|
||||
this.recording = Buffer.alloc(0);
|
||||
this.writeOperation = null; // in flight promise or null
|
||||
this.serialNumber = port.info.serialNumber;
|
||||
if (port.readyData) {
|
||||
const data = port.readyData;
|
||||
process.nextTick(() => {
|
||||
if (this.isOpen) {
|
||||
debug(this.serialNumber, 'emitting ready data');
|
||||
this.emitData(data);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// Emit data on a mock port
|
||||
emitData(data) {
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port must be open to pretend to receive data');
|
||||
}
|
||||
const bufferData = Buffer.isBuffer(data) ? data : Buffer.from(data);
|
||||
debug(this.serialNumber, 'emitting data - pending read:', Boolean(this.pendingRead));
|
||||
this.port.data = Buffer.concat([this.port.data, bufferData]);
|
||||
if (this.pendingRead) {
|
||||
process.nextTick(this.pendingRead);
|
||||
this.pendingRead = null;
|
||||
}
|
||||
}
|
||||
async close() {
|
||||
debug(this.serialNumber, 'close');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
const port = this.port;
|
||||
if (!port) {
|
||||
throw new Error('already closed');
|
||||
}
|
||||
port.openOpt = undefined;
|
||||
// reset data on close
|
||||
port.data = Buffer.alloc(0);
|
||||
debug(this.serialNumber, 'port is closed');
|
||||
this.serialNumber = undefined;
|
||||
this.isOpen = false;
|
||||
if (this.pendingRead) {
|
||||
this.pendingRead(new CanceledError('port is closed'));
|
||||
}
|
||||
}
|
||||
async read(buffer, offset, length) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (typeof offset !== 'number' || isNaN(offset)) {
|
||||
throw new TypeError(`"offset" is not an integer got "${isNaN(offset) ? 'NaN' : typeof offset}"`);
|
||||
}
|
||||
if (typeof length !== 'number' || isNaN(length)) {
|
||||
throw new TypeError(`"length" is not an integer got "${isNaN(length) ? 'NaN' : typeof length}"`);
|
||||
}
|
||||
if (buffer.length < offset + length) {
|
||||
throw new Error('buffer is too small');
|
||||
}
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
debug(this.serialNumber, 'read', length, 'bytes');
|
||||
await resolveNextTick();
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new CanceledError('Read canceled');
|
||||
}
|
||||
if (this.port.data.length <= 0) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.pendingRead = err => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
this.read(buffer, offset, length).then(resolve, reject);
|
||||
};
|
||||
});
|
||||
}
|
||||
const lengthToRead = this.port.maxReadSize > length ? length : this.port.maxReadSize;
|
||||
const data = this.port.data.slice(0, lengthToRead);
|
||||
const bytesRead = data.copy(buffer, offset);
|
||||
this.port.data = this.port.data.slice(lengthToRead);
|
||||
debug(this.serialNumber, 'read', bytesRead, 'bytes');
|
||||
return { bytesRead, buffer };
|
||||
}
|
||||
async write(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (!this.isOpen || !this.port) {
|
||||
debug('write', 'error port is not open');
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
debug(this.serialNumber, 'write', buffer.length, 'bytes');
|
||||
if (this.writeOperation) {
|
||||
throw new Error('Overlapping writes are not supported and should be queued by the serialport object');
|
||||
}
|
||||
this.writeOperation = (async () => {
|
||||
await resolveNextTick();
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Write canceled');
|
||||
}
|
||||
const data = (this.lastWrite = Buffer.from(buffer)); // copy
|
||||
if (this.port.record) {
|
||||
this.recording = Buffer.concat([this.recording, data]);
|
||||
}
|
||||
if (this.port.echo) {
|
||||
process.nextTick(() => {
|
||||
if (this.isOpen) {
|
||||
this.emitData(data);
|
||||
}
|
||||
});
|
||||
}
|
||||
this.writeOperation = null;
|
||||
debug(this.serialNumber, 'writing finished');
|
||||
})();
|
||||
return this.writeOperation;
|
||||
}
|
||||
async update(options) {
|
||||
if (typeof options !== 'object') {
|
||||
throw TypeError('"options" is not an object');
|
||||
}
|
||||
if (typeof options.baudRate !== 'number') {
|
||||
throw new TypeError('"options.baudRate" is not a number');
|
||||
}
|
||||
debug(this.serialNumber, 'update');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
if (this.port.openOpt) {
|
||||
this.port.openOpt.baudRate = options.baudRate;
|
||||
}
|
||||
}
|
||||
async set(options) {
|
||||
if (typeof options !== 'object') {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
debug(this.serialNumber, 'set');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
}
|
||||
async get() {
|
||||
debug(this.serialNumber, 'get');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
return {
|
||||
cts: true,
|
||||
dsr: false,
|
||||
dcd: false,
|
||||
};
|
||||
}
|
||||
async getBaudRate() {
|
||||
var _a;
|
||||
debug(this.serialNumber, 'getBaudRate');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
if (!((_a = this.port.openOpt) === null || _a === void 0 ? void 0 : _a.baudRate)) {
|
||||
throw new Error('Internal Error');
|
||||
}
|
||||
return {
|
||||
baudRate: this.port.openOpt.baudRate,
|
||||
};
|
||||
}
|
||||
async flush() {
|
||||
debug(this.serialNumber, 'flush');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
this.port.data = Buffer.alloc(0);
|
||||
}
|
||||
async drain() {
|
||||
debug(this.serialNumber, 'drain');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await this.writeOperation;
|
||||
await resolveNextTick();
|
||||
}
|
||||
}
|
||||
|
||||
export { CanceledError, MockBinding, MockPortBinding };
|
||||
73
node_modules/@serialport/binding-mock/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
/// <reference types="node" />
|
||||
|
||||
import { BindingInterface } from '@serialport/bindings-interface';
|
||||
import { BindingPortInterface } from '@serialport/bindings-interface';
|
||||
import { OpenOptions } from '@serialport/bindings-interface';
|
||||
import { PortInfo } from '@serialport/bindings-interface';
|
||||
import { PortStatus } from '@serialport/bindings-interface';
|
||||
import { SetOptions } from '@serialport/bindings-interface';
|
||||
import { UpdateOptions } from '@serialport/bindings-interface';
|
||||
|
||||
export declare class CanceledError extends Error {
|
||||
canceled: true;
|
||||
constructor(message: string);
|
||||
}
|
||||
|
||||
export declare interface CreatePortOptions {
|
||||
echo?: boolean;
|
||||
record?: boolean;
|
||||
readyData?: Buffer;
|
||||
maxReadSize?: number;
|
||||
manufacturer?: string;
|
||||
vendorId?: string;
|
||||
productId?: string;
|
||||
}
|
||||
|
||||
export declare const MockBinding: MockBindingInterface;
|
||||
|
||||
export declare interface MockBindingInterface extends BindingInterface<MockPortBinding> {
|
||||
reset(): void;
|
||||
createPort(path: string, opt?: CreatePortOptions): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock bindings for pretend serialport access
|
||||
*/
|
||||
export declare class MockPortBinding implements BindingPortInterface {
|
||||
readonly openOptions: Required<OpenOptions>;
|
||||
readonly port: MockPortInternal;
|
||||
private pendingRead;
|
||||
lastWrite: null | Buffer;
|
||||
recording: Buffer;
|
||||
writeOperation: null | Promise<void>;
|
||||
isOpen: boolean;
|
||||
serialNumber?: string;
|
||||
constructor(port: MockPortInternal, openOptions: Required<OpenOptions>);
|
||||
emitData(data: Buffer | string): void;
|
||||
close(): Promise<void>;
|
||||
read(buffer: Buffer, offset: number, length: number): Promise<{
|
||||
buffer: Buffer;
|
||||
bytesRead: number;
|
||||
}>;
|
||||
write(buffer: Buffer): Promise<void>;
|
||||
update(options: UpdateOptions): Promise<void>;
|
||||
set(options: SetOptions): Promise<void>;
|
||||
get(): Promise<PortStatus>;
|
||||
getBaudRate(): Promise<{
|
||||
baudRate: number;
|
||||
}>;
|
||||
flush(): Promise<void>;
|
||||
drain(): Promise<void>;
|
||||
}
|
||||
|
||||
export declare interface MockPortInternal {
|
||||
data: Buffer;
|
||||
echo: boolean;
|
||||
record: boolean;
|
||||
info: PortInfo;
|
||||
maxReadSize: number;
|
||||
readyData?: Buffer;
|
||||
openOpt?: OpenOptions;
|
||||
}
|
||||
|
||||
export { }
|
||||
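The declarations above also document `CreatePortOptions` beyond `echo`: `readyData` is emitted as soon as a port is opened, `record` accumulates everything written, and `maxReadSize` caps a single read. A sketch exercising them; all values are illustrative:

```js
const { MockBinding } = require('@serialport/binding-mock')

async function main () {
  MockBinding.createPort('/dev/recorder', {
    record: true,
    readyData: Buffer.from('ready!'),
    maxReadSize: 8, // a single read returns at most 8 bytes
  })

  const port = await MockBinding.open({ path: '/dev/recorder', baudRate: 57600 })

  // the readyData shows up on the first read after opening
  const buffer = Buffer.alloc(8)
  const { bytesRead } = await port.read(buffer, 0, buffer.length)
  console.log(buffer.slice(0, bytesRead).toString()) // 'ready!'

  await port.write(Buffer.from('logged'))
  console.log(port.recording.toString()) // 'logged'

  await port.close()
}

main().catch(console.error)
```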
281
node_modules/@serialport/binding-mock/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,281 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var debugFactory = require('debug');
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var debugFactory__default = /*#__PURE__*/_interopDefaultLegacy(debugFactory);
|
||||
|
||||
const debug = debugFactory__default["default"]('serialport/binding-mock');
|
||||
let ports = {};
|
||||
let serialNumber = 0;
|
||||
function resolveNextTick() {
|
||||
return new Promise(resolve => process.nextTick(() => resolve()));
|
||||
}
|
||||
class CanceledError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.canceled = true;
|
||||
}
|
||||
}
|
||||
const MockBinding = {
|
||||
reset() {
|
||||
ports = {};
|
||||
serialNumber = 0;
|
||||
},
|
||||
// Create a mock port
|
||||
createPort(path, options = {}) {
|
||||
serialNumber++;
|
||||
const optWithDefaults = Object.assign({ echo: false, record: false, manufacturer: 'The J5 Robotics Company', vendorId: undefined, productId: undefined, maxReadSize: 1024 }, options);
|
||||
ports[path] = {
|
||||
data: Buffer.alloc(0),
|
||||
echo: optWithDefaults.echo,
|
||||
record: optWithDefaults.record,
|
||||
readyData: optWithDefaults.readyData,
|
||||
maxReadSize: optWithDefaults.maxReadSize,
|
||||
info: {
|
||||
path,
|
||||
manufacturer: optWithDefaults.manufacturer,
|
||||
serialNumber: `${serialNumber}`,
|
||||
pnpId: undefined,
|
||||
locationId: undefined,
|
||||
vendorId: optWithDefaults.vendorId,
|
||||
productId: optWithDefaults.productId,
|
||||
},
|
||||
};
|
||||
debug(serialNumber, 'created port', JSON.stringify({ path, opt: options }));
|
||||
},
|
||||
async list() {
|
||||
debug(null, 'list');
|
||||
return Object.values(ports).map(port => port.info);
|
||||
},
|
||||
async open(options) {
|
||||
var _a;
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
if (!options.path) {
|
||||
throw new TypeError('"path" is not a valid port');
|
||||
}
|
||||
if (!options.baudRate) {
|
||||
throw new TypeError('"baudRate" is not a valid baudRate');
|
||||
}
|
||||
const openOptions = Object.assign({ dataBits: 8, lock: true, stopBits: 1, parity: 'none', rtscts: false, xon: false, xoff: false, xany: false, hupcl: true }, options);
|
||||
const { path } = openOptions;
|
||||
debug(null, `open: opening path ${path}`);
|
||||
const port = ports[path];
|
||||
await resolveNextTick();
|
||||
if (!port) {
|
||||
throw new Error(`Port does not exist - please call MockBinding.createPort('${path}') first`);
|
||||
}
|
||||
const serialNumber = port.info.serialNumber;
|
||||
if ((_a = port.openOpt) === null || _a === void 0 ? void 0 : _a.lock) {
|
||||
debug(serialNumber, 'open: Port is locked cannot open');
|
||||
throw new Error('Port is locked cannot open');
|
||||
}
|
||||
debug(serialNumber, `open: opened path ${path}`);
|
||||
port.openOpt = Object.assign({}, openOptions);
|
||||
return new MockPortBinding(port, openOptions);
|
||||
},
|
||||
};
|
||||
/**
|
||||
* Mock bindings for pretend serialport access
|
||||
*/
|
||||
class MockPortBinding {
|
||||
constructor(port, openOptions) {
|
||||
this.port = port;
|
||||
this.openOptions = openOptions;
|
||||
this.pendingRead = null;
|
||||
this.isOpen = true;
|
||||
this.lastWrite = null;
|
||||
this.recording = Buffer.alloc(0);
|
||||
this.writeOperation = null; // in flight promise or null
|
||||
this.serialNumber = port.info.serialNumber;
|
||||
if (port.readyData) {
|
||||
const data = port.readyData;
|
||||
process.nextTick(() => {
|
||||
if (this.isOpen) {
|
||||
debug(this.serialNumber, 'emitting ready data');
|
||||
this.emitData(data);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// Emit data on a mock port
|
||||
emitData(data) {
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port must be open to pretend to receive data');
|
||||
}
|
||||
const bufferData = Buffer.isBuffer(data) ? data : Buffer.from(data);
|
||||
debug(this.serialNumber, 'emitting data - pending read:', Boolean(this.pendingRead));
|
||||
this.port.data = Buffer.concat([this.port.data, bufferData]);
|
||||
if (this.pendingRead) {
|
||||
process.nextTick(this.pendingRead);
|
||||
this.pendingRead = null;
|
||||
}
|
||||
}
|
||||
async close() {
|
||||
debug(this.serialNumber, 'close');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
const port = this.port;
|
||||
if (!port) {
|
||||
throw new Error('already closed');
|
||||
}
|
||||
port.openOpt = undefined;
|
||||
// reset data on close
|
||||
port.data = Buffer.alloc(0);
|
||||
debug(this.serialNumber, 'port is closed');
|
||||
this.serialNumber = undefined;
|
||||
this.isOpen = false;
|
||||
if (this.pendingRead) {
|
||||
this.pendingRead(new CanceledError('port is closed'));
|
||||
}
|
||||
}
|
||||
async read(buffer, offset, length) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (typeof offset !== 'number' || isNaN(offset)) {
|
||||
throw new TypeError(`"offset" is not an integer got "${isNaN(offset) ? 'NaN' : typeof offset}"`);
|
||||
}
|
||||
if (typeof length !== 'number' || isNaN(length)) {
|
||||
throw new TypeError(`"length" is not an integer got "${isNaN(length) ? 'NaN' : typeof length}"`);
|
||||
}
|
||||
if (buffer.length < offset + length) {
|
||||
throw new Error('buffer is too small');
|
||||
}
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
debug(this.serialNumber, 'read', length, 'bytes');
|
||||
await resolveNextTick();
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new CanceledError('Read canceled');
|
||||
}
|
||||
if (this.port.data.length <= 0) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.pendingRead = err => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
this.read(buffer, offset, length).then(resolve, reject);
|
||||
};
|
||||
});
|
||||
}
|
||||
const lengthToRead = this.port.maxReadSize > length ? length : this.port.maxReadSize;
|
||||
const data = this.port.data.slice(0, lengthToRead);
|
||||
const bytesRead = data.copy(buffer, offset);
|
||||
this.port.data = this.port.data.slice(lengthToRead);
|
||||
debug(this.serialNumber, 'read', bytesRead, 'bytes');
|
||||
return { bytesRead, buffer };
|
||||
}
|
||||
async write(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (!this.isOpen || !this.port) {
|
||||
debug('write', 'error port is not open');
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
debug(this.serialNumber, 'write', buffer.length, 'bytes');
|
||||
if (this.writeOperation) {
|
||||
throw new Error('Overlapping writes are not supported and should be queued by the serialport object');
|
||||
}
|
||||
this.writeOperation = (async () => {
|
||||
await resolveNextTick();
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Write canceled');
|
||||
}
|
||||
const data = (this.lastWrite = Buffer.from(buffer)); // copy
|
||||
if (this.port.record) {
|
||||
this.recording = Buffer.concat([this.recording, data]);
|
||||
}
|
||||
if (this.port.echo) {
|
||||
process.nextTick(() => {
|
||||
if (this.isOpen) {
|
||||
this.emitData(data);
|
||||
}
|
||||
});
|
||||
}
|
||||
this.writeOperation = null;
|
||||
debug(this.serialNumber, 'writing finished');
|
||||
})();
|
||||
return this.writeOperation;
|
||||
}
|
||||
async update(options) {
|
||||
if (typeof options !== 'object') {
|
||||
throw TypeError('"options" is not an object');
|
||||
}
|
||||
if (typeof options.baudRate !== 'number') {
|
||||
throw new TypeError('"options.baudRate" is not a number');
|
||||
}
|
||||
debug(this.serialNumber, 'update');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
if (this.port.openOpt) {
|
||||
this.port.openOpt.baudRate = options.baudRate;
|
||||
}
|
||||
}
|
||||
async set(options) {
|
||||
if (typeof options !== 'object') {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
debug(this.serialNumber, 'set');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
}
|
||||
async get() {
|
||||
debug(this.serialNumber, 'get');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
return {
|
||||
cts: true,
|
||||
dsr: false,
|
||||
dcd: false,
|
||||
};
|
||||
}
|
||||
async getBaudRate() {
|
||||
var _a;
|
||||
debug(this.serialNumber, 'getBaudRate');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
if (!((_a = this.port.openOpt) === null || _a === void 0 ? void 0 : _a.baudRate)) {
|
||||
throw new Error('Internal Error');
|
||||
}
|
||||
return {
|
||||
baudRate: this.port.openOpt.baudRate,
|
||||
};
|
||||
}
|
||||
async flush() {
|
||||
debug(this.serialNumber, 'flush');
|
||||
if (!this.isOpen || !this.port) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await resolveNextTick();
|
||||
this.port.data = Buffer.alloc(0);
|
||||
}
|
||||
async drain() {
|
||||
debug(this.serialNumber, 'drain');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await this.writeOperation;
|
||||
await resolveNextTick();
|
||||
}
|
||||
}
|
||||
|
||||
exports.CanceledError = CanceledError;
|
||||
exports.MockBinding = MockBinding;
|
||||
exports.MockPortBinding = MockPortBinding;
|
||||
58
node_modules/@serialport/binding-mock/package.json
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"name": "@serialport/binding-mock",
|
||||
"version": "10.2.2",
|
||||
"description": "The mock serialport bindings",
|
||||
"types": "./dist/index.d.ts",
|
||||
"main": "./dist/index.js",
|
||||
"exports": {
|
||||
"require": "./dist/index.js",
|
||||
"default": "./dist/index-esm.mjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"repository": "git@github.com:serialport/binding-mock.git",
|
||||
"homepage": "https://github.com/serialport/binding-mock",
|
||||
"scripts": {
|
||||
"test": "mocha",
|
||||
"lint": "tsc && eslint lib/**/*.ts",
|
||||
"format": "eslint lib/**/*.ts --fix",
|
||||
"clean": "rm -rf dist-ts dist",
|
||||
"build": "npm run clean && tsc -p tsconfig-build.json && rollup -c && node -r esbuild-register bundle-types",
|
||||
"prepublishOnly": "npm run build",
|
||||
"semantic-release": "semantic-release"
|
||||
},
|
||||
"keywords": [
|
||||
"serialport-binding",
|
||||
"debug"
|
||||
],
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@microsoft/api-extractor": "7.19.4",
|
||||
"@types/chai": "4.3.0",
|
||||
"@types/mocha": "9.1.0",
|
||||
"@types/node": "17.0.15",
|
||||
"@typescript-eslint/eslint-plugin": "5.10.2",
|
||||
"@typescript-eslint/parser": "5.10.2",
|
||||
"chai": "4.3.6",
|
||||
"esbuild": "0.14.18",
|
||||
"esbuild-register": "3.3.2",
|
||||
"eslint": "8.8.0",
|
||||
"mocha": "9.2.0",
|
||||
"rollup": "2.67.0",
|
||||
"rollup-plugin-node-resolve": "5.2.0",
|
||||
"semantic-release": "19.0.2",
|
||||
"typescript": "4.5.5"
|
||||
},
|
||||
"mocha": {
|
||||
"bail": true,
|
||||
"require": [
|
||||
"esbuild-register"
|
||||
],
|
||||
"spec": "lib/**/*-test.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@serialport/bindings-interface": "^1.2.1",
|
||||
"debug": "^4.3.3"
|
||||
}
|
||||
}
|
||||
21
node_modules/@serialport/bindings-cpp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright 2010 Christopher Williams. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
95
node_modules/@serialport/bindings-cpp/README.md
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
# @serialport/bindings-cpp
|
||||
|
||||
[](#backers)
|
||||
[](#sponsors)
|
||||
[](https://codecov.io/gh/serialport/bindings-cpp)
|
||||
[](https://github.com/serialport/bindings-cpp/actions/workflows/test.yml)
|
||||
|
||||
Access serial ports with JavaScript. Linux, OSX and Windows. Welcome your robotic JavaScript overlords. Better yet, program them!
|
||||
|
||||
> Go to https://serialport.io/ to learn more, find guides and api documentation.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- 📚 [**Guides**](https://serialport.io/docs/)
|
||||
- [**API Docs**](https://serialport.io/docs/api-serialport)
|
||||
- [`@serialport/bindings-cpp`](https://www.npmjs.com/package/@serialport/bindings-cpp)
|
||||
- 🐛 [Help and Bugs](https://github.com/serialport/node-serialport/issues/new/choose) All serialport issues are pointed to the main serialport repo.
|
||||
|
||||
### Bindings
|
||||
|
||||
The Bindings provide a low level interface to work with your serialport. It is possible to use them alone, but it's usually easier to use them with an interface; a minimal usage sketch follows the list below.
|
||||
|
||||
- [`@serialport/bindings-cpp`](https://serialport.io/docs/api-bindings-cpp) bindings for Linux, Mac and Windows
|
||||
- [`@serialport/bindings-interface`](https://serialport.io/docs/api-bindings-interface) as an interface to use if you're making your own bindings
|
||||
- [`@serialport/binding-mock`](https://serialport.io/docs/api-binding-mock) for a mock binding package for testing
|
||||
|
||||
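A minimal sketch of direct binding use. The package's top-level entry point is not part of this diff, so the `autoDetect()` helper below is assumed from the project's documentation; the device path and baud rate are illustrative:

```js
const { autoDetect } = require('@serialport/bindings-cpp')

async function main () {
  const binding = autoDetect() // e.g. the DarwinBinding from dist/darwin.js on macOS
  const port = await binding.open({ path: '/dev/tty.usbserial-1420', baudRate: 115200 })

  await port.write(Buffer.from('hello\n'))

  const buffer = Buffer.alloc(64)
  const { bytesRead } = await port.read(buffer, 0, buffer.length)
  console.log(buffer.slice(0, bytesRead).toString())

  await port.close()
}

main().catch(console.error)
```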
## Developing
|
||||
|
||||
### Developing node serialport projects
|
||||
|
||||
1. Clone this repo `git clone git@github.com:serialport/bindings-cpp.git`
|
||||
1. Run `npm install` to setup local package dependencies (run this any time you depend on a package local to this repo)
|
||||
1. Run `npm test` to ensure everything is working properly
|
||||
1. If you have a serial loopback device (TX to RX) you can run `TEST_PORT=/path/to/port npm test` for a more comprehensive test suite. (Defaults to 115200 baud; customize with the `TEST_BAUD` env variable.) You can use an Arduino with the `test/arduino-echo` sketch.
|
||||
|
||||
### Developing Docs
|
||||
|
||||
See https://github.com/serialport/website
|
||||
|
||||
## License
|
||||
|
||||
SerialPort packages are all [MIT licensed](LICENSE) and all of their dependencies are MIT licensed.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
SerialPort follows the [Nodebots Code of Conduct](http://nodebots.io/conduct.html). While the code is MIT licensed, participation in the community has some rules to make this a good place to work and learn.
|
||||
|
||||
### TLDR
|
||||
|
||||
- Be respectful.
|
||||
- Abusive behavior is never tolerated.
|
||||
- Data published to NodeBots is hosted at the discretion of the service administrators, and may be removed.
|
||||
- Don't build evil robots.
|
||||
- Violations of this code may result in swift and permanent expulsion from the NodeBots community.
|
||||
|
||||
## Governance and Community
|
||||
|
||||
SerialPort currently employs a [governance model](https://medium.com/the-node-js-collection/healthy-open-source-967fa8be7951) with a group of maintainers, committers and contributors, all fixing bugs, adding features and improving documentation. You need not apply to work on SerialPort; all are welcome to join, build, and maintain this project.
|
||||
|
||||
- A Contributor is any individual creating or commenting on an issue or pull request. By participating, this is you.
|
||||
- Committers are contributors who have been given write access to the repository. They can review and merge pull requests.
|
||||
- Maintainers are committers representing the required technical expertise to resolve rare disputes.
|
||||
|
||||
If you have a PR that improves the project, people in any or all of the above groups will help you land it.
|
||||
|
||||
**Maintainers**
|
||||
|
||||
- [Francis Gulotta](https://twitter.com/reconbot) | [reconbot](https://github.com/reconbot)
|
||||
- [Nick Hehr](https://twitter.com/hipsterbrown) | [hipsterbrown](https://github.com/hipsterbrown)
|
||||
|
||||
### Contributors
|
||||
|
||||
This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
|
||||
<a href="https://github.com/serialport/node-serialport/graphs/contributors"><img src="https://opencollective.com/serialport/contributors.svg?width=890&button=false" /></a>
|
||||
|
||||
### Backers
|
||||
|
||||
Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/serialport#backer)]
|
||||
|
||||
<a href="https://opencollective.com/serialport#backers" target="_blank"><img src="https://opencollective.com/serialport/backers.svg?width=890"></a>
|
||||
|
||||
### Sponsors
|
||||
|
||||
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/serialport#sponsor)]
|
||||
|
||||
<!-- <a href="https://opencollective.com/serialport/sponsor/0/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/1/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/2/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/3/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/4/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/5/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/6/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/7/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/8/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/serialport/sponsor/9/website" target="_blank"><img src="https://opencollective.com/serialport/sponsor/9/avatar.svg"></a> -->
|
||||
80
node_modules/@serialport/bindings-cpp/binding.gyp
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
{
|
||||
'variables': {
|
||||
'openssl_fips': ''
|
||||
},
|
||||
'targets': [{
|
||||
'target_name': 'bindings',
|
||||
'sources': [
|
||||
'src/serialport.cpp'
|
||||
],
|
||||
'include_dirs': ["<!(node -p \"require('node-addon-api').include_dir\")"],
|
||||
'cflags!': [ '-fno-exceptions' ],
|
||||
'cflags_cc!': [ '-fno-exceptions' ],
|
||||
"defines": ["NAPI_CPP_EXCEPTIONS"],
|
||||
'conditions': [
|
||||
['OS=="win"',
|
||||
{
|
||||
'defines': ['CHECK_NODE_MODULE_VERSION'],
|
||||
'sources': [
|
||||
'src/serialport_win.cpp'
|
||||
],
|
||||
'msvs_settings': {
|
||||
'VCCLCompilerTool': {
|
||||
'ExceptionHandling': '1',
|
||||
'DisableSpecificWarnings': [ '4530', '4506' ],
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
['OS=="mac"',
|
||||
{
|
||||
'sources': [
|
||||
'src/serialport_unix.cpp',
|
||||
'src/poller.cpp',
|
||||
'src/darwin_list.cpp'
|
||||
],
|
||||
'xcode_settings': {
|
||||
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
|
||||
'MACOSX_DEPLOYMENT_TARGET': '10.9',
|
||||
'OTHER_CFLAGS': [
|
||||
'-arch x86_64',
|
||||
'-arch arm64'
|
||||
],
|
||||
'OTHER_LDFLAGS': [
|
||||
'-framework CoreFoundation',
|
||||
'-framework IOKit',
|
||||
'-arch x86_64',
|
||||
'-arch arm64'
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
['OS=="linux"',
|
||||
{
|
||||
'sources': [
|
||||
'src/serialport_unix.cpp',
|
||||
'src/poller.cpp',
|
||||
'src/serialport_linux.cpp'
|
||||
]
|
||||
}
|
||||
],
|
||||
['OS=="android"',
|
||||
{
|
||||
'sources': [
|
||||
'src/serialport_unix.cpp',
|
||||
'src/poller.cpp',
|
||||
'src/serialport_linux.cpp'
|
||||
]
|
||||
}
|
||||
],
|
||||
['OS!="win"',
|
||||
{
|
||||
'sources': [
|
||||
'src/serialport_unix.cpp',
|
||||
'src/poller.cpp'
|
||||
]
|
||||
}
|
||||
]
|
||||
]
|
||||
}],
|
||||
}
|
||||
39
node_modules/@serialport/bindings-cpp/dist/darwin.d.ts
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
/// <reference types="node" />
|
||||
import { BindingPortInterface } from '.';
|
||||
import { BindingInterface, OpenOptions, PortStatus, SetOptions, UpdateOptions } from '@serialport/bindings-interface';
|
||||
import { Poller } from './poller';
|
||||
export interface DarwinOpenOptions extends OpenOptions {
|
||||
/** Defaults to none */
|
||||
parity?: 'none' | 'even' | 'odd';
|
||||
/** see [`man termios`](http://linux.die.net/man/3/termios) defaults to 1 */
|
||||
vmin?: number;
|
||||
/** see [`man termios`](http://linux.die.net/man/3/termios) defaults to 0 */
|
||||
vtime?: number;
|
||||
}
|
||||
export type DarwinBindingInterface = BindingInterface<DarwinPortBinding, DarwinOpenOptions>;
|
||||
export declare const DarwinBinding: DarwinBindingInterface;
|
||||
/**
|
||||
* The Darwin binding layer for OSX
|
||||
*/
|
||||
export declare class DarwinPortBinding implements BindingPortInterface {
|
||||
readonly openOptions: Required<DarwinOpenOptions>;
|
||||
readonly poller: Poller;
|
||||
private writeOperation;
|
||||
fd: null | number;
|
||||
constructor(fd: number, options: Required<DarwinOpenOptions>);
|
||||
get isOpen(): boolean;
|
||||
close(): Promise<void>;
|
||||
read(buffer: Buffer, offset: number, length: number): Promise<{
|
||||
buffer: Buffer;
|
||||
bytesRead: number;
|
||||
}>;
|
||||
write(buffer: Buffer): Promise<void>;
|
||||
update(options: UpdateOptions): Promise<void>;
|
||||
set(options: SetOptions): Promise<void>;
|
||||
get(): Promise<PortStatus>;
|
||||
getBaudRate(): Promise<{
|
||||
baudRate: number;
|
||||
}>;
|
||||
flush(): Promise<void>;
|
||||
drain(): Promise<void>;
|
||||
}
|
||||
148
node_modules/@serialport/bindings-cpp/dist/darwin.js
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DarwinPortBinding = exports.DarwinBinding = void 0;
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const load_bindings_1 = require("./load-bindings");
|
||||
const poller_1 = require("./poller");
|
||||
const unix_read_1 = require("./unix-read");
|
||||
const unix_write_1 = require("./unix-write");
|
||||
const debug = (0, debug_1.default)('serialport/bindings-cpp');
|
||||
exports.DarwinBinding = {
|
||||
list() {
|
||||
debug('list');
|
||||
return (0, load_bindings_1.asyncList)();
|
||||
},
|
||||
async open(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
if (!options.path) {
|
||||
throw new TypeError('"path" is not a valid port');
|
||||
}
|
||||
if (!options.baudRate) {
|
||||
throw new TypeError('"baudRate" is not a valid baudRate');
|
||||
}
|
||||
debug('open');
|
||||
const openOptions = Object.assign({ vmin: 1, vtime: 0, dataBits: 8, lock: true, stopBits: 1, parity: 'none', rtscts: false, xon: false, xoff: false, xany: false, hupcl: true }, options);
|
||||
const fd = await (0, load_bindings_1.asyncOpen)(openOptions.path, openOptions);
|
||||
return new DarwinPortBinding(fd, openOptions);
|
||||
},
|
||||
};
|
||||
/**
|
||||
* The Darwin binding layer for OSX
|
||||
*/
|
||||
class DarwinPortBinding {
|
||||
constructor(fd, options) {
|
||||
this.fd = fd;
|
||||
this.openOptions = options;
|
||||
this.poller = new poller_1.Poller(fd);
|
||||
this.writeOperation = null;
|
||||
}
|
||||
get isOpen() {
|
||||
return this.fd !== null;
|
||||
}
|
||||
async close() {
|
||||
debug('close');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
const fd = this.fd;
|
||||
this.poller.stop();
|
||||
this.poller.destroy();
|
||||
this.fd = null;
|
||||
await (0, load_bindings_1.asyncClose)(fd);
|
||||
}
|
||||
async read(buffer, offset, length) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (typeof offset !== 'number' || isNaN(offset)) {
|
||||
throw new TypeError(`"offset" is not an integer got "${isNaN(offset) ? 'NaN' : typeof offset}"`);
|
||||
}
|
||||
if (typeof length !== 'number' || isNaN(length)) {
|
||||
throw new TypeError(`"length" is not an integer got "${isNaN(length) ? 'NaN' : typeof length}"`);
|
||||
}
|
||||
debug('read');
|
||||
if (buffer.length < offset + length) {
|
||||
throw new Error('buffer is too small');
|
||||
}
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, unix_read_1.unixRead)({ binding: this, buffer, offset, length });
|
||||
}
|
||||
async write(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
debug('write', buffer.length, 'bytes');
|
||||
if (!this.isOpen) {
|
||||
debug('write', 'error port is not open');
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
this.writeOperation = (async () => {
|
||||
if (buffer.length === 0) {
|
||||
return;
|
||||
}
|
||||
await (0, unix_write_1.unixWrite)({ binding: this, buffer });
|
||||
this.writeOperation = null;
|
||||
})();
|
||||
return this.writeOperation;
|
||||
}
|
||||
async update(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw TypeError('"options" is not an object');
|
||||
}
|
||||
if (typeof options.baudRate !== 'number') {
|
||||
throw new TypeError('"options.baudRate" is not a number');
|
||||
}
|
||||
debug('update');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncUpdate)(this.fd, options);
|
||||
}
|
||||
async set(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
debug('set', options);
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncSet)(this.fd, options);
|
||||
}
|
||||
async get() {
|
||||
debug('get');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, load_bindings_1.asyncGet)(this.fd);
|
||||
}
|
||||
async getBaudRate() {
|
||||
debug('getBaudRate');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
throw new Error('getBaudRate is not implemented on darwin');
|
||||
}
|
||||
async flush() {
|
||||
debug('flush');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncFlush)(this.fd);
|
||||
}
|
||||
async drain() {
|
||||
debug('drain');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await this.writeOperation;
|
||||
await (0, load_bindings_1.asyncDrain)(this.fd);
|
||||
}
|
||||
}
|
||||
exports.DarwinPortBinding = DarwinPortBinding;
|
||||
7
node_modules/@serialport/bindings-cpp/dist/errors.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import { BindingsErrorInterface } from '@serialport/bindings-interface';
export declare class BindingsError extends Error implements BindingsErrorInterface {
    canceled: boolean;
    constructor(message: string, { canceled }?: {
        canceled?: boolean | undefined;
    });
}
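The `canceled` flag is how callers can tell an operation that was interrupted by `close()` apart from a real I/O failure. A small hedged sketch (the `safeRead` helper is hypothetical):

```ts
import { BindingsError, BindingPortInterface } from '@serialport/bindings-cpp';

// Hypothetical helper: returns null when the read was canceled by close(), rethrows real errors.
async function safeRead(port: BindingPortInterface, buffer: Buffer) {
  try {
    return await port.read(buffer, 0, buffer.length);
  } catch (err) {
    if (err instanceof BindingsError && err.canceled) {
      return null;
    }
    throw err;
  }
}
```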
10
node_modules/@serialport/bindings-cpp/dist/errors.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BindingsError = void 0;
class BindingsError extends Error {
    constructor(message, { canceled = false } = {}) {
        super(message);
        this.canceled = canceled;
    }
}
exports.BindingsError = BindingsError;
13
node_modules/@serialport/bindings-cpp/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
import { DarwinBindingInterface } from './darwin';
import { LinuxBindingInterface } from './linux';
import { WindowsBindingInterface } from './win32';
export * from '@serialport/bindings-interface';
export * from './darwin';
export * from './linux';
export * from './win32';
export * from './errors';
export type AutoDetectTypes = DarwinBindingInterface | WindowsBindingInterface | LinuxBindingInterface;
/**
 * This is an auto detected binding for your current platform
 */
export declare function autoDetect(): AutoDetectTypes;
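A usage sketch for `autoDetect()` (not part of the vendored file): it returns the platform's binding object, which exposes the same `list()`/`open()` surface on every OS.

```ts
import { autoDetect } from '@serialport/bindings-cpp';

const binding = autoDetect(); // WindowsBinding, DarwinBinding or LinuxBinding

async function openFirstPort() {
  const ports = await binding.list();
  if (ports.length === 0) {
    throw new Error('no serial ports found');
  }
  return binding.open({ path: ports[0].path, baudRate: 115200 });
}
```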
48
node_modules/@serialport/bindings-cpp/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.autoDetect = void 0;
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const darwin_1 = require("./darwin");
|
||||
const linux_1 = require("./linux");
|
||||
const win32_1 = require("./win32");
|
||||
const debug = (0, debug_1.default)('serialport/bindings-cpp');
|
||||
__exportStar(require("@serialport/bindings-interface"), exports);
|
||||
__exportStar(require("./darwin"), exports);
|
||||
__exportStar(require("./linux"), exports);
|
||||
__exportStar(require("./win32"), exports);
|
||||
__exportStar(require("./errors"), exports);
|
||||
/**
|
||||
* This is an auto detected binding for your current platform
|
||||
*/
|
||||
function autoDetect() {
|
||||
switch (process.platform) {
|
||||
case 'win32':
|
||||
debug('loading WindowsBinding');
|
||||
return win32_1.WindowsBinding;
|
||||
case 'darwin':
|
||||
debug('loading DarwinBinding');
|
||||
return darwin_1.DarwinBinding;
|
||||
default:
|
||||
debug('loading LinuxBinding');
|
||||
return linux_1.LinuxBinding;
|
||||
}
|
||||
}
|
||||
exports.autoDetect = autoDetect;
|
||||
4
node_modules/@serialport/bindings-cpp/dist/linux-list.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/// <reference types="node" />
import { spawn } from 'child_process';
import { PortInfo } from '@serialport/bindings-interface';
export declare function linuxList(spawnCmd?: typeof spawn): Promise<PortInfo[]>;
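`linuxList()` (declared here, implemented in `linux-list.js` below) shells out to `udevadm info -e` and folds the `P:`/`N:`/`E:` records into `PortInfo` objects. A sketch of that mapping; the udevadm record and the deep import path are illustrative assumptions:

```ts
import { linuxList } from '@serialport/bindings-cpp/dist/linux-list';

// A udevadm record for a USB serial adapter looks roughly like this (values are made up):
//   P: /devices/.../ttyUSB0/tty/ttyUSB0
//   N: ttyUSB0
//   E: DEVNAME=/dev/ttyUSB0
//   E: ID_VENDOR_ID=0403
//   E: ID_MODEL_ID=6001
//   E: ID_SERIAL_SHORT=A7045PDV
// which linuxList() would turn into roughly:
//   { path: '/dev/ttyUSB0', vendorId: '0403', productId: '6001', serialNumber: 'A7045PDV', ... }

linuxList().then(ports => console.log(ports));
```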
115
node_modules/@serialport/bindings-cpp/dist/linux-list.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.linuxList = void 0;
|
||||
const child_process_1 = require("child_process");
|
||||
const parser_readline_1 = require("@serialport/parser-readline");
|
||||
// get only serial port names
|
||||
function checkPathOfDevice(path) {
|
||||
return /(tty(S|WCH|ACM|USB|AMA|MFD|O|XRUSB)|rfcomm)/.test(path) && path;
|
||||
}
|
||||
function propName(name) {
|
||||
return {
|
||||
DEVNAME: 'path',
|
||||
ID_VENDOR_ENC: 'manufacturer',
|
||||
ID_SERIAL_SHORT: 'serialNumber',
|
||||
ID_VENDOR_ID: 'vendorId',
|
||||
ID_MODEL_ID: 'productId',
|
||||
DEVLINKS: 'pnpId',
|
||||
/**
|
||||
* Workaround for systemd defect
|
||||
* see https://github.com/serialport/bindings-cpp/issues/115
|
||||
*/
|
||||
ID_USB_VENDOR_ENC: 'manufacturer',
|
||||
ID_USB_SERIAL_SHORT: 'serialNumber',
|
||||
ID_USB_VENDOR_ID: 'vendorId',
|
||||
ID_USB_MODEL_ID: 'productId',
|
||||
// End of workaround
|
||||
}[name.toUpperCase()];
|
||||
}
|
||||
function decodeHexEscape(str) {
|
||||
return str.replace(/\\x([a-fA-F0-9]{2})/g, (a, b) => {
|
||||
return String.fromCharCode(parseInt(b, 16));
|
||||
});
|
||||
}
|
||||
function propVal(name, val) {
|
||||
if (name === 'pnpId') {
|
||||
const match = val.match(/\/by-id\/([^\s]+)/);
|
||||
return (match === null || match === void 0 ? void 0 : match[1]) || undefined;
|
||||
}
|
||||
if (name === 'manufacturer') {
|
||||
return decodeHexEscape(val);
|
||||
}
|
||||
if (/^0x/.test(val)) {
|
||||
return val.substr(2);
|
||||
}
|
||||
return val;
|
||||
}
|
||||
function linuxList(spawnCmd = child_process_1.spawn) {
|
||||
const ports = [];
|
||||
const udevadm = spawnCmd('udevadm', ['info', '-e']);
|
||||
const lines = udevadm.stdout.pipe(new parser_readline_1.ReadlineParser());
|
||||
let skipPort = false;
|
||||
let port = {
|
||||
path: '',
|
||||
manufacturer: undefined,
|
||||
serialNumber: undefined,
|
||||
pnpId: undefined,
|
||||
locationId: undefined,
|
||||
vendorId: undefined,
|
||||
productId: undefined,
|
||||
};
|
||||
lines.on('data', (line) => {
|
||||
const lineType = line.slice(0, 1);
|
||||
const data = line.slice(3);
|
||||
// new port entry
|
||||
if (lineType === 'P') {
|
||||
port = {
|
||||
path: '',
|
||||
manufacturer: undefined,
|
||||
serialNumber: undefined,
|
||||
pnpId: undefined,
|
||||
locationId: undefined,
|
||||
vendorId: undefined,
|
||||
productId: undefined,
|
||||
};
|
||||
skipPort = false;
|
||||
return;
|
||||
}
|
||||
if (skipPort) {
|
||||
return;
|
||||
}
|
||||
// Check dev name and save port if it matches flag to skip the rest of the data if not
|
||||
if (lineType === 'N') {
|
||||
if (checkPathOfDevice(data)) {
|
||||
ports.push(port);
|
||||
}
|
||||
else {
|
||||
skipPort = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
// parse data about each port
|
||||
if (lineType === 'E') {
|
||||
const keyValue = data.match(/^(.+)=(.*)/);
|
||||
if (!keyValue) {
|
||||
return;
|
||||
}
|
||||
const key = propName(keyValue[1]);
|
||||
if (!key) {
|
||||
return;
|
||||
}
|
||||
port[key] = propVal(key, keyValue[2]);
|
||||
}
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
udevadm.on('close', (code) => {
|
||||
if (code) {
|
||||
reject(new Error(`Error listing ports udevadm exited with error code: ${code}`));
|
||||
}
|
||||
});
|
||||
udevadm.on('error', reject);
|
||||
lines.on('error', reject);
|
||||
lines.on('finish', () => resolve(ports));
|
||||
});
|
||||
}
|
||||
exports.linuxList = linuxList;
|
||||
46
node_modules/@serialport/bindings-cpp/dist/linux.d.ts
generated
vendored
Normal file
@@ -0,0 +1,46 @@
/// <reference types="node" />
import { Poller } from './poller';
import { BindingInterface, OpenOptions, PortStatus, SetOptions, UpdateOptions } from '@serialport/bindings-interface';
import { BindingPortInterface } from '.';
export interface LinuxOpenOptions extends OpenOptions {
    /** Defaults to none */
    parity?: 'none' | 'even' | 'odd';
    /** see [`man termios`](http://linux.die.net/man/3/termios) defaults to 1 */
    vmin?: number;
    /** see [`man termios`](http://linux.die.net/man/3/termios) defaults to 0 */
    vtime?: number;
}
export interface LinuxPortStatus extends PortStatus {
    lowLatency: boolean;
}
export interface LinuxSetOptions extends SetOptions {
    /** Low latency mode */
    lowLatency?: boolean;
}
export type LinuxBindingInterface = BindingInterface<LinuxPortBinding, LinuxOpenOptions>;
export declare const LinuxBinding: LinuxBindingInterface;
/**
 * The linux binding layer
 */
export declare class LinuxPortBinding implements BindingPortInterface {
    readonly openOptions: Required<LinuxOpenOptions>;
    readonly poller: Poller;
    private writeOperation;
    fd: number | null;
    constructor(fd: number, openOptions: Required<LinuxOpenOptions>);
    get isOpen(): boolean;
    close(): Promise<void>;
    read(buffer: Buffer, offset: number, length: number): Promise<{
        buffer: Buffer;
        bytesRead: number;
    }>;
    write(buffer: Buffer): Promise<void>;
    update(options: UpdateOptions): Promise<void>;
    set(options: LinuxSetOptions): Promise<void>;
    get(): Promise<LinuxPortStatus>;
    getBaudRate(): Promise<{
        baudRate: number;
    }>;
    flush(): Promise<void>;
    drain(): Promise<void>;
}
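Beyond the common options, the Linux binding exposes a low-latency flag through `set()` and reports it back from `get()`. A minimal sketch (the port path is supplied by the caller):

```ts
import { LinuxBinding } from '@serialport/bindings-cpp';

async function openLowLatency(path: string) {
  const port = await LinuxBinding.open({ path, baudRate: 115200 });
  await port.set({ lowLatency: true });    // LinuxSetOptions
  const { lowLatency } = await port.get(); // LinuxPortStatus
  console.log('low latency enabled:', lowLatency);
  return port;
}
```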
150
node_modules/@serialport/bindings-cpp/dist/linux.js
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.LinuxPortBinding = exports.LinuxBinding = void 0;
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const linux_list_1 = require("./linux-list");
|
||||
const poller_1 = require("./poller");
|
||||
const unix_read_1 = require("./unix-read");
|
||||
const unix_write_1 = require("./unix-write");
|
||||
const load_bindings_1 = require("./load-bindings");
|
||||
const debug = (0, debug_1.default)('serialport/bindings-cpp');
|
||||
exports.LinuxBinding = {
|
||||
list() {
|
||||
debug('list');
|
||||
return (0, linux_list_1.linuxList)();
|
||||
},
|
||||
async open(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
if (!options.path) {
|
||||
throw new TypeError('"path" is not a valid port');
|
||||
}
|
||||
if (!options.baudRate) {
|
||||
throw new TypeError('"baudRate" is not a valid baudRate');
|
||||
}
|
||||
debug('open');
|
||||
const openOptions = Object.assign({ vmin: 1, vtime: 0, dataBits: 8, lock: true, stopBits: 1, parity: 'none', rtscts: false, xon: false, xoff: false, xany: false, hupcl: true }, options);
|
||||
const fd = await (0, load_bindings_1.asyncOpen)(openOptions.path, openOptions);
|
||||
this.fd = fd;
|
||||
return new LinuxPortBinding(fd, openOptions);
|
||||
},
|
||||
};
|
||||
/**
|
||||
* The linux binding layer
|
||||
*/
|
||||
class LinuxPortBinding {
|
||||
constructor(fd, openOptions) {
|
||||
this.fd = fd;
|
||||
this.openOptions = openOptions;
|
||||
this.poller = new poller_1.Poller(fd);
|
||||
this.writeOperation = null;
|
||||
}
|
||||
get isOpen() {
|
||||
return this.fd !== null;
|
||||
}
|
||||
async close() {
|
||||
debug('close');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
const fd = this.fd;
|
||||
this.poller.stop();
|
||||
this.poller.destroy();
|
||||
this.fd = null;
|
||||
await (0, load_bindings_1.asyncClose)(fd);
|
||||
}
|
||||
async read(buffer, offset, length) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (typeof offset !== 'number' || isNaN(offset)) {
|
||||
throw new TypeError(`"offset" is not an integer got "${isNaN(offset) ? 'NaN' : typeof offset}"`);
|
||||
}
|
||||
if (typeof length !== 'number' || isNaN(length)) {
|
||||
throw new TypeError(`"length" is not an integer got "${isNaN(length) ? 'NaN' : typeof length}"`);
|
||||
}
|
||||
debug('read');
|
||||
if (buffer.length < offset + length) {
|
||||
throw new Error('buffer is too small');
|
||||
}
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, unix_read_1.unixRead)({ binding: this, buffer, offset, length });
|
||||
}
|
||||
async write(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
debug('write', buffer.length, 'bytes');
|
||||
if (!this.isOpen) {
|
||||
debug('write', 'error port is not open');
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
this.writeOperation = (async () => {
|
||||
if (buffer.length === 0) {
|
||||
return;
|
||||
}
|
||||
await (0, unix_write_1.unixWrite)({ binding: this, buffer });
|
||||
this.writeOperation = null;
|
||||
})();
|
||||
return this.writeOperation;
|
||||
}
|
||||
async update(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw TypeError('"options" is not an object');
|
||||
}
|
||||
if (typeof options.baudRate !== 'number') {
|
||||
throw new TypeError('"options.baudRate" is not a number');
|
||||
}
|
||||
debug('update');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncUpdate)(this.fd, options);
|
||||
}
|
||||
async set(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
debug('set');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncSet)(this.fd, options);
|
||||
}
|
||||
async get() {
|
||||
debug('get');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, load_bindings_1.asyncGet)(this.fd);
|
||||
}
|
||||
async getBaudRate() {
|
||||
debug('getBaudRate');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, load_bindings_1.asyncGetBaudRate)(this.fd);
|
||||
}
|
||||
async flush() {
|
||||
debug('flush');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncFlush)(this.fd);
|
||||
}
|
||||
async drain() {
|
||||
debug('drain');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await this.writeOperation;
|
||||
await (0, load_bindings_1.asyncDrain)(this.fd);
|
||||
}
|
||||
}
|
||||
exports.LinuxPortBinding = LinuxPortBinding;
|
||||
11
node_modules/@serialport/bindings-cpp/dist/load-bindings.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
export declare const asyncClose: Function;
export declare const asyncDrain: Function;
export declare const asyncFlush: Function;
export declare const asyncGet: Function;
export declare const asyncGetBaudRate: Function;
export declare const asyncList: Function;
export declare const asyncOpen: Function;
export declare const asyncSet: Function;
export declare const asyncUpdate: Function;
export declare const asyncRead: Function;
export declare const asyncWrite: Function;
22
node_modules/@serialport/bindings-cpp/dist/load-bindings.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.asyncWrite = exports.asyncRead = exports.asyncUpdate = exports.asyncSet = exports.asyncOpen = exports.asyncList = exports.asyncGetBaudRate = exports.asyncGet = exports.asyncFlush = exports.asyncDrain = exports.asyncClose = void 0;
|
||||
const node_gyp_build_1 = __importDefault(require("node-gyp-build"));
|
||||
const util_1 = require("util");
|
||||
const path_1 = require("path");
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const binding = (0, node_gyp_build_1.default)((0, path_1.join)(__dirname, '../'));
|
||||
exports.asyncClose = binding.close ? (0, util_1.promisify)(binding.close) : async () => { throw new Error('"binding.close" Method not implemented'); };
|
||||
exports.asyncDrain = binding.drain ? (0, util_1.promisify)(binding.drain) : async () => { throw new Error('"binding.drain" Method not implemented'); };
|
||||
exports.asyncFlush = binding.flush ? (0, util_1.promisify)(binding.flush) : async () => { throw new Error('"binding.flush" Method not implemented'); };
|
||||
exports.asyncGet = binding.get ? (0, util_1.promisify)(binding.get) : async () => { throw new Error('"binding.get" Method not implemented'); };
|
||||
exports.asyncGetBaudRate = binding.getBaudRate ? (0, util_1.promisify)(binding.getBaudRate) : async () => { throw new Error('"binding.getBaudRate" Method not implemented'); };
|
||||
exports.asyncList = binding.list ? (0, util_1.promisify)(binding.list) : async () => { throw new Error('"binding.list" Method not implemented'); };
|
||||
exports.asyncOpen = binding.open ? (0, util_1.promisify)(binding.open) : async () => { throw new Error('"binding.open" Method not implemented'); };
|
||||
exports.asyncSet = binding.set ? (0, util_1.promisify)(binding.set) : async () => { throw new Error('"binding.set" Method not implemented'); };
|
||||
exports.asyncUpdate = binding.update ? (0, util_1.promisify)(binding.update) : async () => { throw new Error('"binding.update" Method not implemented'); };
|
||||
exports.asyncRead = binding.read ? (0, util_1.promisify)(binding.read) : async () => { throw new Error('"binding.read" Method not implemented'); };
|
||||
exports.asyncWrite = binding.write ? (0, util_1.promisify)(binding.write) : async () => { throw new Error('"binding.write" Method not implemented'); };
|
||||
40
node_modules/@serialport/bindings-cpp/dist/poller.d.ts
generated
vendored
Normal file
@@ -0,0 +1,40 @@
/// <reference types="node" />
import { EventEmitter } from 'events';
interface PollerClass {
    new (fd: number, cb: (err: Error, flag: number) => void): PollerInstance;
}
interface PollerInstance {
    poll(flag: number): void;
    stop(): void;
    destroy(): void;
}
export declare const EVENTS: {
    UV_READABLE: number;
    UV_WRITABLE: number;
    UV_DISCONNECT: number;
};
/**
 * Polls unix systems for readable or writable states of a file or serialport
 */
export declare class Poller extends EventEmitter {
    poller: PollerInstance;
    constructor(fd: number, FDPoller?: PollerClass);
    /**
     * Wait for the next event to occur
     * @param {string} event ('readable'|'writable'|'disconnect')
     * @returns {Poller} returns itself
     */
    once(event: 'readable' | 'writable' | 'disconnect', callback: (err: null | Error) => void): this;
    /**
     * Ask the bindings to listen for an event, it is recommended to use `.once()` for easy use
     * @param {EVENTS} eventFlag polls for an event or group of events based upon a flag.
     */
    poll(eventFlag?: number): void;
    /**
     * Stop listening for events and cancel all outstanding listening with an error
     */
    stop(): void;
    destroy(): void;
    emitCanceled(): void;
}
export {};
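`Poller` is an internal helper: the unix read/write paths use it to park a promise until the file descriptor becomes readable or writable again (see `unix-read.js` and `unix-write.js` below). A rough sketch of that pattern; the deep import and the fd value are placeholders:

```ts
import { Poller } from '@serialport/bindings-cpp/dist/poller';

// Resolve once the fd is readable again; reject on poll error (e.g. the poller was stopped).
function waitReadable(poller: Poller): Promise<void> {
  return new Promise((resolve, reject) => {
    poller.once('readable', err => (err ? reject(err) : resolve()));
  });
}

const poller = new Poller(7); // 7 is a placeholder file descriptor
waitReadable(poller).then(() => console.log('fd is readable'));
```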
104
node_modules/@serialport/bindings-cpp/dist/poller.js
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Poller = exports.EVENTS = void 0;
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const events_1 = require("events");
|
||||
const path_1 = require("path");
|
||||
const node_gyp_build_1 = __importDefault(require("node-gyp-build"));
|
||||
const errors_1 = require("./errors");
|
||||
const { Poller: PollerBindings } = (0, node_gyp_build_1.default)((0, path_1.join)(__dirname, '../'));
|
||||
const logger = (0, debug_1.default)('serialport/bindings-cpp/poller');
|
||||
exports.EVENTS = {
|
||||
UV_READABLE: 0b0001,
|
||||
UV_WRITABLE: 0b0010,
|
||||
UV_DISCONNECT: 0b0100,
|
||||
};
|
||||
function handleEvent(error, eventFlag) {
|
||||
if (error) {
|
||||
logger('error', error);
|
||||
this.emit('readable', error);
|
||||
this.emit('writable', error);
|
||||
this.emit('disconnect', error);
|
||||
return;
|
||||
}
|
||||
if (eventFlag & exports.EVENTS.UV_READABLE) {
|
||||
logger('received "readable"');
|
||||
this.emit('readable', null);
|
||||
}
|
||||
if (eventFlag & exports.EVENTS.UV_WRITABLE) {
|
||||
logger('received "writable"');
|
||||
this.emit('writable', null);
|
||||
}
|
||||
if (eventFlag & exports.EVENTS.UV_DISCONNECT) {
|
||||
logger('received "disconnect"');
|
||||
this.emit('disconnect', null);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Polls unix systems for readable or writable states of a file or serialport
|
||||
*/
|
||||
class Poller extends events_1.EventEmitter {
|
||||
constructor(fd, FDPoller = PollerBindings) {
|
||||
logger('Creating poller');
|
||||
super();
|
||||
this.poller = new FDPoller(fd, handleEvent.bind(this));
|
||||
}
|
||||
/**
|
||||
* Wait for the next event to occur
|
||||
* @param {string} event ('readable'|'writable'|'disconnect')
|
||||
* @returns {Poller} returns itself
|
||||
*/
|
||||
once(event, callback) {
|
||||
switch (event) {
|
||||
case 'readable':
|
||||
this.poll(exports.EVENTS.UV_READABLE);
|
||||
break;
|
||||
case 'writable':
|
||||
this.poll(exports.EVENTS.UV_WRITABLE);
|
||||
break;
|
||||
case 'disconnect':
|
||||
this.poll(exports.EVENTS.UV_DISCONNECT);
|
||||
break;
|
||||
}
|
||||
return super.once(event, callback);
|
||||
}
|
||||
/**
|
||||
* Ask the bindings to listen for an event, it is recommended to use `.once()` for easy use
|
||||
* @param {EVENTS} eventFlag polls for an event or group of events based upon a flag.
|
||||
*/
|
||||
poll(eventFlag = 0) {
|
||||
if (eventFlag & exports.EVENTS.UV_READABLE) {
|
||||
logger('Polling for "readable"');
|
||||
}
|
||||
if (eventFlag & exports.EVENTS.UV_WRITABLE) {
|
||||
logger('Polling for "writable"');
|
||||
}
|
||||
if (eventFlag & exports.EVENTS.UV_DISCONNECT) {
|
||||
logger('Polling for "disconnect"');
|
||||
}
|
||||
this.poller.poll(eventFlag);
|
||||
}
|
||||
/**
|
||||
* Stop listening for events and cancel all outstanding listening with an error
|
||||
*/
|
||||
stop() {
|
||||
logger('Stopping poller');
|
||||
this.poller.stop();
|
||||
this.emitCanceled();
|
||||
}
|
||||
destroy() {
|
||||
logger('Destroying poller');
|
||||
this.poller.destroy();
|
||||
this.emitCanceled();
|
||||
}
|
||||
emitCanceled() {
|
||||
const err = new errors_1.BindingsError('Canceled', { canceled: true });
|
||||
this.emit('readable', err);
|
||||
this.emit('writable', err);
|
||||
this.emit('disconnect', err);
|
||||
}
|
||||
}
|
||||
exports.Poller = Poller;
|
||||
18
node_modules/@serialport/bindings-cpp/dist/unix-read.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
/// <reference types="node" />
/// <reference types="node" />
import { read as fsRead } from 'fs';
import { LinuxPortBinding } from './linux';
import { DarwinPortBinding } from './darwin';
declare const readAsync: typeof fsRead.__promisify__;
interface UnixReadOptions {
    binding: LinuxPortBinding | DarwinPortBinding;
    buffer: Buffer;
    offset: number;
    length: number;
    fsReadAsync?: typeof readAsync;
}
export declare const unixRead: ({ binding, buffer, offset, length, fsReadAsync, }: UnixReadOptions) => Promise<{
    buffer: Buffer;
    bytesRead: number;
}>;
export {};
55
node_modules/@serialport/bindings-cpp/dist/unix-read.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.unixRead = void 0;
|
||||
const util_1 = require("util");
|
||||
const fs_1 = require("fs");
|
||||
const errors_1 = require("./errors");
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const logger = (0, debug_1.default)('serialport/bindings-cpp/unixRead');
|
||||
const readAsync = (0, util_1.promisify)(fs_1.read);
|
||||
const readable = (binding) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!binding.poller) {
|
||||
throw new Error('No poller on bindings');
|
||||
}
|
||||
binding.poller.once('readable', err => (err ? reject(err) : resolve()));
|
||||
});
|
||||
};
|
||||
const unixRead = async ({ binding, buffer, offset, length, fsReadAsync = readAsync, }) => {
|
||||
logger('Starting read');
|
||||
if (!binding.isOpen || !binding.fd) {
|
||||
throw new errors_1.BindingsError('Port is not open', { canceled: true });
|
||||
}
|
||||
try {
|
||||
const { bytesRead } = await fsReadAsync(binding.fd, buffer, offset, length, null);
|
||||
if (bytesRead === 0) {
|
||||
return (0, exports.unixRead)({ binding, buffer, offset, length, fsReadAsync });
|
||||
}
|
||||
logger('Finished read', bytesRead, 'bytes');
|
||||
return { bytesRead, buffer };
|
||||
}
|
||||
catch (err) {
|
||||
logger('read error', err);
|
||||
if (err.code === 'EAGAIN' || err.code === 'EWOULDBLOCK' || err.code === 'EINTR') {
|
||||
if (!binding.isOpen) {
|
||||
throw new errors_1.BindingsError('Port is not open', { canceled: true });
|
||||
}
|
||||
logger('waiting for readable because of code:', err.code);
|
||||
await readable(binding);
|
||||
return (0, exports.unixRead)({ binding, buffer, offset, length, fsReadAsync });
|
||||
}
|
||||
const disconnectError = err.code === 'EBADF' || // Bad file number means we got closed
|
||||
err.code === 'ENXIO' || // No such device or address probably usb disconnect
|
||||
err.code === 'UNKNOWN' ||
|
||||
err.errno === -1; // generic error
|
||||
if (disconnectError) {
|
||||
err.disconnect = true;
|
||||
logger('disconnecting', err);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
exports.unixRead = unixRead;
|
||||
14
node_modules/@serialport/bindings-cpp/dist/unix-write.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
/// <reference types="node" />
/// <reference types="node" />
import { write } from 'fs';
import { LinuxPortBinding } from './linux';
import { DarwinPortBinding } from './darwin';
declare const writeAsync: typeof write.__promisify__;
interface UnixWriteOptions {
    binding: LinuxPortBinding | DarwinPortBinding;
    buffer: Buffer;
    offset?: number;
    fsWriteAsync?: typeof writeAsync;
}
export declare const unixWrite: ({ binding, buffer, offset, fsWriteAsync }: UnixWriteOptions) => Promise<void>;
export {};
56
node_modules/@serialport/bindings-cpp/dist/unix-write.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.unixWrite = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const util_1 = require("util");
|
||||
const logger = (0, debug_1.default)('serialport/bindings-cpp/unixWrite');
|
||||
const writeAsync = (0, util_1.promisify)(fs_1.write);
|
||||
const writable = (binding) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
binding.poller.once('writable', err => (err ? reject(err) : resolve()));
|
||||
});
|
||||
};
|
||||
const unixWrite = async ({ binding, buffer, offset = 0, fsWriteAsync = writeAsync }) => {
|
||||
const bytesToWrite = buffer.length - offset;
|
||||
logger('Starting write', buffer.length, 'bytes offset', offset, 'bytesToWrite', bytesToWrite);
|
||||
if (!binding.isOpen || !binding.fd) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
try {
|
||||
const { bytesWritten } = await fsWriteAsync(binding.fd, buffer, offset, bytesToWrite);
|
||||
logger('write returned: wrote', bytesWritten, 'bytes');
|
||||
if (bytesWritten + offset < buffer.length) {
|
||||
if (!binding.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, exports.unixWrite)({ binding, buffer, offset: bytesWritten + offset, fsWriteAsync });
|
||||
}
|
||||
logger('Finished writing', bytesWritten + offset, 'bytes');
|
||||
}
|
||||
catch (err) {
|
||||
logger('write errored', err);
|
||||
if (err.code === 'EAGAIN' || err.code === 'EWOULDBLOCK' || err.code === 'EINTR') {
|
||||
if (!binding.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
logger('waiting for writable because of code:', err.code);
|
||||
await writable(binding);
|
||||
return (0, exports.unixWrite)({ binding, buffer, offset, fsWriteAsync });
|
||||
}
|
||||
const disconnectError = err.code === 'EBADF' || // Bad file number means we got closed
|
||||
err.code === 'ENXIO' || // No such device or address probably usb disconnect
|
||||
err.code === 'UNKNOWN' ||
|
||||
err.errno === -1; // generic error
|
||||
if (disconnectError) {
|
||||
err.disconnect = true;
|
||||
logger('disconnecting', err);
|
||||
}
|
||||
logger('error', err);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
exports.unixWrite = unixWrite;
|
||||
1
node_modules/@serialport/bindings-cpp/dist/win32-sn-parser.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
export declare const serialNumParser: (pnpId?: string) => string | null;
17
node_modules/@serialport/bindings-cpp/dist/win32-sn-parser.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.serialNumParser = void 0;
const PARSERS = [/USB\\(?:.+)\\(.+)/, /FTDIBUS\\(?:.+)\+(.+?)A?\\.+/];
const serialNumParser = (pnpId) => {
    if (!pnpId) {
        return null;
    }
    for (const parser of PARSERS) {
        const sn = pnpId.match(parser);
        if (sn) {
            return sn[1];
        }
    }
    return null;
};
exports.serialNumParser = serialNumParser;
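On Windows the serial number often only appears inside the PNP device id, so `list()` recovers it with the regexes above. Two made-up ids showing the shapes each regex targets (deep import shown purely for illustration):

```ts
import { serialNumParser } from '@serialport/bindings-cpp/dist/win32-sn-parser';

// Both ids below are invented examples of the patterns the two regexes match.
serialNumParser('USB\\VID_2341&PID_0043\\85735313233351E021C1'); // -> '85735313233351E021C1'
serialNumParser('FTDIBUS\\VID_0403+PID_6001+A7045PDVA\\0000');   // -> 'A7045PDV'
```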
35
node_modules/@serialport/bindings-cpp/dist/win32.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
/// <reference types="node" />
import { BindingPortInterface } from '.';
import { BindingInterface, OpenOptions, PortStatus, SetOptions, UpdateOptions } from '@serialport/bindings-interface';
export interface WindowsOpenOptions extends OpenOptions {
    /** Device parity defaults to none */
    parity?: 'none' | 'even' | 'odd' | 'mark' | 'space';
    /** RTS mode defaults to handshake */
    rtsMode?: 'handshake' | 'enable' | 'toggle';
}
export type WindowsBindingInterface = BindingInterface<WindowsPortBinding, WindowsOpenOptions>;
export declare const WindowsBinding: WindowsBindingInterface;
/**
 * The Windows binding layer
 */
export declare class WindowsPortBinding implements BindingPortInterface {
    fd: null | number;
    writeOperation: Promise<void> | null;
    openOptions: Required<OpenOptions>;
    constructor(fd: number, options: Required<OpenOptions>);
    get isOpen(): boolean;
    close(): Promise<void>;
    read(buffer: Buffer, offset: number, length: number): Promise<{
        buffer: Buffer;
        bytesRead: number;
    }>;
    write(buffer: Buffer): Promise<void>;
    update(options: UpdateOptions): Promise<void>;
    set(options: SetOptions): Promise<void>;
    get(): Promise<PortStatus>;
    getBaudRate(): Promise<{
        baudRate: number;
    }>;
    flush(): Promise<void>;
    drain(): Promise<void>;
}
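`rtsMode` is the Windows-only open option documented in the declaration above (defaulting to 'handshake'). A hedged sketch; 'COM3' is a placeholder port name:

```ts
import { WindowsBinding } from '@serialport/bindings-cpp';

async function openCom() {
  // 'COM3' is a placeholder; rtsMode defaults to 'handshake' when omitted
  const port = await WindowsBinding.open({ path: 'COM3', baudRate: 9600, rtsMode: 'enable' });
  await port.write(Buffer.from('hello\r\n'));
  await port.drain();
  await port.close();
}
```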
162
node_modules/@serialport/bindings-cpp/dist/win32.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WindowsPortBinding = exports.WindowsBinding = void 0;
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
const _1 = require(".");
|
||||
const load_bindings_1 = require("./load-bindings");
|
||||
const win32_sn_parser_1 = require("./win32-sn-parser");
|
||||
const debug = (0, debug_1.default)('serialport/bindings-cpp');
|
||||
exports.WindowsBinding = {
|
||||
async list() {
|
||||
const ports = await (0, load_bindings_1.asyncList)();
|
||||
// Grab the serial number from the pnp id
|
||||
return ports.map(port => {
|
||||
if (port.pnpId && !port.serialNumber) {
|
||||
const serialNumber = (0, win32_sn_parser_1.serialNumParser)(port.pnpId);
|
||||
if (serialNumber) {
|
||||
return Object.assign(Object.assign({}, port), { serialNumber });
|
||||
}
|
||||
}
|
||||
return port;
|
||||
});
|
||||
},
|
||||
async open(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
if (!options.path) {
|
||||
throw new TypeError('"path" is not a valid port');
|
||||
}
|
||||
if (!options.baudRate) {
|
||||
throw new TypeError('"baudRate" is not a valid baudRate');
|
||||
}
|
||||
debug('open');
|
||||
const openOptions = Object.assign({ dataBits: 8, lock: true, stopBits: 1, parity: 'none', rtscts: false, rtsMode: 'handshake', xon: false, xoff: false, xany: false, hupcl: true }, options);
|
||||
const fd = await (0, load_bindings_1.asyncOpen)(openOptions.path, openOptions);
|
||||
return new WindowsPortBinding(fd, openOptions);
|
||||
},
|
||||
};
|
||||
/**
|
||||
* The Windows binding layer
|
||||
*/
|
||||
class WindowsPortBinding {
|
||||
constructor(fd, options) {
|
||||
this.fd = fd;
|
||||
this.openOptions = options;
|
||||
this.writeOperation = null;
|
||||
}
|
||||
get isOpen() {
|
||||
return this.fd !== null;
|
||||
}
|
||||
async close() {
|
||||
debug('close');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
const fd = this.fd;
|
||||
this.fd = null;
|
||||
await (0, load_bindings_1.asyncClose)(fd);
|
||||
}
|
||||
async read(buffer, offset, length) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
if (typeof offset !== 'number' || isNaN(offset)) {
|
||||
throw new TypeError(`"offset" is not an integer got "${isNaN(offset) ? 'NaN' : typeof offset}"`);
|
||||
}
|
||||
if (typeof length !== 'number' || isNaN(length)) {
|
||||
throw new TypeError(`"length" is not an integer got "${isNaN(length) ? 'NaN' : typeof length}"`);
|
||||
}
|
||||
debug('read');
|
||||
if (buffer.length < offset + length) {
|
||||
throw new Error('buffer is too small');
|
||||
}
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
try {
|
||||
const bytesRead = await (0, load_bindings_1.asyncRead)(this.fd, buffer, offset, length);
|
||||
return { bytesRead, buffer };
|
||||
}
|
||||
catch (err) {
|
||||
if (!this.isOpen) {
|
||||
throw new _1.BindingsError(err.message, { canceled: true });
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
async write(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) {
|
||||
throw new TypeError('"buffer" is not a Buffer');
|
||||
}
|
||||
debug('write', buffer.length, 'bytes');
|
||||
if (!this.isOpen) {
|
||||
debug('write', 'error port is not open');
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
this.writeOperation = (async () => {
|
||||
if (buffer.length === 0) {
|
||||
return;
|
||||
}
|
||||
await (0, load_bindings_1.asyncWrite)(this.fd, buffer);
|
||||
this.writeOperation = null;
|
||||
})();
|
||||
return this.writeOperation;
|
||||
}
|
||||
async update(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw TypeError('"options" is not an object');
|
||||
}
|
||||
if (typeof options.baudRate !== 'number') {
|
||||
throw new TypeError('"options.baudRate" is not a number');
|
||||
}
|
||||
debug('update');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncUpdate)(this.fd, options);
|
||||
}
|
||||
async set(options) {
|
||||
if (!options || typeof options !== 'object' || Array.isArray(options)) {
|
||||
throw new TypeError('"options" is not an object');
|
||||
}
|
||||
debug('set', options);
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncSet)(this.fd, options);
|
||||
}
|
||||
async get() {
|
||||
debug('get');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, load_bindings_1.asyncGet)(this.fd);
|
||||
}
|
||||
async getBaudRate() {
|
||||
debug('getBaudRate');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
return (0, load_bindings_1.asyncGetBaudRate)(this.fd);
|
||||
}
|
||||
async flush() {
|
||||
debug('flush');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await (0, load_bindings_1.asyncFlush)(this.fd);
|
||||
}
|
||||
async drain() {
|
||||
debug('drain');
|
||||
if (!this.isOpen) {
|
||||
throw new Error('Port is not open');
|
||||
}
|
||||
await this.writeOperation;
|
||||
await (0, load_bindings_1.asyncDrain)(this.fd);
|
||||
}
|
||||
}
|
||||
exports.WindowsPortBinding = WindowsPortBinding;
|
||||
21
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright 2010 Christopher Williams. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
3
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
# @serialport/parser-delimiter

See our api docs https://serialport.io/docs/api-parser-delimiter
23
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
/// <reference types="node" />
/// <reference types="node" />
import { Transform, TransformCallback, TransformOptions } from 'stream';
export interface DelimiterOptions extends TransformOptions {
    /** The delimiter on which to split incoming data. */
    delimiter: string | Buffer | number[];
    /** Should the delimiter be included at the end of data. Defaults to `false` */
    includeDelimiter?: boolean;
}
/**
 * A transform stream that emits data each time a byte sequence is received.
 * @extends Transform
 *
 * To use the `Delimiter` parser, provide a delimiter as a string, buffer, or array of bytes. Runs in O(n) time.
 */
export declare class DelimiterParser extends Transform {
    includeDelimiter: boolean;
    delimiter: Buffer;
    buffer: Buffer;
    constructor({ delimiter, includeDelimiter, ...options }: DelimiterOptions);
    _transform(chunk: Buffer, encoding: BufferEncoding, cb: TransformCallback): void;
    _flush(cb: TransformCallback): void;
}
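A usage sketch for the delimiter parser (any readable stream works as a source; the path below is a placeholder for a serial port stream):

```ts
import { DelimiterParser } from '@serialport/parser-delimiter';
import { createReadStream } from 'fs';

// Emits one chunk per record, splitting on the two-byte sequence \r\n.
const parser = new DelimiterParser({ delimiter: '\r\n' });
createReadStream('/dev/ttyUSB0').pipe(parser); // placeholder source stream
parser.on('data', chunk => console.log('record:', chunk.toString()));
```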
40
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DelimiterParser = void 0;
|
||||
const stream_1 = require("stream");
|
||||
/**
|
||||
* A transform stream that emits data each time a byte sequence is received.
|
||||
* @extends Transform
|
||||
*
|
||||
* To use the `Delimiter` parser, provide a delimiter as a string, buffer, or array of bytes. Runs in O(n) time.
|
||||
*/
|
||||
class DelimiterParser extends stream_1.Transform {
|
||||
constructor({ delimiter, includeDelimiter = false, ...options }) {
|
||||
super(options);
|
||||
if (delimiter === undefined) {
|
||||
throw new TypeError('"delimiter" is not a bufferable object');
|
||||
}
|
||||
if (delimiter.length === 0) {
|
||||
throw new TypeError('"delimiter" has a 0 or undefined length');
|
||||
}
|
||||
this.includeDelimiter = includeDelimiter;
|
||||
this.delimiter = Buffer.from(delimiter);
|
||||
this.buffer = Buffer.alloc(0);
|
||||
}
|
||||
_transform(chunk, encoding, cb) {
|
||||
let data = Buffer.concat([this.buffer, chunk]);
|
||||
let position;
|
||||
while ((position = data.indexOf(this.delimiter)) !== -1) {
|
||||
this.push(data.slice(0, position + (this.includeDelimiter ? this.delimiter.length : 0)));
|
||||
data = data.slice(position + this.delimiter.length);
|
||||
}
|
||||
this.buffer = data;
|
||||
cb();
|
||||
}
|
||||
_flush(cb) {
|
||||
this.push(this.buffer);
|
||||
this.buffer = Buffer.alloc(0);
|
||||
cb();
|
||||
}
|
||||
}
|
||||
exports.DelimiterParser = DelimiterParser;
|
||||
25
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter/package.json
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "@serialport/parser-delimiter",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"version": "11.0.0",
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc --build tsconfig-build.json"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/serialport/node-serialport.git"
|
||||
},
|
||||
"funding": "https://opencollective.com/serialport/donate",
|
||||
"devDependencies": {
|
||||
"typescript": "5.0.4"
|
||||
},
|
||||
"gitHead": "6a8202cd947c87ac70c9f3c84d60fe4b5f5d70a9"
|
||||
}
|
||||
21
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright 2010 Christopher Williams. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
3
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
# @serialport/parser-readline

See our api docs https://serialport.io/docs/api-parser-readline
19
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
/// <reference types="node" />
/// <reference types="node" />
import { DelimiterParser } from '@serialport/parser-delimiter';
import { TransformOptions } from 'stream';
export interface ReadlineOptions extends TransformOptions {
    /** delimiter to use defaults to \n */
    delimiter?: string | Buffer | number[];
    /** include the delimiter at the end of the packet defaults to false */
    includeDelimiter?: boolean;
    /** Defaults to utf8 */
    encoding?: BufferEncoding;
}
/**
 * A transform stream that emits data after a newline delimiter is received.
 * @summary To use the `Readline` parser, provide a delimiter (defaults to `\n`). Data is emitted as string controllable by the `encoding` option (defaults to `utf8`).
 */
export declare class ReadlineParser extends DelimiterParser {
    constructor(options?: ReadlineOptions);
}
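And the readline variant, which is `DelimiterParser` preconfigured with a `\n` delimiter and a string output encoding; same placeholder source as above:

```ts
import { ReadlineParser } from '@serialport/parser-readline';
import { createReadStream } from 'fs';

const parser = new ReadlineParser({ delimiter: '\r\n' }); // defaults: delimiter '\n', encoding 'utf8'
createReadStream('/dev/ttyUSB0').pipe(parser); // placeholder source stream
parser.on('data', (line: string) => console.log('line:', line));
```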
22
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ReadlineParser = void 0;
|
||||
const parser_delimiter_1 = require("@serialport/parser-delimiter");
|
||||
/**
|
||||
* A transform stream that emits data after a newline delimiter is received.
|
||||
* @summary To use the `Readline` parser, provide a delimiter (defaults to `\n`). Data is emitted as string controllable by the `encoding` option (defaults to `utf8`).
|
||||
*/
|
||||
class ReadlineParser extends parser_delimiter_1.DelimiterParser {
|
||||
constructor(options) {
|
||||
const opts = {
|
||||
delimiter: Buffer.from('\n', 'utf8'),
|
||||
encoding: 'utf8',
|
||||
...options,
|
||||
};
|
||||
if (typeof opts.delimiter === 'string') {
|
||||
opts.delimiter = Buffer.from(opts.delimiter, opts.encoding);
|
||||
}
|
||||
super(opts);
|
||||
}
|
||||
}
|
||||
exports.ReadlineParser = ReadlineParser;
|
||||
28
node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline/package.json
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "@serialport/parser-readline",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"version": "11.0.0",
|
||||
"dependencies": {
|
||||
"@serialport/parser-delimiter": "11.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc --build tsconfig-build.json"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/serialport/node-serialport.git"
|
||||
},
|
||||
"funding": "https://opencollective.com/serialport/donate",
|
||||
"devDependencies": {
|
||||
"typescript": "5.0.4"
|
||||
},
|
||||
"gitHead": "6a8202cd947c87ac70c9f3c84d60fe4b5f5d70a9"
|
||||
}
|
||||
113
node_modules/@serialport/bindings-cpp/package.json
generated
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
{
|
||||
"name": "@serialport/bindings-cpp",
|
||||
"description": "SerialPort Hardware bindings for node serialport written in c++",
|
||||
"version": "12.0.1",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"keywords": [
|
||||
"serialport-binding",
|
||||
"COM",
|
||||
"com port",
|
||||
"hardware",
|
||||
"iot",
|
||||
"modem",
|
||||
"serial port",
|
||||
"serial",
|
||||
"serialport",
|
||||
"tty",
|
||||
"UART"
|
||||
],
|
||||
"dependencies": {
|
||||
"@serialport/bindings-interface": "1.2.2",
|
||||
"@serialport/parser-readline": "11.0.0",
|
||||
"debug": "4.3.4",
|
||||
"node-addon-api": "7.0.0",
|
||||
"node-gyp-build": "4.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@semantic-release/exec": "6.0.3",
|
||||
"@serialport/binding-mock": "10.2.2",
|
||||
"@types/chai": "4.3.5",
|
||||
"@types/chai-subset": "1.3.3",
|
||||
"@types/debug": "4.1.8",
|
||||
"@types/mocha": "10.0.1",
|
||||
"@types/node": "18.16.20",
|
||||
"@typescript-eslint/eslint-plugin": "6.1.0",
|
||||
"@typescript-eslint/parser": "6.1.0",
|
||||
"cc": "3.0.1",
|
||||
"chai": "4.3.7",
|
||||
"chai-subset": "1.6.0",
|
||||
"esbuild": "0.18.15",
|
||||
"esbuild-register": "3.4.2",
|
||||
"eslint": "8.45.0",
|
||||
"mocha": "10.2.0",
|
||||
"node-abi": "3.45.0",
|
||||
"node-gyp": "9.4.0",
|
||||
"nyc": "15.1.0",
|
||||
"prebuildify": "5.0.1",
|
||||
"prebuildify-cross": "5.0.0",
|
||||
"semantic-release": "21.0.7",
|
||||
"shx": "0.3.4",
|
||||
"sinon": "15.2.0",
|
||||
"typescript": "5.1.6"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rm -rf dist && tsc -p tsconfig-build.json",
|
||||
"install": "node-gyp-build",
|
||||
"prebuildify": "prebuildify --napi --target 14.0.0 --force --strip --verbose",
|
||||
"prebuildify-cross": "prebuildify-cross --napi --target 14.0.0 --force --strip --verbose",
|
||||
"rebuild": "node-gyp rebuild",
|
||||
"format": "eslint lib test bin --fix",
|
||||
"lint": "eslint lib test bin && cc --verbose",
|
||||
"test": "nyc --reporter lcov --reporter text mocha",
|
||||
"test:arduino": "TEST_PORT=$(./bin/find-arduino.ts) npm test",
|
||||
"test:watch": "mocha -w",
|
||||
"semantic-release": "semantic-release",
|
||||
"typecheck": "tsc"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"license": "MIT",
|
||||
"gypfile": true,
|
||||
"cc": {
|
||||
"filter": [
|
||||
"legal/copyright",
|
||||
"build/include"
|
||||
],
|
||||
"files": [
|
||||
"src/*.cpp",
|
||||
"src/*.h"
|
||||
],
|
||||
"linelength": "120"
|
||||
},
|
||||
"binary": {
|
||||
"napi_versions": [
|
||||
6
|
||||
]
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/serialport/bindings-cpp.git"
|
||||
},
|
||||
"funding": "https://opencollective.com/serialport/donate",
|
||||
"changelog": {
|
||||
"labels": {
|
||||
"breaking": ":boom: BREAKING CHANGES :boom:",
|
||||
"feature-request": "Features",
|
||||
"bug": "Bug Fixes",
|
||||
"docs": "Documentation",
|
||||
"internal": "Chores"
|
||||
}
|
||||
},
|
||||
"mocha": {
|
||||
"bail": true,
|
||||
"require": [
|
||||
"esbuild-register"
|
||||
],
|
||||
"spec": "lib/**/*.test.*"
|
||||
}
|
||||
}