Update header
parent b77a3cac96
commit 68731102df
.eslintrc (45 changes)

@@ -1,11 +1,14 @@
 {
+	"ignorePatterns": [
+		"src/**"
+	],
 	"extends": [
-		"eslint:recommended",
-		"plugin:node/recommended"
-	],
-	"parserOptions": {
-		"ecmaVersion": 2020
-	},
+		"eslint:recommended",
+		"plugin:node/recommended"
+	],
+	"parserOptions": {
+		"ecmaVersion": 2022
+	},
 	"overrides": [
 		{
 			"files": [
@@ -29,7 +32,7 @@
 				"es6": true
 			},
 			"rules": {
-				"arrow-parens": ["error", "as-needed"],
+				"arrow-parens": ["error", "always"],
 				"no-trailing-spaces": [
 					"error",
 					{
@@ -43,6 +46,16 @@
 						"SwitchCase": 1
 					}
 				],
+				"operator-linebreak": [
+					"error",
+					"after",
+					{
+						"overrides": {
+							"?": "before",
+							":": "before"
+						}
+					}
+				],
 				"max-len": ["error", 110],
 				"quotes": [
 					"error",
@@ -66,7 +79,21 @@
 				"no-process-exit": [0],
 				"linebreak-style": [0],
 				"node/no-missing-require": [0],
-				"node/no-unsupported-features/node-builtins": [0],
-				"no-console": [0]
+				"no-console": [0],
+				"node/no-unsupported-features/es-builtins": [
+					"error",
+					{ "version": ">=16.17.0" }
+				],
+				"node/no-unsupported-features/node-builtins": [
+					"error",
+					{ "version": ">=16.17.0" }
+				],
+				"func-names": [
+					"error",
+					"never",
+					{
+						"generators": "never"
+					}
+				]
 			}
 		}
@@ -4,11 +4,9 @@
 			"name": "Win32",
 			"includePath": [
 				"${workspaceFolder}/**",
-				"${workspaceFolder}/node_modules/node-addon-api",
-				"${LocalAppData}/node-gyp/Cache/16.17.0/include/node"
 			],
 			"defines": [
 				"_DEBUG",
 				"UNICODE",
 				"_UNICODE"
 			],
@@ -13,3 +13,4 @@ filter=-whitespace/indent
 filter=-whitespace/operators
 filter=-whitespace/parens
 filter=-readability/todo
+filter=-runtime/indentation_namespace
@@ -2,6 +2,7 @@
 
 This is a part of [Node3D](https://github.com/node-3d) project.
 
+[![NPM Package][npm]][npm-url]
 [](https://www.npmjs.com/package/addon-tools-raub)
 [](https://www.codefactor.io/repository/github/node-3d/addon-tools-raub)
 
cpbin.js (4 changes)

@@ -4,7 +4,7 @@ const { copy, exists, mkdir, rm } = require('./utils');
 const { bin } = require('.');
 
 
-module.exports = async name => {
+module.exports = async (name) => {
 	const srcDir = process.cwd().replace(/\\/g, '/');
 	
 	if (!await exists(`${srcDir}/build/Release/${name}.node`) ) {
@@ -13,7 +13,7 @@ module.exports = async name => {
 	
 	const binAbs = `${srcDir}/../${bin}`;
 	
-	if ( ! await exists(binAbs) ) {
+	if (!await exists(binAbs)) {
 		await mkdir(binAbs);
 	}
 	
@@ -16,8 +16,8 @@ const download = async (url, count = 1) => {
 	const proto = protocols[url.match(/^https?/)[0]];
 	
 	const response = await new Promise((res, rej) => {
-		const request = proto.get(url, response => res(response));
-		request.on('error', err => rej(err));
+		const request = proto.get(url, (response) => res(response));
+		request.on('error', (err) => rej(err));
 	});
 	
 	// Handle redirects
@@ -38,9 +38,9 @@ const download = async (url, count = 1) => {
 	response.pipe(stream);
 	
 	return new Promise((res, rej) => {
-		response.on('error', err => rej(err));
+		response.on('error', (err) => rej(err));
 		response.on('end', () => res(stream.get()));
 	});
 };
 
-module.exports = url => download(url);
+module.exports = (url) => download(url);
@@ -49,13 +49,13 @@
 
 
 #define CHECK_REQ_ARG(I, C, T) \
-	if (info.Length() <= (I) || ! info[I].C) { \
+	if (info.Length() <= (I) || !info[I].C) { \
 		JS_THROW("Argument " #I " must be of type `" T "`"); \
 		RET_UNDEFINED; \
 	}
 
 #define CHECK_LET_ARG(I, C, T) \
-	if ( ! (IS_ARG_EMPTY(I) || info[I].C) ) { \
+	if (!(IS_ARG_EMPTY(I) || info[I].C)) { \
 		JS_THROW( \
 			"Argument " #I \
 			" must be of type `" T \
@@ -194,17 +194,57 @@
 	Napi::Buffer<uint8_t> VAR = info[I].As< Napi::Buffer<uint8_t> >();
 
 
-#define REQ_ARRAY_ARG(I, VAR) \
-	CHECK_REQ_ARG(I, IsArray(), "Array"); \
+#define REQ_ARRAY_ARG(I, VAR) \
+	CHECK_REQ_ARG(I, IsArray(), "Array"); \
 	Napi::Array VAR = info[I].As<Napi::Array>();
 
-#define USE_ARRAY_ARG(I, VAR, DEF) \
-	CHECK_LET_ARG(I, IsArray(), "Array"); \
+#define USE_ARRAY_ARG(I, VAR, DEF) \
+	CHECK_LET_ARG(I, IsArray(), "Array"); \
 	Napi::Array VAR = IS_ARG_EMPTY(I) ? (DEF) : info[I].As<Napi::Array>();
 
 #define LET_ARRAY_ARG(I, VAR) USE_ARRAY_ARG(I, VAR, Napi::Array::New(env))
 
 
+std::vector<std::string> arrayStrToVec(const Napi::Array &arr) {
+	uint32_t count = arr.Length();
+	std::vector<std::string> result(count);
+	for (uint32_t i = 0; i < count; i++) {
+		Napi::Value item = arr[i];
+		if (item.IsString()) {
+			result[i] = item.ToString().Utf8Value();
+		}
+	}
+	return result;
+}
+
+
+Napi::Array stringsToArray(Napi::Env env, const char **strings, size_t count) {
+	Napi::Array arr = JS_ARRAY;
+	for (size_t i = 0; i < count; i++) {
+		arr.Set(i, strings[i]);
+	}
+	return arr;
+}
+
+
+Napi::Array vecStrToArray(Napi::Env env, const std::vector<std::string> &strings) {
+	Napi::Array arr = JS_ARRAY;
+	size_t count = strings.size();
+	for (size_t i = 0; i < count; i++) {
+		arr.Set(i, strings[i]);
+	}
+	return arr;
+}
+
+
+#define LET_ARRAY_STR_ARG(I, VAR) \
+	USE_ARRAY_ARG(I, __ARRAY_ ## VAR, Napi::Array::New(env)); \
+	std::vector<std::string> VAR = arrayStrToVec(__ARRAY_ ## VAR);
+
+
+#define RET_ARRAY_STR(VAL) RET_VALUE(vecStrToArray(env, VAL))
+
+
 #define REQ_TYPED_ARRAY_ARG(I, VAR) \
 	CHECK_REQ_ARG(I, IsTypedArray(), "TypedArray"); \
 	Napi::TypedArray VAR = info[I].As<Napi::TypedArray>();
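Usage note (illustration, not part of the commit): the new LET_ARRAY_STR_ARG and RET_ARRAY_STR macros are meant to be used inside a JS_METHOD body just like the existing array macros, which is how the test method added later in this commit uses them. The sketch below assumes the header is reachable through the include path from `require('addon-tools-raub').include`; the method name is made up.

#include <addon-tools.hpp>

// Sketch: accept an optional JS array of strings and echo it back.
// LET_ARRAY_STR_ARG(0, strings) reads info[0] (or defaults to an empty array
// when the argument is omitted or null) into std::vector<std::string>;
// RET_ARRAY_STR converts the vector back into a Napi::Array and returns it.
JS_METHOD(echoStrings) { NAPI_ENV;
	LET_ARRAY_STR_ARG(0, strings);
	RET_ARRAY_STR(strings);
}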
@@ -224,7 +264,7 @@
 	CACHE = V;
 
 #define SETTER_CHECK(C, T) \
-	if ( ! value.C ) { \
+	if (!value.C) { \
 		JS_THROW("Value must be " T); \
 		RET_UNDEFINED; \
 	}
@@ -330,7 +370,6 @@ inline Type* getArrayData(
 	Napi::Object obj,
 	int *num = nullptr
 ) {
-
 	Type *out = nullptr;
 	
 	if (obj.IsTypedArray()) {
@@ -356,23 +395,22 @@ inline Type* getArrayData(
 	}
 	
 	return out;
-
 }
 
 
 template<typename Type = uint8_t>
 inline Type* getBufferData(
 	Napi::Env env,
 	Napi::Object obj,
 	int *num = nullptr
 ) {
-
 	Type *out = nullptr;
+
 	if (num) {
 		*num = 0;
 	}
 	
-	if ( ! obj.IsBuffer() ) {
+	if (!obj.IsBuffer()) {
 		JS_THROW("Argument must be of type `Buffer`.");
 		return out;
 	}
@@ -384,12 +422,10 @@ inline Type* getBufferData(
 	out = arr.Data();
 	
 	return out;
-
 }
 
 
 inline void *getData(Napi::Env env, Napi::Object obj) {
-
 	void *out = nullptr;
 	
 	if (obj.IsTypedArray() || obj.IsArrayBuffer()) {
@@ -406,7 +442,6 @@ inline void *getData(Napi::Env env, Napi::Object obj) {
 	}
 	
 	return out;
-
 }
 
 
@@ -438,8 +473,7 @@ inline void eventEmit(
 	int argc = 0,
 	const Napi::Value *argv = nullptr
 ) {
-
-	if ( ! that.Has("emit") ) {
+	if (!that.Has("emit")) {
 		return;
 	}
 	
@@ -455,7 +489,6 @@ inline void eventEmit(
 	}
 	
 	thatEmit.Call(that, args);
-
 }
 
 
|
@ -466,8 +499,7 @@ inline void eventEmitAsync(
|
|||
const Napi::Value *argv = nullptr,
|
||||
napi_async_context context = nullptr
|
||||
) {
|
||||
|
||||
if ( ! that.Has("emit") ) {
|
||||
if (!that.Has("emit")) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@@ -483,7 +515,6 @@ inline void eventEmitAsync(
 	}
 	
 	thatEmit.MakeCallback(that, args, context);
-
 }
 
 
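A second illustrative sketch (hypothetical method and string values, not from the commit): the stringsToArray helper added above turns a plain C string list into a Napi::Array, which can then be returned with the existing RET_VALUE macro.

#include <addon-tools.hpp>

// Sketch: expose a fixed list of C strings to JS as an Array of strings.
JS_METHOD(getNames) { NAPI_ENV;
	static const char *names[] = { "alpha", "beta" };
	RET_VALUE(stringsToArray(env, names, 2));
}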
index.js (4 changes)

@@ -17,7 +17,7 @@ const platformNames = {
 const platformName = platformNames[process.platform];
 const isWindows = process.platform === 'win32';
 
-if ( ! platformName ) {
+if (!platformName) {
 	console.log(`Error: UNKNOWN PLATFORM "${process.platform}"`);
 }
 
@@ -29,7 +29,7 @@ const thisInclude = `${rootPath}/include`;
 const includePath = `${napiInclude} ${thisInclude}`;
 
 
-const paths = dir => {
+const paths = (dir) => {
 	dir = dir.replace(/\\/g, '/');
 	
 	const bin = `${dir}/bin-${platformName}`;
install.js (12 changes)

@@ -18,7 +18,7 @@ const { mkdir, rm } = require('./utils');
 
 const protocols = { http, https };
 
-const onError = msg => {
+const onError = (msg) => {
 	console.error(msg);
 	process.exit(-1);
 };
@@ -31,10 +31,10 @@ const install = async (url, count = 1) => {
 	const proto = protocols[url.match(/^https?/)[0]];
 	
 	const response = await new Promise((res, rej) => {
-		const request = proto.get(url, response => res(response));
-		request.on('error', err => rej(err));
+		const request = proto.get(url, (response) => res(response));
+		request.on('error', (err) => rej(err));
 	});
-	response.on('error', err => { throw err; });
+	response.on('error', (err) => { throw err; });
 	
 	// Handle redirects
 	if ([301, 302, 303, 307].includes(response.statusCode)) {
@@ -55,7 +55,7 @@ const install = async (url, count = 1) => {
 	
 	await new Promise((res, rej) => {
 		const zipWriter = fs.createWriteStream(zipPath);
-		zipWriter.on('error', err => rej(err));
+		zipWriter.on('error', (err) => rej(err));
 		zipWriter.on('finish', () => res());
 		response.pipe(zipWriter);
 	});
@@ -71,7 +71,7 @@ const install = async (url, count = 1) => {
 };
 
 
-module.exports = folder => {
+module.exports = (folder) => {
 	const url = `${folder}/${platform}.zip`;
 	install(url).then();
 };
@@ -1,7 +1,7 @@
 {
 	"author": "Luis Blanco <luisblanco1337@gmail.com>",
 	"name": "addon-tools-raub",
-	"version": "6.0.2",
+	"version": "6.1.0",
 	"description": "Helpers for Node.js addons and dependency packages",
 	"license": "MIT",
 	"main": "index.js",
@@ -78,7 +78,7 @@
 		"adm-zip": "^0.5.9",
 		"eslint-plugin-jest": "^27.1.6",
 		"eslint-plugin-node": "^11.1.0",
-		"eslint": "^8.28.0",
+		"eslint": "^8.29.0",
 		"jest": "^29.3.1",
 		"node-addon-api": "^5.0.0",
 		"typescript": "^4.9.3"
@@ -5,11 +5,13 @@
 			'sources': [
 				'test.cpp',
 			],
-			'cflags!': ['-fno-exceptions'],
-			'cflags_cc!': ['-fno-exceptions'],
+			'defines': [
+				'UNICODE', '_UNICODE'
+			],
 			'include_dirs': [
 				'<!@(node -p "require(\'..\').include")',
 			],
+			'cflags_cc': ['-std=c++17'],
 			'conditions': [
 				[
 					'OS=="linux"',
@@ -21,6 +23,7 @@
 					'OS=="mac"',
 					{
+						'defines': ['__APPLE__'],
 						'OTHER_CFLAGS': ['-std=c++17'],
 					}
 				],
 				[
@@ -0,0 +1,42 @@
+'use strict';
+
+const test = require('./build/Release/test.node');
+
+
+const arrayArgMsg = 'Argument 0 must be of type `Array`';
+
+describe('addon-tools.hpp: LET_ARRAY_ARG', () => {
+	it('exports letArrayStrArg', () => {
+		expect(typeof test.letArrayStrArg).toBe('function');
+	});
+	it('throws if arg was passed a string', () => {
+		expect(() => test.letArrayStrArg('1')).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a number', () => {
+		expect(() => test.letArrayStrArg(1)).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a boolean', () => {
+		expect(() => test.letArrayStrArg(true)).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a pointer', () => {
+		expect(() => test.letArrayStrArg(test.retExt())).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed an object', () => {
+		expect(() => test.letArrayStrArg({})).toThrow(arrayArgMsg);
+	});
+	it('accepts an empty arg', () => {
+		expect(Array.isArray(test.letArrayStrArg())).toBe(true);
+	});
+	it('accepts undefined', () => {
+		expect(Array.isArray(test.letArrayStrArg(undefined))).toBe(true);
+	});
+	it('accepts null', () => {
+		expect(Array.isArray(test.letArrayStrArg(null))).toBe(true);
+	});
+	it('accepts an array', () => {
+		expect(Array.isArray(test.letArrayStrArg([]))).toBe(true);
+	});
+	it('returns same array', () => {
+		expect(test.letArrayStrArg(['a', 'b'])).toBe(['a', 'b']);
+	});
+});
@@ -7,7 +7,7 @@ describe('index.js', () => {
 	describe(
 		'Properties',
 		() => ['bin', 'platform', 'include'].forEach(
-			m => it(`#${m} is a string`, () => {
+			(m) => it(`#${m} is a string`, () => {
 				expect(typeof tools[m]).toBe('string');
 			})
 		)
@@ -3,12 +3,12 @@
 
 JS_METHOD(empty) { NAPI_ENV;
 	NAPI_HS;
-	return env.Undefined();
+	RET_UNDEFINED;
 }
 
 JS_METHOD(throwing) { NAPI_ENV;
 	JS_THROW("Some error");
-	return env.Undefined();
+	RET_UNDEFINED;
 }
 
 JS_METHOD(retUndefined) { NAPI_ENV;
@@ -202,6 +202,11 @@ JS_METHOD(letArrayArg) { NAPI_ENV;
 	RET_VALUE(arg);
 }
 
+JS_METHOD(letArrayStrArg) { NAPI_ENV;
+	LET_ARRAY_STR_ARG(0, arg);
+	RET_ARRAY_STR(arg);
+}
+
 JS_METHOD(reqFunArg) { NAPI_ENV;
 	REQ_FUN_ARG(0, arg);
 	RET_VALUE(arg);
utils.js (52 changes)

@@ -3,7 +3,7 @@
 const fs = require('fs');
 
 // (async) Reads a whole file to string, NOT A Buffer
-const read = name => new Promise(
+const read = (name) => new Promise(
 	(res, rej) => fs.readFile(
 		name,
 		(err, data) => (err ? rej(err) : res(data.toString()))
@@ -13,7 +13,7 @@ const read = name => new Promise(
 
 // (async) Write a file
 const write = (name, text) => new Promise(
-	(res, rej) => fs.writeFile(name, text, err => (err ? rej(err) : res()))
+	(res, rej) => fs.writeFile(name, text, (err) => (err ? rej(err) : res()))
 );
 
 
@@ -21,7 +21,7 @@ const write = (name, text) => new Promise(
 const copy = async (src, dest) => {
 	try {
 		await new Promise(
-			(res, rej) => fs.copyFile(src, dest, err => (err ? rej(err) : res()))
+			(res, rej) => fs.copyFile(src, dest, (err) => (err ? rej(err) : res()))
 		);
 	} catch (e) {
 		if (e.code !== 'EBUSY') {
@@ -32,46 +32,46 @@ const copy = async (src, dest) => {
 
 
 // (async) Check if a file/folder exists
-const exists = name => new Promise(
-	res => fs.access(
+const exists = (name) => new Promise(
+	(res) => fs.access(
 		name,
 		fs.constants.F_OK,
-		err => res(err ? false : true)
+		(err) => res(err ? false : true)
 	)
 );
 
 
 // (async) Create an empty folder
-const mkdir = async name => {
+const mkdir = async (name) => {
 	if (await exists(name)) {
 		return;
 	}
 	return new Promise(
-		(res, rej) => fs.mkdir(name, err => (err ? rej(err) : res()))
+		(res, rej) => fs.mkdir(name, (err) => (err ? rej(err) : res()))
 	);
 };
 
 
 // (async) Get status on a file
-const stat = name => new Promise(
+const stat = (name) => new Promise(
 	(res, rej) => fs.stat(name, (err, stats) => (err ? rej(err) : res(stats)))
 );
 
 
 // (async) Check if the path is a folder
-const isDir = async name => (await stat(name)).isDirectory();
+const isDir = async (name) => (await stat(name)).isDirectory();
 
 
 // (async) Check if the path is a file
-const isFile = async name => (await stat(name)).isFile();
+const isFile = async (name) => (await stat(name)).isFile();
 
 
 // Cut the path one folder up
-const dirUp = dir => dir.replace(/\\/g, '/').split('/').slice(0, -1).join('/');
+const dirUp = (dir) => dir.replace(/\\/g, '/').split('/').slice(0, -1).join('/');
 
 
 // (async) Like `mkdir -p`, makes sure a directory exists
-const ensuredir = async dir => {
+const ensuredir = async (dir) => {
 	if (await exists(dir) && await isDir(dir)) {
 		return;
 	}
|
|||
|
||||
|
||||
// (async) Get file/folder names of the 1st level
|
||||
const readdir = name => new Promise(
|
||||
const readdir = (name) => new Promise(
|
||||
(res, rej) => fs.readdir(
|
||||
name,
|
||||
(err, dirents) => (err ? rej(err) : res(dirents))
|
||||
|
@@ -97,18 +97,18 @@ const readdir = name => new Promise(
 
 
 // (async) Get folder paths (concatenated with input) of the 1st level
-const subdirs = async name => {
+const subdirs = async (name) => {
 	const all = await readdir(name);
-	const mapped = await Promise.all(all.map(d => isDir(`${name}/${d}`)));
+	const mapped = await Promise.all(all.map((d) => isDir(`${name}/${d}`)));
 	return all.filter((_, i) => mapped[i]);
 };
 
 
 // (async) Get file paths (concatenated with input) of the 1st level
-const subfiles = async name => {
+const subfiles = async (name) => {
 	const all = await readdir(name);
-	const mapped = await Promise.all(all.map(d => isFile(`${name}/${d}`)));
-	return all.filter((_, i) => mapped[i]).map(f => `${name}/${f}`);
+	const mapped = await Promise.all(all.map((d) => isFile(`${name}/${d}`)));
+	return all.filter((_, i) => mapped[i]).map((f) => `${name}/${f}`);
 };
 
 
|
@ -121,7 +121,7 @@ const traverse = async (name, showDirs = false) => {
|
|||
while (stack.length) {
|
||||
const dir = stack.pop();
|
||||
dirs.push(dir);
|
||||
(await subdirs(dir)).forEach(d => stack.push(`${dir}/${d}`));
|
||||
(await subdirs(dir)).forEach((d) => stack.push(`${dir}/${d}`));
|
||||
}
|
||||
return (showDirs ? dirs : []).concat(
|
||||
...(await Promise.all(dirs.map(subfiles)))
|
||||
|
@@ -145,8 +145,8 @@ const copyall = async (src, dest) => {
 
 
 // (async) Like `rm -rf`, removes everything recursively
-const rmdir = async name => {
-	if ( ! await exists(name) ) {
+const rmdir = async (name) => {
+	if (!await exists(name)) {
 		return;
 	}
 	const paths = await traverse(name, true);
@@ -156,7 +156,7 @@ const rmdir = async name => {
 		await new Promise(
 			(res, rej) => fs[dir ? 'rmdir' : 'unlink'](
 				target,
-				err => (err ? rej(err) : res())
+				(err) => (err ? rej(err) : res())
 			)
 		);
 	}
@@ -164,12 +164,12 @@ const rmdir = async name => {
 
 
 // (async) Remove a file. Must be a file, not a folder. Just `fs.unlink`.
-const rm = async name => {
-	if ( ! await exists(name) ) {
+const rm = async (name) => {
+	if (!await exists(name)) {
 		return;
 	}
 	await new Promise(
-		(res, rej) => fs.unlink(name, err => (err ? rej(err) : res()))
+		(res, rej) => fs.unlink(name, (err) => (err ? rej(err) : res()))
 	);
 };
 
@@ -2,25 +2,22 @@
 
 const { Writable } = require('stream');
 
 
 const CHUNK_SIZE = 1024;
 const INITIAL_SIZE = 8 * CHUNK_SIZE;
 const INCREMENT_SIZE = 8 * CHUNK_SIZE;
 
 
 class WritableBuffer extends Writable {
-
 	constructor() {
-
 		super();
+
 		this._buffer = Buffer.alloc(INITIAL_SIZE);
 		this._size = 0;
-
 	}
 	
 	get() {
-
-		if ( ! this._size ) {
+		if (!this._size) {
 			return null;
 		}
 		
@@ -28,13 +25,11 @@ class WritableBuffer extends Writable {
 		this._buffer.copy(data, 0, 0, this._size);
 		
 		return data;
-
 	}
 	
 	
 	_increaseAsNeeded(incomingSize) {
-
-		if ( (this._buffer.length - this._size) >= incomingSize ) {
+		if ((this._buffer.length - this._size) >= incomingSize) {
 			return;
 		}
 		
@@ -45,21 +40,17 @@ class WritableBuffer extends Writable {
 		this._buffer.copy(newBuffer, 0, 0, this._size);
 		
 		this._buffer = newBuffer;
-
 	}
 	
 	
 	_write(chunk, encoding, callback) {
-
 		this._increaseAsNeeded(chunk.length);
-
 		chunk.copy(this._buffer, this._size, 0);
 		this._size += chunk.length;
 		
 		callback();
-
 	}
 }
 
 module.exports = WritableBuffer;