Update header

Luis Blanco 2022-12-05 19:47:08 +04:00
parent b77a3cac96
commit 68731102df
16 changed files with 189 additions and 90 deletions

View File

@@ -1,10 +1,13 @@
 {
+	"ignorePatterns": [
+		"src/**"
+	],
 	"extends": [
 		"eslint:recommended",
 		"plugin:node/recommended"
 	],
 	"parserOptions": {
-		"ecmaVersion": 2020
+		"ecmaVersion": 2022
 	},
 	"overrides": [
 		{
@@ -29,7 +32,7 @@
 			"es6": true
 		},
 		"rules": {
-			"arrow-parens": ["error", "as-needed"],
+			"arrow-parens": ["error", "always"],
 			"no-trailing-spaces": [
 				"error",
 				{
@@ -43,6 +46,16 @@
 					"SwitchCase": 1
 				}
 			],
+			"operator-linebreak": [
+				"error",
+				"after",
+				{
+					"overrides": {
+						"?": "before",
+						":": "before"
+					}
+				}
+			],
 			"max-len": ["error", 110],
 			"quotes": [
 				"error",
@@ -66,7 +79,21 @@
 			"no-process-exit": [0],
 			"linebreak-style": [0],
 			"node/no-missing-require": [0],
-			"node/no-unsupported-features/node-builtins": [0],
-			"no-console": [0]
+			"no-console": [0],
+			"node/no-unsupported-features/es-builtins": [
+				"error",
+				{ "version": ">=16.17.0" }
+			],
+			"node/no-unsupported-features/node-builtins": [
+				"error",
+				{ "version": ">=16.17.0" }
+			],
+			"func-names": [
+				"error",
+				"never",
+				{
+					"generators": "never"
+				}
+			]
 		}
 	}
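For context, a short illustrative snippet (not part of the commit; the helper name pickBin is made up) of code that satisfies the tightened lint rules: arrow-parens set to "always", operator-linebreak with leading "?" and ":", and func-names set to "never".

// Illustration only: style permitted by the updated ESLint config
const pickBin = (platform) => {
	const suffix = platform === 'win32'
		? 'windows'
		: 'linux';
	return `bin-${suffix}`;
};

module.exports = { pickBin };

This matches the bulk of the JavaScript changes below, which mostly wrap single arrow-function parameters in parentheses.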

View File

@@ -4,11 +4,9 @@
 			"name": "Win32",
 			"includePath": [
 				"${workspaceFolder}/**",
-				"${workspaceFolder}/node_modules/node-addon-api",
 				"${LocalAppData}/node-gyp/Cache/16.17.0/include/node"
 			],
 			"defines": [
-				"_DEBUG",
 				"UNICODE",
 				"_UNICODE"
 			],

View File

@@ -13,3 +13,4 @@ filter=-whitespace/indent
 filter=-whitespace/operators
 filter=-whitespace/parens
 filter=-readability/todo
+filter=-runtime/indentation_namespace

View File

@@ -2,6 +2,7 @@
 This is a part of [Node3D](https://github.com/node-3d) project.
 
+[![NPM Package][npm]][npm-url]
 [![NPM](https://nodei.co/npm/addon-tools-raub.png?compact=true)](https://www.npmjs.com/package/addon-tools-raub)
 [![CodeFactor](https://www.codefactor.io/repository/github/node-3d/addon-tools-raub/badge)](https://www.codefactor.io/repository/github/node-3d/addon-tools-raub)

View File

@@ -4,7 +4,7 @@ const { copy, exists, mkdir, rm } = require('./utils');
 const { bin } = require('.');
 
-module.exports = async name => {
+module.exports = async (name) => {
 	const srcDir = process.cwd().replace(/\\/g, '/');
 	if (!await exists(`${srcDir}/build/Release/${name}.node`) ) {
@@ -13,7 +13,7 @@ module.exports = async name => {
 	const binAbs = `${srcDir}/../${bin}`;
-	if ( ! await exists(binAbs) ) {
+	if (!await exists(binAbs)) {
 		await mkdir(binAbs);
 	}

View File

@@ -16,8 +16,8 @@ const download = async (url, count = 1) => {
 	const proto = protocols[url.match(/^https?/)[0]];
 	const response = await new Promise((res, rej) => {
-		const request = proto.get(url, response => res(response));
-		request.on('error', err => rej(err));
+		const request = proto.get(url, (response) => res(response));
+		request.on('error', (err) => rej(err));
 	});
 	// Handle redirects
@@ -38,9 +38,9 @@
 	response.pipe(stream);
 	return new Promise((res, rej) => {
-		response.on('error', err => rej(err));
+		response.on('error', (err) => rej(err));
 		response.on('end', () => res(stream.get()));
 	});
 };
 
-module.exports = url => download(url);
+module.exports = (url) => download(url);
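A minimal usage sketch for the promise-based download helper above; the require path and the URL are placeholders, not taken from the commit.

// Hypothetical usage; resolves to a Buffer collected by the WritableBuffer stream
const download = require('addon-tools-raub/download');

(async () => {
	const data = await download('https://example.com/archive.zip');
	console.log('downloaded bytes:', data.length);
})();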

View File

@@ -49,13 +49,13 @@
 #define CHECK_REQ_ARG(I, C, T) \
-	if (info.Length() <= (I) || ! info[I].C) { \
+	if (info.Length() <= (I) || !info[I].C) { \
 		JS_THROW("Argument " #I " must be of type `" T "`"); \
 		RET_UNDEFINED; \
 	}
 
 #define CHECK_LET_ARG(I, C, T) \
-	if ( ! (IS_ARG_EMPTY(I) || info[I].C) ) { \
+	if (!(IS_ARG_EMPTY(I) || info[I].C)) { \
 		JS_THROW( \
 			"Argument " #I \
 			" must be of type `" T \
@@ -205,6 +205,46 @@
 #define LET_ARRAY_ARG(I, VAR) USE_ARRAY_ARG(I, VAR, Napi::Array::New(env))
 
+std::vector<std::string> arrayStrToVec(const Napi::Array &arr) {
+	uint32_t count = arr.Length();
+	std::vector<std::string> result(count);
+	for (uint32_t i = 0; i < count; i++) {
+		Napi::Value item = arr[i];
+		if (item.IsString()) {
+			result[i] = item.ToString().Utf8Value();
+		}
+	}
+	return result;
+}
+
+Napi::Array stringsToArray(Napi::Env env, const char **strings, size_t count) {
+	Napi::Array arr = JS_ARRAY;
+	for (size_t i = 0; i < count; i++) {
+		arr.Set(i, strings[i]);
+	}
+	return arr;
+}
+
+Napi::Array vecStrToArray(Napi::Env env, const std::vector<std::string> &strings) {
+	Napi::Array arr = JS_ARRAY;
+	size_t count = strings.size();
+	for (size_t i = 0; i < count; i++) {
+		arr.Set(i, strings[i]);
+	}
+	return arr;
+}
+
+#define LET_ARRAY_STR_ARG(I, VAR) \
+	USE_ARRAY_ARG(I, __ARRAY_ ## VAR, Napi::Array::New(env)); \
+	std::vector<std::string> VAR = arrayStrToVec(__ARRAY_ ## VAR);
+
+#define RET_ARRAY_STR(VAL) RET_VALUE(vecStrToArray(env, VAL))
+
 #define REQ_TYPED_ARRAY_ARG(I, VAR) \
 	CHECK_REQ_ARG(I, IsTypedArray(), "TypedArray"); \
 	Napi::TypedArray VAR = info[I].As<Napi::TypedArray>();
@@ -224,7 +264,7 @@
 	CACHE = V;
 
 #define SETTER_CHECK(C, T) \
-	if ( ! value.C ) { \
+	if (!value.C) { \
 		JS_THROW("Value must be " T); \
 		RET_UNDEFINED; \
 	}
@@ -330,7 +370,6 @@ inline Type* getArrayData(
 	Napi::Object obj,
 	int *num = nullptr
 ) {
 	Type *out = nullptr;
 	if (obj.IsTypedArray()) {
@@ -356,23 +395,22 @@ inline Type* getArrayData(
 	}
 	return out;
 }
 
 template<typename Type = uint8_t>
 inline Type* getBufferData(
 	Napi::Env env,
 	Napi::Object obj,
 	int *num = nullptr
 ) {
 	Type *out = nullptr;
 	if (num) {
 		*num = 0;
 	}
-	if ( ! obj.IsBuffer() ) {
+	if (!obj.IsBuffer()) {
 		JS_THROW("Argument must be of type `Buffer`.");
 		return out;
 	}
@@ -384,12 +422,10 @@ inline Type* getBufferData(
 	out = arr.Data();
 	return out;
 }
 
 inline void *getData(Napi::Env env, Napi::Object obj) {
 	void *out = nullptr;
 	if (obj.IsTypedArray() || obj.IsArrayBuffer()) {
@@ -406,7 +442,6 @@ inline void *getData(Napi::Env env, Napi::Object obj) {
 	}
 	return out;
 }
@@ -438,8 +473,7 @@ inline void eventEmit(
 	int argc = 0,
 	const Napi::Value *argv = nullptr
 ) {
-	if ( ! that.Has("emit") ) {
+	if (!that.Has("emit")) {
 		return;
 	}
@@ -455,7 +489,6 @@ inline void eventEmit(
 	}
 	thatEmit.Call(that, args);
 }
@@ -466,8 +499,7 @@ inline void eventEmitAsync(
 	const Napi::Value *argv = nullptr,
 	napi_async_context context = nullptr
 ) {
-	if ( ! that.Has("emit") ) {
+	if (!that.Has("emit")) {
 		return;
 	}
@@ -483,7 +515,6 @@ inline void eventEmitAsync(
 	}
 	thatEmit.MakeCallback(that, args, context);
 }

View File

@@ -17,7 +17,7 @@ const platformNames = {
 const platformName = platformNames[process.platform];
 const isWindows = process.platform === 'win32';
 
-if ( ! platformName ) {
+if (!platformName) {
 	console.log(`Error: UNKNOWN PLATFORM "${process.platform}"`);
 }
@@ -29,7 +29,7 @@ const thisInclude = `${rootPath}/include`;
 const includePath = `${napiInclude} ${thisInclude}`;
 
-const paths = dir => {
+const paths = (dir) => {
 	dir = dir.replace(/\\/g, '/');
 	const bin = `${dir}/bin-${platformName}`;

View File

@@ -18,7 +18,7 @@ const { mkdir, rm } = require('./utils');
 const protocols = { http, https };
 
-const onError = msg => {
+const onError = (msg) => {
 	console.error(msg);
 	process.exit(-1);
 };
@@ -31,10 +31,10 @@ const install = async (url, count = 1) => {
 	const proto = protocols[url.match(/^https?/)[0]];
 	const response = await new Promise((res, rej) => {
-		const request = proto.get(url, response => res(response));
-		request.on('error', err => rej(err));
+		const request = proto.get(url, (response) => res(response));
+		request.on('error', (err) => rej(err));
 	});
-	response.on('error', err => { throw err; });
+	response.on('error', (err) => { throw err; });
 	// Handle redirects
 	if ([301, 302, 303, 307].includes(response.statusCode)) {
@@ -55,7 +55,7 @@ const install = async (url, count = 1) => {
 	await new Promise((res, rej) => {
 		const zipWriter = fs.createWriteStream(zipPath);
-		zipWriter.on('error', err => rej(err));
+		zipWriter.on('error', (err) => rej(err));
 		zipWriter.on('finish', () => res());
 		response.pipe(zipWriter);
 	});
@@ -71,7 +71,7 @@ const install = async (url, count = 1) => {
 };
 
-module.exports = folder => {
+module.exports = (folder) => {
 	const url = `${folder}/${platform}.zip`;
 	install(url).then();
 };
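For reference, a hedged sketch of how the exported installer above might be invoked: it appends the current platform's zip name to the folder URL it receives. The require path and release URL below are placeholders, not from the commit.

// Hypothetical usage; downloads and unpacks <folder>/<platform>.zip
require('addon-tools-raub/install')(
	'https://github.com/some-org/some-addon/releases/download/1.0.0'
);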

View File

@@ -1,7 +1,7 @@
 {
 	"author": "Luis Blanco <luisblanco1337@gmail.com>",
 	"name": "addon-tools-raub",
-	"version": "6.0.2",
+	"version": "6.1.0",
 	"description": "Helpers for Node.js addons and dependency packages",
 	"license": "MIT",
 	"main": "index.js",
@@ -78,7 +78,7 @@
 		"adm-zip": "^0.5.9",
 		"eslint-plugin-jest": "^27.1.6",
 		"eslint-plugin-node": "^11.1.0",
-		"eslint": "^8.28.0",
+		"eslint": "^8.29.0",
 		"jest": "^29.3.1",
 		"node-addon-api": "^5.0.0",
 		"typescript": "^4.9.3"

test/binding.gyp vendored
View File

@@ -5,11 +5,13 @@
 			'sources': [
 				'test.cpp',
 			],
-			'cflags!': ['-fno-exceptions'],
-			'cflags_cc!': ['-fno-exceptions'],
+			'defines': [
+				'UNICODE', '_UNICODE'
+			],
 			'include_dirs': [
 				'<!@(node -p "require(\'..\').include")',
 			],
+			'cflags_cc': ['-std=c++17'],
 			'conditions': [
 				[
 					'OS=="linux"',
@@ -21,6 +23,7 @@
 					'OS=="mac"',
 					{
 						'defines': ['__APPLE__'],
+						'OTHER_CFLAGS': ['-std=c++17'],
 					}
 				],
 				[

View File

@@ -0,0 +1,42 @@
+'use strict';
+
+const test = require('./build/Release/test.node');
+
+const arrayArgMsg = 'Argument 0 must be of type `Array`';
+
+describe('addon-tools.hpp: LET_ARRAY_ARG', () => {
+	it('exports letArrayStrArg', () => {
+		expect(typeof test.letArrayStrArg).toBe('function');
+	});
+	it('throws if arg was passed a string', () => {
+		expect(() => test.letArrayStrArg('1')).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a number', () => {
+		expect(() => test.letArrayStrArg(1)).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a boolean', () => {
+		expect(() => test.letArrayStrArg(true)).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed a pointer', () => {
+		expect(() => test.letArrayStrArg(test.retExt())).toThrow(arrayArgMsg);
+	});
+	it('throws if arg was passed an object', () => {
+		expect(() => test.letArrayStrArg({})).toThrow(arrayArgMsg);
+	});
+	it('accepts an empty arg', () => {
+		expect(Array.isArray(test.letArrayStrArg())).toBe(true);
+	});
+	it('accepts undefined', () => {
+		expect(Array.isArray(test.letArrayStrArg(undefined))).toBe(true);
+	});
+	it('accepts null', () => {
+		expect(Array.isArray(test.letArrayStrArg(null))).toBe(true);
+	});
+	it('accepts an array', () => {
+		expect(Array.isArray(test.letArrayStrArg([]))).toBe(true);
+	});
+	it('returns an equal array', () => {
+		expect(test.letArrayStrArg(['a', 'b'])).toEqual(['a', 'b']);
+	});
+});

View File

@@ -7,7 +7,7 @@ describe('index.js', () => {
 	describe(
 		'Properties',
 		() => ['bin', 'platform', 'include'].forEach(
-			m => it(`#${m} is a string`, () => {
+			(m) => it(`#${m} is a string`, () => {
 				expect(typeof tools[m]).toBe('string');
 			})
 		)

View File

@@ -3,12 +3,12 @@
 JS_METHOD(empty) { NAPI_ENV;
 	NAPI_HS;
-	return env.Undefined();
+	RET_UNDEFINED;
 }
 
 JS_METHOD(throwing) { NAPI_ENV;
 	JS_THROW("Some error");
-	return env.Undefined();
+	RET_UNDEFINED;
 }
 
 JS_METHOD(retUndefined) { NAPI_ENV;
@@ -202,6 +202,11 @@ JS_METHOD(letArrayArg) { NAPI_ENV;
 	RET_VALUE(arg);
 }
 
+JS_METHOD(letArrayStrArg) { NAPI_ENV;
+	LET_ARRAY_STR_ARG(0, arg);
+	RET_ARRAY_STR(arg);
+}
+
 JS_METHOD(reqFunArg) { NAPI_ENV;
 	REQ_FUN_ARG(0, arg);
 	RET_VALUE(arg);

View File

@@ -3,7 +3,7 @@
 const fs = require('fs');
 
 // (async) Reads a whole file to string, NOT A Buffer
-const read = name => new Promise(
+const read = (name) => new Promise(
 	(res, rej) => fs.readFile(
 		name,
 		(err, data) => (err ? rej(err) : res(data.toString()))
@@ -13,7 +13,7 @@ const read = name => new Promise(
 // (async) Write a file
 const write = (name, text) => new Promise(
-	(res, rej) => fs.writeFile(name, text, err => (err ? rej(err) : res()))
+	(res, rej) => fs.writeFile(name, text, (err) => (err ? rej(err) : res()))
 );
@@ -21,7 +21,7 @@ const write = (name, text) => new Promise(
 const copy = async (src, dest) => {
 	try {
 		await new Promise(
-			(res, rej) => fs.copyFile(src, dest, err => (err ? rej(err) : res()))
+			(res, rej) => fs.copyFile(src, dest, (err) => (err ? rej(err) : res()))
 		);
 	} catch (e) {
 		if (e.code !== 'EBUSY') {
@@ -32,46 +32,46 @@ const copy = async (src, dest) => {
 // (async) Check if a file/folder exists
-const exists = name => new Promise(
-	res => fs.access(
+const exists = (name) => new Promise(
+	(res) => fs.access(
 		name,
 		fs.constants.F_OK,
-		err => res(err ? false : true)
+		(err) => res(err ? false : true)
 	)
 );
 
 // (async) Create an empty folder
-const mkdir = async name => {
+const mkdir = async (name) => {
 	if (await exists(name)) {
 		return;
 	}
 	return new Promise(
-		(res, rej) => fs.mkdir(name, err => (err ? rej(err) : res()))
+		(res, rej) => fs.mkdir(name, (err) => (err ? rej(err) : res()))
 	);
 };
 
 // (async) Get status on a file
-const stat = name => new Promise(
+const stat = (name) => new Promise(
 	(res, rej) => fs.stat(name, (err, stats) => (err ? rej(err) : res(stats)))
 );
 
 // (async) Check if the path is a folder
-const isDir = async name => (await stat(name)).isDirectory();
+const isDir = async (name) => (await stat(name)).isDirectory();
 
 // (async) Check if the path is a file
-const isFile = async name => (await stat(name)).isFile();
+const isFile = async (name) => (await stat(name)).isFile();
 
 // Cut the path one folder up
-const dirUp = dir => dir.replace(/\\/g, '/').split('/').slice(0, -1).join('/');
+const dirUp = (dir) => dir.replace(/\\/g, '/').split('/').slice(0, -1).join('/');
 
 // (async) Like `mkdir -p`, makes sure a directory exists
-const ensuredir = async dir => {
+const ensuredir = async (dir) => {
 	if (await exists(dir) && await isDir(dir)) {
 		return;
 	}
@@ -88,7 +88,7 @@ const copysafe = async (src, dest) => {
 // (async) Get file/folder names of the 1st level
-const readdir = name => new Promise(
+const readdir = (name) => new Promise(
 	(res, rej) => fs.readdir(
 		name,
 		(err, dirents) => (err ? rej(err) : res(dirents))
@@ -97,18 +97,18 @@ const readdir = name => new Promise(
 // (async) Get folder paths (concatenated with input) of the 1st level
-const subdirs = async name => {
+const subdirs = async (name) => {
 	const all = await readdir(name);
-	const mapped = await Promise.all(all.map(d => isDir(`${name}/${d}`)));
+	const mapped = await Promise.all(all.map((d) => isDir(`${name}/${d}`)));
 	return all.filter((_, i) => mapped[i]);
 };
 
 // (async) Get file paths (concatenated with input) of the 1st level
-const subfiles = async name => {
+const subfiles = async (name) => {
 	const all = await readdir(name);
-	const mapped = await Promise.all(all.map(d => isFile(`${name}/${d}`)));
-	return all.filter((_, i) => mapped[i]).map(f => `${name}/${f}`);
+	const mapped = await Promise.all(all.map((d) => isFile(`${name}/${d}`)));
+	return all.filter((_, i) => mapped[i]).map((f) => `${name}/${f}`);
 };
@@ -121,7 +121,7 @@ const traverse = async (name, showDirs = false) => {
 	while (stack.length) {
 		const dir = stack.pop();
 		dirs.push(dir);
-		(await subdirs(dir)).forEach(d => stack.push(`${dir}/${d}`));
+		(await subdirs(dir)).forEach((d) => stack.push(`${dir}/${d}`));
 	}
 	return (showDirs ? dirs : []).concat(
 		...(await Promise.all(dirs.map(subfiles)))
@@ -145,8 +145,8 @@ const copyall = async (src, dest) => {
 // (async) Like `rm -rf`, removes everything recursively
-const rmdir = async name => {
-	if ( ! await exists(name) ) {
+const rmdir = async (name) => {
+	if (!await exists(name)) {
 		return;
 	}
 	const paths = await traverse(name, true);
@@ -156,7 +156,7 @@ const rmdir = async name => {
 		await new Promise(
 			(res, rej) => fs[dir ? 'rmdir' : 'unlink'](
 				target,
-				err => (err ? rej(err) : res())
+				(err) => (err ? rej(err) : res())
 			)
 		);
 	}
@@ -164,12 +164,12 @@ const rmdir = async name => {
 // (async) Remove a file. Must be a file, not a folder. Just `fs.unlink`.
-const rm = async name => {
-	if ( ! await exists(name) ) {
+const rm = async (name) => {
+	if (!await exists(name)) {
 		return;
 	}
 	await new Promise(
-		(res, rej) => fs.unlink(name, err => (err ? rej(err) : res()))
+		(res, rej) => fs.unlink(name, (err) => (err ? rej(err) : res()))
 	);
 };
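A brief usage sketch of the promise-based helpers above, using only the names this commit shows being imported elsewhere (copy, exists, mkdir, rm); the file paths are illustrative.

// Hypothetical usage of the fs helpers from utils.js
const { copy, exists, mkdir, rm } = require('./utils');

(async () => {
	if (!await exists('bin-windows')) {
		await mkdir('bin-windows');
	}
	await copy('build/Release/addon.node', 'bin-windows/addon.node');
	await rm('addon.zip'); // silently returns if the file does not exist
})();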

View File

@@ -2,25 +2,22 @@
 const { Writable } = require('stream');
 
 const CHUNK_SIZE = 1024;
 const INITIAL_SIZE = 8 * CHUNK_SIZE;
 const INCREMENT_SIZE = 8 * CHUNK_SIZE;
 
 class WritableBuffer extends Writable {
 	constructor() {
 		super();
 		this._buffer = Buffer.alloc(INITIAL_SIZE);
 		this._size = 0;
 	}
 
 	get() {
-		if ( ! this._size ) {
+		if (!this._size) {
 			return null;
 		}
@@ -28,13 +25,11 @@ class WritableBuffer extends Writable {
 		this._buffer.copy(data, 0, 0, this._size);
 		return data;
 	}
 
 	_increaseAsNeeded(incomingSize) {
-		if ( (this._buffer.length - this._size) >= incomingSize ) {
+		if ((this._buffer.length - this._size) >= incomingSize) {
 			return;
 		}
@@ -45,21 +40,17 @@ class WritableBuffer extends Writable {
 		this._buffer.copy(newBuffer, 0, 0, this._size);
 		this._buffer = newBuffer;
 	}
 
 	_write(chunk, encoding, callback) {
 		this._increaseAsNeeded(chunk.length);
 		chunk.copy(this._buffer, this._size, 0);
 		this._size += chunk.length;
 		callback();
 	}
 }
 
 module.exports = WritableBuffer;
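To illustrate the class above: a small sketch (the './writable-buffer' path and the input file are assumed) that collects a readable stream into a single Buffer, the same way download.js uses stream.get().

// Hypothetical usage of WritableBuffer
const fs = require('fs');
const WritableBuffer = require('./writable-buffer');

const sink = new WritableBuffer();
fs.createReadStream('package.json')
	.pipe(sink)
	.on('finish', () => {
		const data = sink.get(); // Buffer with exactly the bytes written, or null if empty
		console.log(data.toString().slice(0, 40));
	});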