Refactor to remove IDBFS & NODEFS, use Transferable to handle large files

This commit is contained in:
Jerome Wu
2020-01-13 22:07:47 +08:00
parent 1bacf193d9
commit 0f15f58554
20 changed files with 162 additions and 281 deletions

View File

@@ -1,12 +1,10 @@
const worker = require('../');
const getCore = require('./getCore');
const fs = require('../../worker/browser/fs');
global.addEventListener('message', ({ data }) => {
worker.dispatchHandlers(data, (obj) => postMessage(obj));
worker.dispatchHandlers(data, postMessage);
});
worker.setAdapter({
getCore,
fs,
});

View File

@@ -1,6 +1,7 @@
require('regenerator-runtime/runtime');
const defaultArgs = require('./constants/defaultArgs');
const strList2ptr = require('./utils/strList2ptr');
const getTransferables = require('../utils/getTransferables');
let action = 'unknown';
let Module = null;
@@ -26,89 +27,53 @@ const load = ({ workerId, payload: { options: { corePath } } }, res) => {
}
};
/**
 * `syncfs` message handler: synchronizes the module's file system and
 * reports completion back to the caller.
 *
 * @param {Object} packet - incoming message; only `payload.populate` is read,
 *   defaulting to `false` when absent.
 * @param {Object} res - responder; `res.resolve` posts the result message.
 * NOTE(review): assumes `Module.syncfs` follows Emscripten's FS.syncfs
 * contract, where `populate` selects the direction of synchronization
 * (true = load persisted data into memory) — confirm against the
 * Emscripten FS API docs.
 */
const syncfs = async ({
payload: {
populate = false,
},
}, res) => {
await Module.syncfs(populate);
res.resolve({ message: `Sync file system with populate=${populate}` });
};
const FS = ({
payload: {
method,
args,
},
}, res) => {
const data = Module.FS[method](...args);
res.resolve({
message: `${method} ${args.join(',')}`,
method,
data,
message: `Complete ${method}`,
data: Module.FS[method](...args),
});
};
const run = async ({
const run = ({
payload: {
args: _args,
options: {
input, output, del = true,
},
},
}, res) => {
const args = [...defaultArgs, ..._args.trim().split(' ')].filter((s) => s.length !== 0);
ffmpeg(args.length, strList2ptr(Module, args));
/*
* After executing the ffmpeg command, the data is saved in MEMFS,
* if `output` is specified in the options, here ffmpeg.js will move
* these files to IDBFS or NODEFS here.
*/
if (typeof output === 'string') {
await adapter.fs.writeFile(output, Module.FS.readFile(output));
Module.FS.unlink(output);
} else if (Array.isArray(output)) {
await Promise.all(output.map(async (p) => {
await adapter.fs.writeFile(p, Module.FS.readFile(p));
Module.FS.unlink(p);
}));
}
/*
* To prevent input files occupy filesystem without notice,
* if `input` is specified in the options, ffmpeg.js cleans these
* files for you
*/
if (del && typeof input === 'string') {
await adapter.fs.deleteFile(input);
} else if (del && Array.isArray(input)) {
await Promise.all(input.map((p) => adapter.fs.deleteFile(p)));
}
res.resolve({ message: `Complete ${args.join(' ')}` });
res.resolve({
message: `Complete ${args.join(' ')}`,
});
};
exports.dispatchHandlers = (packet, send) => {
const res = (status, data) => {
send({
...packet,
const { workerId, jobId, action: act } = packet;
const res = (status, payload) => {
const pkt = {
workerId,
jobId,
action: act,
status,
data,
});
payload,
};
send(pkt, getTransferables(pkt));
};
res.resolve = res.bind(this, 'resolve');
res.reject = res.bind(this, 'reject');
res.progress = res.bind(this, 'progress');
action = packet.action;
action = act;
try {
({
load,
syncfs,
FS,
run,
})[packet.action](packet, res);
})[act](packet, res);
} catch (err) {
/** Prepare exception to travel through postMessage */
res.reject(err.toString());

View File

@@ -1,12 +1,16 @@
const { parentPort } = require('worker_threads');
const worker = require('../');
const getCore = require('./getCore');
const fs = require('../../worker/node/fs');
process.on('message', (packet) => {
worker.dispatchHandlers(packet, (obj) => process.send(obj));
parentPort.on('message', (packet) => {
worker.dispatchHandlers(
packet,
(...args) => {
parentPort.postMessage(...args);
},
);
});
worker.setAdapter({
getCore,
fs,
});