Use IDBFS and NODEFS to process big files
This commit is contained in:
parent 72a2ff6e84
commit 3ab760b5bb
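What the change amounts to, going by the updated examples and the new fs adapters below: files are now staged on a mounted /data filesystem (IDBFS on top of IndexedDB in the browser, NODEFS on top of a ./data directory in Node) instead of being posted to the worker and copied into MEMFS, so big files no longer travel through the message channel. ffmpeg arguments therefore reference /data/<name>, and worker.run() takes an options object naming the input/output files to sync. A minimal sketch of the new browser-side flow, pieced together from the example diffs below (not a verbatim copy of any one file):

const { createWorker } = FFmpeg;
const worker = createWorker({
  corePath: '../../node_modules/@ffmpeg/core/ffmpeg-core.js',
});

(async () => {
  await worker.load();
  // write() stores the file on the mounted filesystem and resolves with its path,
  // e.g. { path: '/data/flame.avi' }
  await worker.write('flame.avi', '../../tests/assets/flame.avi');
  // run() now takes { inputPath, outputPath } so the worker knows which file to
  // pull from /data and which result to sync back out of MEMFS
  await worker.run('-i /data/flame.avi flame.mp4', { inputPath: 'flame.avi', outputPath: 'flame.mp4' });
  const { data } = await worker.read('flame.mp4'); // deletes the stored copy after reading by default
  console.log(data.length);
})();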
@@ -38,8 +38,13 @@
       await worker.write(`tmp.${num}.png`, `../../tests/assets/triangle/tmp.${num}.png`);
     }
     message.innerHTML = 'Start transcoding';
-    await worker.run('-framerate 30 -pattern_type glob -i *.png -i audio.ogg -c:a copy -shortest -c:v libx264 -pix_fmt yuv420p out.mp4');
+    await worker.run('-framerate 30 -pattern_type glob -i /data/*.png -i /data/audio.ogg -c:a copy -shortest -c:v libx264 -pix_fmt yuv420p out.mp4', { outputPath: 'out.mp4' });
     const { data } = await worker.read('out.mp4');
+    await worker.remove('audio.ogg');
+    for (let i = 0; i < 60; i += 1) {
+      const num = `00${i}`.slice(-3);
+      await worker.remove(`tmp.${num}.png`);
+    }

     const video = document.getElementById('output-video');
     video.src = URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
@@ -33,7 +33,7 @@
     await worker.load();
     message.innerHTML = 'Start transcoding';
     await worker.write(name, files[0]);
-    await worker.run(`-i ${name} output.mp4`);
+    await worker.run(`-i /data/${name} output.mp4`, { inputPath: name, outputPath: 'output.mp4' });
     message.innerHTML = 'Complete transcoding';
     const { data } = await worker.read('output.mp4');

@@ -23,7 +23,7 @@
   const { createWorker } = FFmpeg;
   const worker = createWorker({
     corePath: '../../node_modules/@ffmpeg/core/ffmpeg-core.js',
-    logger: ({ message }) => console.log(message),
+    progress: p => console.log(p),
   });

   const transcode = async ({ target: { files } }) => {
@@ -31,11 +31,12 @@
     const { name } = files[0];
     message.innerHTML = 'Loading ffmpeg-core.js';
     await worker.load();
-    message.innerHTML = 'Start transcoding';
     await worker.write(name, files[0]);
+    message.innerHTML = 'Start transcoding';
     await worker.transcode(name, 'output.mp4');
     message.innerHTML = 'Complete transcoding';
     const { data } = await worker.read('output.mp4');
+    console.log(data);

     const video = document.getElementById('output-video');
     video.src = URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
@@ -15,9 +15,14 @@ const worker = createWorker({
     await worker.write(`tmp.${num}.png`, `../../tests/assets/triangle/tmp.${num}.png`);
   }
   console.log('Start transcoding');
-  await worker.run('-framerate 30 -pattern_type glob -i *.png -i audio.ogg -c:a copy -shortest -c:v libx264 -pix_fmt yuv420p out.mp4');
+  await worker.run('-framerate 30 -pattern_type glob -i /data/*.png -i /data/audio.ogg -c:a copy -shortest -c:v libx264 -pix_fmt yuv420p out.mp4', { outputPath: 'out.mp4' });
   const { data } = await worker.read('out.mp4');
   console.log('Complete transcoding');
+  await worker.remove('audio.ogg');
+  for (let i = 0; i < 60; i += 1) {
+    const num = `00${i}`.slice(-3);
+    await worker.remove(`tmp.${num}.png`);
+  }
   fs.writeFileSync('out.mp4', Buffer.from(data));
   process.exit(0);
 })();
@@ -9,7 +9,7 @@ const worker = createWorker({
   await worker.load();
   console.log('Start transcoding');
   await worker.write('flame.avi', '../../tests/assets/flame.avi');
-  await worker.run('-i flame.avi flame.mp4');
+  await worker.run('-i /data/flame.avi flame.mp4', { inputPath: 'flame.avi', outputPath: 'flame.mp4' });
   const { data } = await worker.read('flame.mp4');
   console.log('Complete transcoding');
   fs.writeFileSync('flame.mp4', Buffer.from(data));
package-lock.json (generated)
@@ -4258,6 +4258,11 @@
         "safer-buffer": ">= 2.1.2 < 3"
       }
     },
+    "idb": {
+      "version": "4.0.5",
+      "resolved": "https://registry.npmjs.org/idb/-/idb-4.0.5.tgz",
+      "integrity": "sha512-P+Fk9HT2h1DhXoE1YNK183SY+CRh2GHNh28de94sGwhe0bUA75JJeVJWt3SenE5p0BXK7maflIq29dl6UZHrFw=="
+    },
     "ieee754": {
       "version": "1.1.13",
       "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
@@ -13,7 +13,7 @@
     "lint": "eslint src",
     "wait": "rimraf dist && wait-on http://localhost:3000/dist/ffmpeg.dev.js",
     "test": "npm-run-all -p -r start test:all",
-    "test:all": "npm-run-all wait test:browser:* test:node:all",
+    "test:all": "npm-run-all wait test:node:all",
     "test:node": "nyc mocha --exit --bail --require ./scripts/test-helper.js",
     "test:node:all": "npm run test:node -- ./tests/*.test.js",
     "test:browser": "mocha-headless-chrome -a incognito -a no-sandbox -a disable-setuid-sandbox -a disable-logging -t 300000",
@@ -39,6 +39,7 @@
   "homepage": "https://github.com/ffmpegjs/ffmpeg.js#readme",
   "dependencies": {
     "@ffmpeg/core": "^0.4.0",
+    "idb": "^4.0.5",
     "is-url": "^1.2.4",
     "node-fetch": "^2.6.0",
     "regenerator-runtime": "^0.13.3",
@@ -8,8 +8,9 @@ const {
   spawnWorker,
   terminateWorker,
   onMessage,
-  loadMedia,
   send,
+  fetchFile,
+  fs,
 } = require('./worker/node');

 let workerCounter = 0;
@@ -58,57 +59,72 @@ module.exports = (_options = {}) => {
     }))
   );

-  const write = async (path, data, jobId) => (
+  const syncfs = (populate, jobId) => (
     startJob(createJob({
-      id: jobId,
-      action: 'write',
-      payload: {
-        path,
-        data: await loadMedia(data),
-      },
+      id: jobId, action: 'syncfs', payload: { populate },
     }))
   );

-  const writeText = async (path, text, jobId) => (
+  const write = async (path, data) => {
+    await syncfs();
+    await fs.writeFile(path, await fetchFile(data));
+    await syncfs(true);
+    return {
+      path: `/data/${path}`,
+    };
+  };
+
+  const writeText = async (path, text) => {
+    await syncfs(true);
+    await fs.writeFile(path, text);
+    await syncfs(true);
+    return {
+      path: `/data/${path}`,
+    };
+  };
+
+  const read = async (path, del = true) => {
+    const data = await fs.readFile(path);
+    if (del) {
+      await fs.deleteFile(path);
+    }
+    return {
+      data,
+    };
+  };
+
+  const remove = async (path) => {
+    await fs.deleteFile(path);
+    return {
+      path: `/data/${path}`,
+    };
+  };
+
+  const run = (args, opts = {}, jobId) => (
     startJob(createJob({
-      id: jobId,
-      action: 'writeText',
-      payload: {
-        path,
-        text,
-      },
+      id: jobId, action: 'run', payload: { args, options: opts },
     }))
   );

-  const run = (args, jobId) => (
+  const transcode = (inputPath, outputPath, opts = '', del = true, jobId) => (
+    run(
+      `${opts} -i /data/${inputPath} ${outputPath}`,
+      { inputPath, outputPath, del },
+      jobId,
+    )
+  );
+
+  const trim = (inputPath, outputPath, from, to, opts = '', del = true, jobId) => (
+    run(
+      `${opts} -ss ${from} -i /data/${inputPath} -t ${to} -c copy ${outputPath}`,
+      { inputPath, outputPath, del },
+      jobId,
+    )
+  );
+
+  const ls = (path, jobId) => (
     startJob(createJob({
-      id: jobId, action: 'run', payload: { args },
-    }))
-  );
-
-  const transcode = (inputPath, outputPath, opts = '', jobId) => (
-    run(`${opts} -i ${inputPath} ${outputPath}`, jobId)
-  );
-
-  const trim = (inputPath, outputPath, from, to, opts = '', jobId) => (
-    run(`${opts} -ss ${from} -i ${inputPath} -t ${to} -c copy ${outputPath}`, jobId)
-  );
-
-  const read = (path, jobId) => (
-    startJob(createJob({
-      id: jobId, action: 'read', payload: { path },
-    }))
-  );
-
-  const remove = (path, jobId) => (
-    startJob(createJob({
-      id: jobId, action: 'remove', payload: { path },
-    }))
-  );
-
-  const mkdir = (path, jobId) => (
-    startJob(createJob({
-      id: jobId, action: 'mkdir', payload: { path },
+      id: jobId, action: 'ls', payload: { path },
     }))
   );

@@ -151,14 +167,15 @@ module.exports = (_options = {}) => {
     setResolve,
     setReject,
     load,
+    syncfs,
     write,
     writeText,
-    transcode,
-    trim,
     read,
     remove,
-    mkdir,
     run,
+    transcode,
+    trim,
+    ls,
     terminate,
   };
 };
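In the hunk above, write/writeText/read/remove now talk to the fs adapter (IndexedDB in the browser, a ./data directory in Node) from the calling side rather than posting file contents to the worker; syncfs asks the worker to synchronize Emscripten's filesystem with that backing store; and run/transcode/trim forward { inputPath, outputPath, del } so the worker can copy the result out of MEMFS and optionally delete the input. A rough, hypothetical usage sketch of the reworked wrappers (inside an async function; fileOrUrlOrBuffer is a placeholder, and the resolved shapes follow the worker-script changes later in this diff):

const { path } = await worker.write('flame.avi', fileOrUrlOrBuffer); // -> { path: '/data/flame.avi' }
await worker.transcode('flame.avi', 'flame.mp4'); // runs `-i /data/flame.avi flame.mp4`; deletes the input since del defaults to true
const { data } = await worker.read('flame.mp4');  // reads from the backing store and deletes it by default
await worker.ls('/data');                         // asks the worker for a directory listing via FS.readdir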
@@ -6,17 +6,19 @@ const ts2sec = (ts) => {
 };

 module.exports = ({ message }, progress) => {
-  if (message.startsWith(' Duration')) {
-    const ts = message.split(', ')[0].split(': ')[1];
-    const d = ts2sec(ts);
-    if (duration === 0 || duration > d) {
-      duration = d;
-    }
-  } else if (message.startsWith('frame')) {
-    const ts = message.split('time=')[1].split(' ')[0];
-    const t = ts2sec(ts);
-    progress({ ratio: t / duration });
-  } else if (message.startsWith('video:')) {
-    progress({ ratio: 1 });
+  if (typeof message === 'string') {
+    if (message.startsWith(' Duration')) {
+      const ts = message.split(', ')[0].split(': ')[1];
+      const d = ts2sec(ts);
+      if (duration === 0 || duration > d) {
+        duration = d;
+      }
+    } else if (message.startsWith('frame')) {
+      const ts = message.split('time=')[1].split(' ')[0];
+      const t = ts2sec(ts);
+      progress({ ratio: t / duration });
+    } else if (message.startsWith('video:')) {
+      progress({ ratio: 1 });
+    }
   }
 };
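The only behavioural change above is the outer guard: the parser now checks that message is a string before calling startsWith on it; the Duration/frame/video: handling inside is unchanged. When a line does match, the progress callback wired up in createWorker (see the example earlier in this diff) receives a plain ratio object, roughly:

const worker = createWorker({
  progress: (p) => console.log(p), // e.g. { ratio: 0.42 } while encoding, then { ratio: 1 } at the end
});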
@@ -1,5 +1,6 @@
 const worker = require('../');
 const getCore = require('./getCore');
+const fs = require('../../worker/browser/fs');

 global.addEventListener('message', ({ data }) => {
   worker.dispatchHandlers(data, (obj) => postMessage(obj));
@@ -7,4 +8,5 @@ global.addEventListener('message', ({ data }) => {

 worker.setAdapter({
   getCore,
+  fs,
 });
@@ -30,7 +30,7 @@ const load = ({ workerId, payload: { options: { corePath } } }, res) => {
   if (Module == null) {
     const Core = adapter.getCore(corePath);
     Core()
-      .then((_Module) => {
+      .then(async (_Module) => {
         Module = _Module;
         Module.setLogger((message, type) => {
           res.progress({
@@ -45,60 +45,37 @@ const load = ({ workerId, payload: { options: { corePath } } }, res) => {
   }
 };

-const write = ({
+const syncfs = async ({
   payload: {
-    path,
-    data,
+    populate = false,
   },
 }, res) => {
-  const d = Uint8Array.from({ ...data, length: Object.keys(data).length });
-  Module.FS.writeFile(path, d);
-  res.resolve({ message: `Write ${path} (${d.length} bytes)` });
+  await Module.syncfs(populate);
+  res.resolve({ message: `Sync file system with populate=${populate}` });
 };

-const writeText = ({
-  payload: {
-    path,
-    text,
-  },
-}, res) => {
-  Module.FS.writeFile(path, text);
-  res.resolve({ message: `Write ${path} (${text.length} bytes)` });
-};
-
-const read = ({
+const ls = ({
   payload: {
     path,
   },
 }, res) => {
-  res.resolve(Module.FS.readFile(path));
+  const dirs = Module.FS.readdir(path);
+  res.resolve({ message: `List path ${path}`, dirs });
 };

-const remove = ({
-  payload: {
-    path,
-  },
-}, res) => {
-  Module.FS.unlink(path);
-  res.resolve({ message: `Delete ${path}` });
-};
-
-const mkdir = ({
-  payload: {
-    path,
-  },
-}, res) => {
-  Module.FS.mkdir(path);
-  res.resolve({ message: `Create Directory ${path}` });
-};
-
-const run = ({
+const run = async ({
   payload: {
     args: _args,
+    options: { inputPath, outputPath, del },
   },
 }, res) => {
   const args = [...defaultArgs, ..._args.trim().split(' ')];
   ffmpeg(args.length, strList2ptr(args));
+  await adapter.fs.writeFile(outputPath, Module.FS.readFile(outputPath));
+  Module.FS.unlink(outputPath);
+  if (del && typeof inputPath === 'string') {
+    await adapter.fs.deleteFile(inputPath);
+  }
   res.resolve({ message: `Complete ${args.join(' ')}` });
 };

@@ -118,11 +95,8 @@ exports.dispatchHandlers = (packet, send) => {
   try {
     ({
       load,
-      write,
-      writeText,
-      read,
-      remove,
-      mkdir,
+      ls,
+      syncfs,
       run,
     })[packet.action](packet, res);
   } catch (err) {
@@ -1,5 +1,6 @@
 const worker = require('../');
 const getCore = require('./getCore');
+const fs = require('../../worker/node/fs');

 process.on('message', (packet) => {
   worker.dispatchHandlers(packet, (obj) => process.send(obj));
@@ -7,4 +8,5 @@ process.on('message', (packet) => {

 worker.setAdapter({
   getCore,
+  fs,
 });
@@ -20,27 +20,25 @@ const readFromBlobOrFile = (blob) => (
   })
 );

-const loadMedia = async (image) => {
-  let data = image;
-  if (typeof image === 'undefined') {
+module.exports = async (_data) => {
+  let data = _data;
+  if (typeof _data === 'undefined') {
     return 'undefined';
   }

-  if (typeof image === 'string') {
-    // Base64 Media
-    if (/data:image\/([a-zA-Z]*);base64,([^"]*)/.test(image)) {
-      data = atob(image.split(',')[1])
+  if (typeof _data === 'string') {
+    // Base64 _data
+    if (/data:_data\/([a-zA-Z]*);base64,([^"]*)/.test(_data)) {
+      data = atob(_data.split(',')[1])
         .split('')
         .map((c) => c.charCodeAt(0));
     } else {
-      const res = await fetch(resolveURL(image));
+      const res = await fetch(resolveURL(_data));
       data = await res.arrayBuffer();
     }
-  } else if (image instanceof File || image instanceof Blob) {
-    data = await readFromBlobOrFile(image);
+  } else if (_data instanceof File || _data instanceof Blob) {
+    data = await readFromBlobOrFile(_data);
   }

   return new Uint8Array(data);
 };
-
-module.exports = loadMedia;
src/worker/browser/fs.js (new file)
@@ -0,0 +1,34 @@
+const { openDB } = require('idb');
+
+const getDB = () => openDB('/data', 21);
+
+const getDataKeyAndMode = async (db) => {
+  const dummy = await db.get('FILE_DATA', '/data/.DUMMY');
+  const dataKey = Object.keys(dummy).filter((k) => !['mode', 'timestamp'].includes(k)).pop();
+  return { dataKey, mode: dummy.mode };
+};
+
+module.exports = {
+  readFile: async (path) => {
+    const db = await getDB();
+    const { dataKey } = await getDataKeyAndMode(db);
+    return (await db.get('FILE_DATA', `/data/${path}`))[dataKey];
+  },
+  writeFile: async (path, data) => {
+    const db = await getDB();
+    const { dataKey, mode } = await getDataKeyAndMode(db);
+    await db.put(
+      'FILE_DATA',
+      {
+        [dataKey]: data,
+        mode,
+        timestamp: new Date(),
+      },
+      `/data/${path}`,
+    );
+  },
+  deleteFile: async (path) => {
+    const db = await getDB();
+    await db.delete('FILE_DATA', `/data/${path}`);
+  },
+};
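This adapter reads and writes the IndexedDB database that Emscripten's IDBFS keeps for a mount at /data: file entries live in the FILE_DATA object store under keys like '/data/<name>', and the '/data/.DUMMY' probe only discovers which property of an entry holds the contents and which mode bits to copy onto new entries. A hypothetical direct use, inside an async function, assuming the /data mount (and the .DUMMY entry) has already been created by the core and that bytes is a placeholder Uint8Array; normally worker.write()/read()/remove() call this for you:

const fs = require('./fs'); // src/worker/browser/fs.js from this commit

await fs.writeFile('flame.avi', bytes); // stored under key '/data/flame.avi'
const contents = await fs.readFile('flame.avi');
await fs.deleteFile('flame.avi');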
@@ -12,7 +12,8 @@ const spawnWorker = require('./spawnWorker');
 const terminateWorker = require('./terminateWorker');
 const onMessage = require('./onMessage');
 const send = require('./send');
-const loadMedia = require('./loadMedia');
+const fetchFile = require('./fetchFile');
+const fs = require('./fs');

 module.exports = {
   defaultOptions,
@@ -20,5 +21,6 @@ module.exports = {
   terminateWorker,
   onMessage,
   send,
-  loadMedia,
+  fetchFile,
+  fs,
 };
src/worker/node/fetchFile.js (new file)
@@ -0,0 +1,26 @@
+const util = require('util');
+const fs = require('fs');
+const fetch = require('node-fetch');
+const isURL = require('is-url');
+
+module.exports = async (_data) => {
+  let data = _data;
+  if (typeof _data === 'undefined') {
+    return _data;
+  }
+
+  if (typeof _data === 'string') {
+    if (isURL(_data) || _data.startsWith('chrome-extension://') || _data.startsWith('file://')) {
+      const res = await fetch(_data);
+      data = await res.arrayBuffer();
+    } else if (/data:_data\/([a-zA-Z]*);base64,([^"]*)/.test(_data)) {
+      data = Buffer.from(_data.split(',')[1], 'base64');
+    } else {
+      data = await util.promisify(fs.readFile)(_data);
+    }
+  } else if (Buffer.isBuffer(_data)) {
+    data = _data;
+  }
+
+  return data;
+};
src/worker/node/fs.js (new file)
@@ -0,0 +1,16 @@
+const util = require('util');
+const fs = require('fs');
+
+const readFile = util.promisify(fs.readFile);
+const writeFile = util.promisify(fs.writeFile);
+const deleteFile = util.promisify(fs.unlink);
+
+module.exports = (path) => (
+  readFile(`./data/${path}`)
+);
+
+module.exports = {
+  readFile: (path) => readFile(`./data/${path}`),
+  writeFile: (path, data) => writeFile(`./data/${path}`, data),
+  deleteFile: (path) => deleteFile(`./data/${path}`),
+};
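The Node counterpart maps the same logical names onto the host filesystem: entries are stored under a ./data directory, which is presumably what the core mounts at /data via NODEFS. A hypothetical direct use, inside an async function, assuming ./data exists relative to the working directory and buffer is a placeholder Buffer:

const fs = require('./fs'); // src/worker/node/fs.js from this commit

await fs.writeFile('flame.avi', buffer);      // writes ./data/flame.avi
const data = await fs.readFile('flame.avi');  // reads ./data/flame.avi
await fs.deleteFile('flame.avi');             // unlinks ./data/flame.avi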
@@ -1,18 +1,10 @@
-/**
- *
- * Tesseract Worker impl. for node (using child_process)
- *
- * @fileoverview Tesseract Worker impl. for node
- * @author Kevin Kwok <antimatter15@gmail.com>
- * @author Guillermo Webster <gui@mit.edu>
- * @author Jerome Wu <jeromewus@gmail.com>
- */
 const defaultOptions = require('./defaultOptions');
 const spawnWorker = require('./spawnWorker');
 const terminateWorker = require('./terminateWorker');
 const onMessage = require('./onMessage');
 const send = require('./send');
-const loadMedia = require('./loadMedia');
+const fetchFile = require('./fetchFile');
+const fs = require('./fs');

 module.exports = {
   defaultOptions,
@@ -20,5 +12,6 @@ module.exports = {
   terminateWorker,
   onMessage,
   send,
-  loadMedia,
+  fetchFile,
+  fs,
 };
@@ -1,28 +0,0 @@
-const util = require('util');
-const fs = require('fs');
-const fetch = require('node-fetch');
-const isURL = require('is-url');
-
-const readFile = util.promisify(fs.readFile);
-
-module.exports = async (media) => {
-  let data = media;
-  if (typeof media === 'undefined') {
-    return media;
-  }
-
-  if (typeof media === 'string') {
-    if (isURL(media) || media.startsWith('chrome-extension://') || media.startsWith('file://')) {
-      const res = await fetch(media);
-      data = await res.arrayBuffer();
-    } else if (/data:media\/([a-zA-Z]*);base64,([^"]*)/.test(media)) {
-      data = Buffer.from(media.split(',')[1], 'base64');
-    } else {
-      data = await readFile(media);
-    }
-  } else if (Buffer.isBuffer(media)) {
-    data = media;
-  }
-
-  return new Uint8Array(data);
-};
@@ -12,9 +12,7 @@ describe('transcode()', () => {
     it(`transcode ${name}`, async () => {
       await worker.write(name, `${BASE_URL}/${name}`);
       await worker.transcode(name, 'output.mp4');
-      await worker.remove(name);
       const { data } = await worker.read('output.mp4');
-      await worker.remove('output.mp4');
       expect(data.length).to.be(FLAME_MP4_LENGTH);
     }).timeout(TIMEOUT)
   ));