feat: refactor folder structure, add new codec parsers and generators, and add unit tests

This commit is contained in:
2025-03-25 02:38:00 +08:00
parent 42e36e3c68
commit 39a4cf2773
67 changed files with 2211 additions and 514 deletions

View File

@@ -1,2 +0,0 @@
public/video/huge/*
!public/video/huge/.gitkeep

View File

@@ -1,5 +1,5 @@
{
"name": "mock",
"name": "@konoplayer/mock",
"version": "0.1.0",
"private": true,
"scripts": {

2
apps/mock/public/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
video/huge/*
!video/huge/.gitkeep

View File

@@ -4,10 +4,8 @@
"composite": true,
"module": "CommonJS",
"moduleResolution": "node",
"declaration": true,
"emitDeclarationOnly": false,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"allowImportingTsExtensions": false,
"outDir": "./dist",
"rootDir": ".",

View File

@@ -1,6 +1,6 @@
{
"name": "playground",
"version": "1.0.0",
"name": "@konoplayer/playground",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
@@ -9,11 +9,11 @@
"preview": "rsbuild preview"
},
"dependencies": {
"konoebml": "0.1.2",
"lit": "^3.2.1"
"lit": "^3.2.1",
"@konoplayer/core": "workspace:*",
"@konoplayer/matroska": "workspace:*"
},
"devDependencies": {
"@rsbuild/core": "^1.2.14",
"typescript": "^5.8.2"
"@rsbuild/core": "^1.2.14"
}
}

View File

@@ -1,65 +0,0 @@
export interface RangedStream {
  controller: AbortController;
  response: Response;
  body: ReadableStream;
  totalSize?: number;
}

export interface CreateRangedStreamOptions {
  url: string;
  byteStart?: number;
  byteEnd?: number;
}

/**
 * Open an HTTP byte-range request against `url`.
 *
 * Sends a `Range: bytes=<start>-[<end>]` header (both bounds inclusive; the
 * end is omitted to stream to EOF) and validates that the server actually
 * supports byte ranges before handing back the response body stream.
 *
 * @param options.url - resource to fetch
 * @param options.byteStart - first byte to request (default 0)
 * @param options.byteEnd - last byte to request (inclusive); omit for open-ended
 * @returns controller (for aborting), raw response, its body stream, and the
 *   total resource size when the server reported one
 * @throws Error when the response is not ok, ranges are unsupported, or the
 *   body is not a readable stream
 */
export async function createRangedStream({
  url,
  byteStart = 0,
  byteEnd,
}: CreateRangedStreamOptions): Promise<RangedStream> {
  const controller = new AbortController();
  const signal = controller.signal;
  const headers = new Headers();
  headers.append(
    'Range',
    typeof byteEnd === 'number'
      ? `bytes=${byteStart}-${byteEnd}`
      : `bytes=${byteStart}-`
  );
  const response = await fetch(url, { signal, headers });
  if (!response.ok) {
    throw new Error('fetch video stream failed');
  }
  const acceptRanges = response.headers.get('Accept-Ranges');
  if (acceptRanges !== 'bytes') {
    throw new Error('video server does not support byte ranges');
  }
  const body = response.body;
  if (!(body instanceof ReadableStream)) {
    throw new Error('can not get readable stream from response.body');
  }
  const contentRange = response.headers.get('Content-Range');
  //
  // Content-Range Header Syntax:
  // Content-Range: <unit> <range-start>-<range-end>/<size>
  // Content-Range: <unit> <range-start>-<range-end>/*
  // Content-Range: <unit> */<size>
  //
  // An unknown size is sent as "*"; the previous code propagated
  // Number.parseInt('*') === NaN into `totalSize`. Normalize "*" (or any
  // malformed size) to undefined so callers can rely on `totalSize?: number`.
  const sizePart = contentRange?.split('/')[1];
  const parsedSize =
    sizePart !== undefined ? Number.parseInt(sizePart, 10) : Number.NaN;
  const totalSize = Number.isNaN(parsedSize) ? undefined : parsedSize;
  return {
    controller,
    response,
    body,
    totalSize,
  };
}

View File

@@ -1,60 +0,0 @@
export interface RangedStream {
  controller: AbortController;
  response: Response;
  body: ReadableStream<Uint8Array>;
  totalSize?: number;
}

/**
 * Open an HTTP byte-range request against `url`.
 *
 * Sends a `Range: bytes=<start>-[<end>]` header (both bounds inclusive; the
 * end is omitted to stream to EOF) and validates that the server actually
 * supports byte ranges before handing back the response body stream.
 *
 * @param url - resource to fetch
 * @param byteStart - first byte to request (default 0)
 * @param byteEnd - last byte to request (inclusive); omit for open-ended
 * @returns controller (for aborting), raw response, its body stream, and the
 *   total resource size when the server reported one
 * @throws Error when the response is not ok, ranges are unsupported, or the
 *   body is not a readable stream
 */
export async function createRangedStream(
  url: string,
  byteStart = 0,
  byteEnd?: number
): Promise<RangedStream> {
  const controller = new AbortController();
  const signal = controller.signal;
  const headers = new Headers();
  headers.append(
    'Range',
    typeof byteEnd === 'number'
      ? `bytes=${byteStart}-${byteEnd}`
      : `bytes=${byteStart}-`
  );
  const response = await fetch(url, { signal, headers });
  if (!response.ok) {
    throw new Error('fetch video stream failed');
  }
  const acceptRanges = response.headers.get('Accept-Ranges');
  if (acceptRanges !== 'bytes') {
    throw new Error('video server does not support byte ranges');
  }
  const body = response.body;
  if (!(body instanceof ReadableStream)) {
    throw new Error('can not get readable stream from response.body');
  }
  const contentRange = response.headers.get('Content-Range');
  //
  // Content-Range Header Syntax:
  // Content-Range: <unit> <range-start>-<range-end>/<size>
  // Content-Range: <unit> <range-start>-<range-end>/*
  // Content-Range: <unit> */<size>
  //
  // An unknown size is sent as "*"; the previous code propagated
  // Number.parseInt('*') === NaN into `totalSize`. Normalize "*" (or any
  // malformed size) to undefined so callers can rely on `totalSize?: number`.
  const sizePart = contentRange?.split('/')[1];
  const parsedSize =
    sizePart !== undefined ? Number.parseInt(sizePart, 10) : Number.NaN;
  const totalSize = Number.isNaN(parsedSize) ? undefined : parsedSize;
  return {
    controller,
    response,
    body,
    totalSize,
  };
}

View File

@@ -1,32 +0,0 @@
/**
 * Internal audio codec family identifiers.
 *
 * Numeric values are a stable contract (MaxValue must track the last real
 * entry), so entries must never be reordered or renumbered — only appended.
 * NOTE(review): the list appears to mirror Chromium's media audio codec
 * enum — confirm before adding values so the two stay in sync.
 */
export enum AudioCodec {
  Unknown = 0,
  AAC = 1,
  MP3 = 2,
  PCM = 3,
  Vorbis = 4,
  FLAC = 5,
  AMR_NB = 6,
  AMR_WB = 7,
  PCM_MULAW = 8,
  GSM_MS = 9,
  PCM_S16BE = 10,
  PCM_S24BE = 11,
  Opus = 12,
  EAC3 = 13,
  PCM_ALAW = 14,
  ALAC = 15,
  AC3 = 16,
  MpegHAudio = 17,
  DTS = 18,
  DTSXP2 = 19,
  DTSE = 20,
  AC4 = 21,
  IAMF = 22,
  PCM_S32BE = 23,
  PCM_S32LE = 24,
  PCM_S24LE = 25,
  PCM_S16LE = 26,
  PCM_F32BE = 27,
  PCM_F32LE = 28,
  MaxValue = PCM_F32LE, // Must equal the last "real" codec above.
}

View File

@@ -1,25 +0,0 @@
/** Raised when a codec cannot be handled in a given context (e.g. WebCodecs). */
export class UnsupportedCodecError extends Error {
  constructor(codec: string, context: string) {
    super(`codec ${codec} is not supported in ${context} context`);
    this.name = 'UnsupportedCodecError';
  }
}

/** Raised when a track's codec_private payload cannot be parsed. */
export class ParseCodecPrivateError extends Error {
  constructor(codec: string, detail: string) {
    // Fixed message typo: "code" -> "codec".
    super(`codec ${codec} private parse failed: ${detail}`);
    this.name = 'ParseCodecPrivateError';
  }
}

/** Raised for states that should be impossible if the parser logic is sound. */
export class UnreachableOrLogicError extends Error {
  constructor(detail: string) {
    super(`unreachable or logic error: ${detail}`);
    this.name = 'UnreachableOrLogicError';
  }
}

/**
 * Aggregate error for codec initialization: individual failures are
 * accumulated in `cause` (narrowing ES2022 `Error.cause` to an array).
 */
export class ParseCodecErrors extends Error {
  cause: Error[] = [];
  constructor() {
    super('failed to parse codecs');
    this.name = 'ParseCodecErrors';
  }
}

View File

@@ -1,97 +0,0 @@
/**
 * Internal video codec family identifiers.
 *
 * Numeric values are a stable contract (MaxValue must track the last real
 * entry), so entries must never be reordered or renumbered — only appended.
 */
export enum VideoCodec {
  Unknown = 0,
  H264 = 1,
  VC1 = 2,
  MPEG2 = 3,
  MPEG4 = 4,
  Theora = 5,
  VP8 = 6,
  VP9 = 7,
  HEVC = 8,
  DolbyVision = 9,
  AV1 = 10,
  MaxValue = AV1, // Must equal the last "real" codec above.
}

/**
 * Flat profile enumeration across all video codec families.
 *
 * Each family occupies a contiguous numeric range bracketed by *_MIN/*_MAX
 * aliases; gaps (e.g. the deprecated DolbyVision profile 4 at 20) are
 * intentional and must be preserved. NOTE(review): this appears to mirror
 * Chromium's media/base video codec profile list — confirm before extending.
 */
export enum VideoCodecProfile {
  VIDEO_CODEC_PROFILE_UNKNOWN = -1,
  VIDEO_CODEC_PROFILE_MIN = VIDEO_CODEC_PROFILE_UNKNOWN,
  H264PROFILE_MIN = 0,
  H264PROFILE_BASELINE = H264PROFILE_MIN,
  H264PROFILE_MAIN = 1,
  H264PROFILE_EXTENDED = 2,
  H264PROFILE_HIGH = 3,
  H264PROFILE_HIGH10PROFILE = 4,
  H264PROFILE_HIGH422PROFILE = 5,
  H264PROFILE_HIGH444PREDICTIVEPROFILE = 6,
  H264PROFILE_SCALABLEBASELINE = 7,
  H264PROFILE_SCALABLEHIGH = 8,
  H264PROFILE_STEREOHIGH = 9,
  H264PROFILE_MULTIVIEWHIGH = 10,
  H264PROFILE_MAX = H264PROFILE_MULTIVIEWHIGH,
  VP8PROFILE_MIN = 11,
  VP8PROFILE_ANY = VP8PROFILE_MIN,
  VP8PROFILE_MAX = VP8PROFILE_ANY,
  VP9PROFILE_MIN = 12,
  VP9PROFILE_PROFILE0 = VP9PROFILE_MIN,
  VP9PROFILE_PROFILE1 = 13,
  VP9PROFILE_PROFILE2 = 14,
  VP9PROFILE_PROFILE3 = 15,
  VP9PROFILE_MAX = VP9PROFILE_PROFILE3,
  HEVCPROFILE_MIN = 16,
  HEVCPROFILE_MAIN = HEVCPROFILE_MIN,
  HEVCPROFILE_MAIN10 = 17,
  HEVCPROFILE_MAIN_STILL_PICTURE = 18,
  HEVCPROFILE_MAX = HEVCPROFILE_MAIN_STILL_PICTURE,
  DOLBYVISION_PROFILE0 = 19,
  // Deprecated: DOLBYVISION_PROFILE4 = 20,
  DOLBYVISION_PROFILE5 = 21,
  DOLBYVISION_PROFILE7 = 22,
  THEORAPROFILE_MIN = 23,
  THEORAPROFILE_ANY = THEORAPROFILE_MIN,
  THEORAPROFILE_MAX = THEORAPROFILE_ANY,
  AV1PROFILE_MIN = 24,
  AV1PROFILE_PROFILE_MAIN = AV1PROFILE_MIN,
  AV1PROFILE_PROFILE_HIGH = 25,
  AV1PROFILE_PROFILE_PRO = 26,
  AV1PROFILE_MAX = AV1PROFILE_PROFILE_PRO,
  DOLBYVISION_PROFILE8 = 27,
  DOLBYVISION_PROFILE9 = 28,
  HEVCPROFILE_EXT_MIN = 29,
  HEVCPROFILE_REXT = HEVCPROFILE_EXT_MIN,
  HEVCPROFILE_HIGH_THROUGHPUT = 30,
  HEVCPROFILE_MULTIVIEW_MAIN = 31,
  HEVCPROFILE_SCALABLE_MAIN = 32,
  HEVCPROFILE_3D_MAIN = 33,
  HEVCPROFILE_SCREEN_EXTENDED = 34,
  HEVCPROFILE_SCALABLE_REXT = 35,
  HEVCPROFILE_HIGH_THROUGHPUT_SCREEN_EXTENDED = 36,
  HEVCPROFILE_EXT_MAX = HEVCPROFILE_HIGH_THROUGHPUT_SCREEN_EXTENDED,
  VVCPROFILE_MIN = 37,
  VVCPROFILE_MAIN10 = VVCPROFILE_MIN,
  VVCPROFILE_MAIN12 = 38,
  VVCPROFILE_MAIN12_INTRA = 39,
  // NOTE(review): "PROIFLE" typo is preserved — renaming would break callers.
  VVCPROIFLE_MULTILAYER_MAIN10 = 40,
  VVCPROFILE_MAIN10_444 = 41,
  VVCPROFILE_MAIN12_444 = 42,
  VVCPROFILE_MAIN16_444 = 43,
  VVCPROFILE_MAIN12_444_INTRA = 44,
  VVCPROFILE_MAIN16_444_INTRA = 45,
  VVCPROFILE_MULTILAYER_MAIN10_444 = 46,
  VVCPROFILE_MAIN10_STILL_PICTURE = 47,
  VVCPROFILE_MAIN12_STILL_PICTURE = 48,
  VVCPROFILE_MAIN10_444_STILL_PICTURE = 49,
  VVCPROFILE_MAIN12_444_STILL_PICTURE = 50,
  VVCPROFILE_MAIN16_444_STILL_PICTURE = 51,
  VVCPROFILE_MAX = VVCPROFILE_MAIN16_444_STILL_PICTURE,
  VIDEO_CODEC_PROFILE_MAX = VVCPROFILE_MAIN16_444_STILL_PICTURE,
}

// Codec level as an unsigned 32-bit value; semantics are codec-specific.
export type VideoCodecLevel = number; // uint32

// Sentinel meaning "no level information available".
export const NoVideoCodecLevel: VideoCodecLevel = 0;

// A fully qualified (codec, profile, level) triple.
export type VideoCodecProfileLevel = {
  codec: VideoCodec;
  profile: VideoCodecProfile;
  level: VideoCodecLevel;
};

View File

@@ -1,113 +0,0 @@
import { ParseCodecPrivateError } from '@/media/base/errors';
import { ArkErrors, type } from 'arktype';
export const AAC_CODEC_TYPE = 'AAC';

// audioObjectType values accepted by this parser: 1 Main, 2 LC, 3 SSR,
// 4 LTP, 5 SBR (HE-AAC), 29 PS (HE-AAC v2).
// NOTE(review): 67 is outside the standard ISO/IEC 14496-3 AOT range —
// confirm which muxer emits it before relying on it.
export const AudioObjectTypeSchema = type('1 | 2 | 3 | 4 | 5 | 29 | 67');

// samplingFrequencyIndex 0..12 per ISO/IEC 14496-3 (0 = 96000 Hz down to
// 12 = 7350 Hz). The previous schema started at 1 and therefore rejected
// valid 96 kHz streams. 13/14 are reserved and 15 ("explicit frequency
// follows") is not handled by this parser, so they remain excluded.
export const SamplingFrequencyIndexSchema = type(
  '0|1|2|3|4|5|6|7|8|9|10|11|12'
);

// channel_configuration 0 (layout signalled in-band) through 7 (7.1);
// 0 was previously rejected even though it is a legal value.
export const ChannelConfigurationSchema = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7');

export const AudioSpecificConfigSchema = type({
  audioObjectType: AudioObjectTypeSchema, // AAC profiles: Main, LC, SSR, LTP, HE, HE v2
  samplingFrequencyIndex: SamplingFrequencyIndexSchema.optional(), // Sampling rate index
  channelConfiguration: ChannelConfigurationSchema, // Channel config (0-7)
  sbrPresent: type.boolean.optional(), // Optional: Indicates SBR presence
  psPresent: type.boolean.optional(), // Optional: Indicates PS presence (for HE-AAC v2)
});

export type AudioSpecificConfigType = typeof AudioSpecificConfigSchema.infer;
/**
 * Parse AudioSpecificConfig from codec_private Uint8Array
 *
 * Bit layout (read MSB-first): 5 bits audioObjectType, 4 bits
 * samplingFrequencyIndex, 4 bits channelConfiguration.
 *
 * @param codecPrivate - Uint8Array containing codec_private data
 * @returns Parsed AudioSpecificConfig or throws an error if invalid
 * @throws ParseCodecPrivateError when the payload is shorter than 2 bytes or
 *   the decoded fields fail schema validation
 */
export function parseAudioSpecificConfig(
  codecPrivate: Uint8Array
): AudioSpecificConfigType {
  if (codecPrivate.length < 2) {
    throw new ParseCodecPrivateError(
      AAC_CODEC_TYPE,
      'codec_private data too short'
    );
  }

  // Create a DataView for bit-level manipulation
  const view = new DataView(
    codecPrivate.buffer,
    codecPrivate.byteOffset,
    codecPrivate.byteLength
  );
  let byteOffset = 0;
  let bitOffset = 0;

  // Helper function to read specific number of bits, MSB-first across bytes.
  function readBits(bits: number): number {
    let value = 0;
    for (let i = 0; i < bits; i++) {
      const byte = view.getUint8(byteOffset);
      const bit = (byte >> (7 - bitOffset)) & 1;
      value = (value << 1) | bit;
      bitOffset++;
      if (bitOffset === 8) {
        bitOffset = 0;
        byteOffset++;
      }
    }
    return value;
  }

  // Read 5 bits for audioObjectType
  // NOTE(review): audioObjectType 31 (escape sequence) and
  // samplingFrequencyIndex 15 (explicit 24-bit frequency follows) are not
  // decoded here; such streams fail the schema validation below — confirm
  // that is acceptable for the supported inputs.
  const audioObjectType = readBits(5);
  // Read 4 bits for samplingFrequencyIndex
  const samplingFrequencyIndex = readBits(4);
  // Read 4 bits for channelConfiguration
  const channelConfiguration = readBits(4);

  // Check for SBR/PS extension (if audioObjectType indicates HE-AAC)
  let sbrPresent = false;
  let psPresent = false;
  if (audioObjectType === 5 || audioObjectType === 29) {
    sbrPresent = true;
    if (audioObjectType === 29) {
      psPresent = true; // HE-AAC v2 includes Parametric Stereo
    }
    // Skip extension-specific bits if present (simplified here)
    // In real cases, additional parsing may be needed
  }

  // Construct the result object; the optional flags are only attached when
  // true so the object matches the schema's optional fields.
  const config: AudioSpecificConfigType = {
    audioObjectType:
      audioObjectType as AudioSpecificConfigType['audioObjectType'],
    samplingFrequencyIndex:
      samplingFrequencyIndex as AudioSpecificConfigType['samplingFrequencyIndex'],
    channelConfiguration:
      channelConfiguration as AudioSpecificConfigType['channelConfiguration'],
    ...(sbrPresent && { sbrPresent }),
    ...(psPresent && { psPresent }),
  };

  // Validate with arktype; validation failures are wrapped with the raw
  // ArkErrors attached as `cause` for debugging.
  const validation = AudioSpecificConfigSchema(config);
  if (validation instanceof ArkErrors) {
    const error = new ParseCodecPrivateError(
      AAC_CODEC_TYPE,
      'Invalid AudioSpecificConfig'
    );
    error.cause = validation;
    throw error;
  }
  return config;
}
/**
 * Build the RFC 6381 codec parameter string for an AAC track,
 * `mp4a.40.<audioObjectType>` (e.g. `mp4a.40.2` for AAC-LC).
 */
export function genCodecIdByAudioSpecificConfig(
  config: AudioSpecificConfigType
) {
  const { audioObjectType } = config;
  return `mp4a.40.${audioObjectType}`;
}

View File

@@ -1,125 +0,0 @@
import { ParseCodecPrivateError } from '@/media/base/errors';
import { type } from 'arktype';
// Human-readable codec label used in error messages.
export const AVC_CODEC_TYPE = 'h264(AVC)';

/**
 * Shape of a parsed AVCDecoderConfigurationRecord (the `avcC` box payload
 * stored in Matroska CodecPrivate for V_MPEG4/ISO/AVC tracks).
 */
export const AVCDecoderConfigurationRecordSchema = type({
  configurationVersion: type.number, // Configuration version, typically 1
  avcProfileIndication: type.number, // AVC profile
  profileCompatibility: type.number, // Profile compatibility
  avcLevelIndication: type.number, // AVC level
  lengthSizeMinusOne: type.number, // NAL unit length field size minus 1
  sps: type
    .instanceOf(Uint8Array<ArrayBufferLike>)
    .array()
    .atLeastLength(1), // Sequence Parameter Sets (SPS)
  pps: type
    .instanceOf(Uint8Array<ArrayBufferLike>)
    .array()
    .atLeastLength(1), // Picture Parameter Sets (PPS)
});
export type AVCDecoderConfigurationRecordType =
  typeof AVCDecoderConfigurationRecordSchema.infer;

/**
 * Parse AVCDecoderConfigurationRecord from codec_private Uint8Array.
 *
 * Layout (ISO/IEC 14496-15): 4 header bytes (version, profile, profile
 * compatibility, level), 1 byte whose low 2 bits are lengthSizeMinusOne,
 * 1 byte whose low 5 bits are the SPS count, then length-prefixed SPS NAL
 * units, 1 byte PPS count, then length-prefixed PPS NAL units.
 *
 * @param codecPrivate - Uint8Array containing codec_private data
 * @returns Parsed AVCDecoderConfigurationRecord or throws an error if invalid
 * @throws ParseCodecPrivateError if the buffer is truncated or malformed
 */
export function parseAVCDecoderConfigurationRecord(
  codecPrivate: Uint8Array
): AVCDecoderConfigurationRecordType {
  let offset = 0;
  // The fixed header is 6 bytes (through the SPS count byte). The previous
  // check of `< 5` allowed reading one byte past the end of a 5-byte buffer.
  if (codecPrivate.length < 6) {
    throw new ParseCodecPrivateError(
      AVC_CODEC_TYPE,
      'Input data too short for AVCDecoderConfigurationRecord'
    );
  }

  const configurationVersion = codecPrivate[offset++];
  const avcProfileIndication = codecPrivate[offset++];
  const profileCompatibility = codecPrivate[offset++];
  const avcLevelIndication = codecPrivate[offset++];
  // Read lengthSizeMinusOne (first 6 bits are reserved, typically set; last 2 bits are the value)
  const lengthSizeMinusOne = codecPrivate[offset++] & 0x03;

  // Read number of SPS (first 3 bits are reserved, typically set; last 5 bits are SPS count)
  const numOfSPS = codecPrivate[offset++] & 0x1f;
  const sps: Uint8Array[] = [];
  // Parse SPS: each entry is a 16-bit big-endian length followed by payload.
  for (let i = 0; i < numOfSPS; i++) {
    if (offset + 2 > codecPrivate.length) {
      throw new ParseCodecPrivateError(AVC_CODEC_TYPE, 'Invalid SPS length');
    }
    const spsLength = (codecPrivate[offset] << 8) | codecPrivate[offset + 1];
    offset += 2;
    if (offset + spsLength > codecPrivate.length) {
      throw new ParseCodecPrivateError(
        AVC_CODEC_TYPE,
        'SPS data exceeds buffer length'
      );
    }
    // subarray: views share the underlying buffer, no copy is made.
    sps.push(codecPrivate.subarray(offset, offset + spsLength));
    offset += spsLength;
  }

  // Read number of PPS
  if (offset >= codecPrivate.length) {
    throw new ParseCodecPrivateError(AVC_CODEC_TYPE, 'No space for PPS count');
  }
  const numOfPPS = codecPrivate[offset++];
  const pps: Uint8Array[] = [];
  // Parse PPS: same 16-bit length-prefixed encoding as SPS.
  for (let i = 0; i < numOfPPS; i++) {
    if (offset + 2 > codecPrivate.length) {
      throw new ParseCodecPrivateError(AVC_CODEC_TYPE, 'Invalid PPS length');
    }
    const ppsLength = (codecPrivate[offset] << 8) | codecPrivate[offset + 1];
    offset += 2;
    if (offset + ppsLength > codecPrivate.length) {
      throw new ParseCodecPrivateError(
        AVC_CODEC_TYPE,
        'PPS data exceeds buffer length'
      );
    }
    pps.push(codecPrivate.subarray(offset, offset + ppsLength));
    offset += ppsLength;
  }

  return {
    configurationVersion,
    avcProfileIndication,
    profileCompatibility,
    avcLevelIndication,
    lengthSizeMinusOne,
    sps,
    pps,
  };
}
/**
 * Build the RFC 6381 codec parameter string `avc1.PPCCLL`, where PP is
 * profile_idc, CC is the constraint/compatibility flags byte and LL is
 * level_idc, each as two hex digits (e.g. profile 0x64 / level 40 ->
 * `avc1.640028`).
 *
 * The previous implementation divided the level by 10 and then stripped
 * every character with `.replace(/./g, '')`, so the level was always "00";
 * level_idc is emitted directly as a hex byte instead.
 */
export function genCodecIdByAVCDecoderConfigurationRecord(
  config: AVCDecoderConfigurationRecordType
): string {
  const toHexByte = (value: number): string =>
    (value & 0xff).toString(16).padStart(2, '0');
  const profileHex = toHexByte(config.avcProfileIndication);
  const profileCompatHex = toHexByte(config.profileCompatibility);
  const levelHex = toHexByte(config.avcLevelIndication);
  return `avc1.${profileHex}${profileCompatHex}${levelHex}`;
}

View File

@@ -1,144 +0,0 @@
import { ParseCodecPrivateError } from '@/media/base/errors';
import { ArkErrors, type } from 'arktype';
// Human-readable codec label used in error messages.
export const HEVC_CODEC_TYPE = 'h265(HEVC)';

/**
 * One NAL-unit array inside an HEVCDecoderConfigurationRecord (groups all
 * NAL units of one type, e.g. VPS/SPS/PPS).
 */
export const HEVCDecoderConfigurationRecordArraySchema = type({
  arrayCompleteness: type.boolean,
  reserved: type.number,
  NALUnitType: type.number,
  numNalus: type.number,
  nalUnits: type.instanceOf(Uint8Array<ArrayBufferLike>).array(),
});

export type HEVCDecoderConfigurationRecordArrayType =
  typeof HEVCDecoderConfigurationRecordArraySchema.infer;

// Define the schema for HEVCDecoderConfigurationRecord (the `hvcC` box
// payload stored in Matroska CodecPrivate for V_MPEGH/ISO/HEVC tracks).
export const HEVCDecoderConfigurationRecordSchema = type({
  configurationVersion: type.number, // Must be 1
  generalProfileSpace: type.number,
  generalTierFlag: type.boolean,
  generalProfileIdc: type.number,
  generalProfileCompatibilityFlags: type.number,
  generalConstraintIndicatorFlags: type.number.array().exactlyLength(6), // Fixed 6-byte array
  generalLevelIdc: type.number,
  reserved1: type.number, // 4 bits reserved, must be 1111
  minSpatialSegmentationIdc: type.number,
  reserved2: type.number, // 6 bits reserved, must be 111111
  parallelismType: type.number,
  chromaFormat: type.number,
  bitDepthLumaMinus8: type.number,
  bitDepthChromaMinus8: type.number,
  avgFrameRate: type.number,
  constantFrameRate: type.number,
  numTemporalLayers: type.number,
  temporalIdNested: type.boolean,
  lengthSizeMinusOne: type.number,
  numOfArrays: type.number,
  arrays: HEVCDecoderConfigurationRecordArraySchema.array(),
});

export type HEVCDecoderConfigurationRecordType =
  typeof HEVCDecoderConfigurationRecordSchema.infer;
/**
 * Parse HEVCDecoderConfigurationRecord from codec_private Uint8Array
 *
 * Byte layout: byte 0 version; byte 1 packs profile space (2 bits), tier
 * flag (1 bit) and profile idc (5 bits); bytes 2-5 profile compatibility
 * flags; bytes 6-11 constraint indicator flags; byte 12 level idc; the
 * remaining header bytes pack the bit-fields read below; NAL-unit arrays
 * follow from byte 19.
 *
 * NOTE(review): unlike the AVC parser, there is no bounds checking here —
 * a truncated buffer yields NaN/undefined fields and is only rejected by
 * the schema validation at the end. Confirm inputs are pre-validated.
 *
 * @param codecPrivate - Uint8Array containing codec_private data
 * @returns Parsed HEVCDecoderConfigurationRecord or throws an error if invalid
 * @throws ParseCodecPrivateError when schema validation fails
 */
export function parseHEVCDecoderConfigurationRecord(
  codecPrivate: Uint8Array
): HEVCDecoderConfigurationRecordType {
  let offset = 0;
  // Read and validate basic fields. Note `offset` is only advanced by the
  // two `offset++` reads (version + profile byte); the remaining header
  // fields are addressed relative to the resulting offset of 2.
  const config: HEVCDecoderConfigurationRecordType = {
    configurationVersion: codecPrivate[offset++],
    generalProfileSpace: codecPrivate[offset] >> 6,
    generalTierFlag: Boolean(codecPrivate[offset] & 0x20),
    generalProfileIdc: codecPrivate[offset++] & 0x1f,
    generalProfileCompatibilityFlags:
      (codecPrivate[offset] << 24) |
      (codecPrivate[offset + 1] << 16) |
      (codecPrivate[offset + 2] << 8) |
      codecPrivate[offset + 3],
    generalConstraintIndicatorFlags: Array.from(
      codecPrivate.subarray(offset + 4, offset + 10)
    ),
    generalLevelIdc: codecPrivate[offset + 10],
    reserved1: (codecPrivate[offset + 11] & 0xf0) >> 4, // 4 bits
    minSpatialSegmentationIdc:
      ((codecPrivate[offset + 11] & 0x0f) << 8) | codecPrivate[offset + 12],
    reserved2: (codecPrivate[offset + 13] & 0xfc) >> 2, // 6 bits
    parallelismType: codecPrivate[offset + 13] & 0x03,
    chromaFormat: (codecPrivate[offset + 14] & 0xe0) >> 5,
    bitDepthLumaMinus8: (codecPrivate[offset + 14] & 0x1c) >> 2,
    bitDepthChromaMinus8: codecPrivate[offset + 14] & 0x03,
    avgFrameRate: (codecPrivate[offset + 15] << 8) | codecPrivate[offset + 16],
    constantFrameRate: (codecPrivate[offset + 17] & 0xc0) >> 6,
    numTemporalLayers: (codecPrivate[offset + 17] & 0x38) >> 3,
    temporalIdNested: Boolean(codecPrivate[offset + 17] & 0x04),
    lengthSizeMinusOne: codecPrivate[offset + 17] & 0x03,
    numOfArrays: codecPrivate[offset + 18],
    arrays: [],
  };
  offset += 19;

  // Parse NAL unit arrays: each array has a 1-byte header (completeness /
  // reserved / NAL type), a 16-bit NAL count, then 16-bit length-prefixed
  // NAL unit payloads.
  const arrays = config.arrays;
  for (let i = 0; i < config.numOfArrays; i++) {
    const array: HEVCDecoderConfigurationRecordArrayType = {
      arrayCompleteness: Boolean(codecPrivate[offset] & 0x80),
      reserved: (codecPrivate[offset] & 0x40) >> 6,
      NALUnitType: codecPrivate[offset] & 0x3f,
      numNalus: (codecPrivate[offset + 1] << 8) | codecPrivate[offset + 2],
      nalUnits: [] as Uint8Array<ArrayBufferLike>[],
    };
    offset += 3;
    for (let j = 0; j < array.numNalus; j++) {
      const nalUnitLength =
        (codecPrivate[offset] << 8) | codecPrivate[offset + 1];
      offset += 2;
      // subarray: the NAL views share codecPrivate's buffer (no copy).
      array.nalUnits.push(
        codecPrivate.subarray(offset, offset + nalUnitLength)
      );
      offset += nalUnitLength;
    }
    arrays.push(array);
  }
  const result = { ...config, arrays };

  // Validate using arktype; failures carry the ArkErrors as `cause`.
  const validation = HEVCDecoderConfigurationRecordSchema(result);
  if (validation instanceof ArkErrors) {
    const error = new ParseCodecPrivateError(
      HEVC_CODEC_TYPE,
      'Invalid HEVC configuration record'
    );
    error.cause = validation;
    throw error;
  }
  return result;
}
/**
 * Build the RFC 6381 / ISO/IEC 14496-15 Annex E codec parameter string for
 * HEVC, e.g. `hev1.1.6.L93.B0`:
 *   hev1.<profile-space><profile-idc>.<compat-flags>.<tier+level>.<constraints>
 *
 * - profile space: '' for 0, otherwise 'A'/'B'/'C'
 * - profile idc: decimal (the previous code emitted it in hex)
 * - compat flags: general_profile_compatibility_flags with its 32 bits
 *   reversed, in hex (the previous code emitted a bogus '6'/'7' tier digit
 *   in this position and dropped the flags entirely)
 * - tier+level: 'L' (main tier) or 'H' (high tier) + general_level_idc in
 *   decimal
 * - constraints: constraint bytes in hex, trailing zero bytes omitted
 */
export function genCodecStringByHEVCDecoderConfigurationRecord(
  config: HEVCDecoderConfigurationRecordType
) {
  const profileSpace =
    config.generalProfileSpace === 0
      ? ''
      : String.fromCharCode(65 + config.generalProfileSpace - 1);
  // Bit-reverse the 32-bit compatibility flags (Annex E serializes them
  // LSB-first).
  let reversedFlags = 0;
  for (let i = 0; i < 32; i++) {
    reversedFlags =
      (reversedFlags << 1) | ((config.generalProfileCompatibilityFlags >>> i) & 1);
  }
  const compatibility = (reversedFlags >>> 0).toString(16).toUpperCase();
  const tierLevel = `${config.generalTierFlag ? 'H' : 'L'}${config.generalLevelIdc}`;
  // Drop trailing zero constraint bytes, but keep at least one byte.
  const constraintBytes = [...config.generalConstraintIndicatorFlags];
  while (
    constraintBytes.length > 1 &&
    constraintBytes[constraintBytes.length - 1] === 0
  ) {
    constraintBytes.pop();
  }
  const constraints = constraintBytes
    .map((b) => (b & 0xff).toString(16).toUpperCase().padStart(2, '0'))
    .join('.');
  return `hev1.${profileSpace}${config.generalProfileIdc}.${compatibility}.${tierLevel}.${constraints}`;
}

View File

@@ -1,290 +0,0 @@
import { AudioCodec } from '../../base/audio_codecs';
import { UnsupportedCodecError } from '../../base/errors';
import { VideoCodec } from '../../base/video_codecs';
import type { TrackEntryType } from '../schema';
import {
genCodecIdByAudioSpecificConfig,
parseAudioSpecificConfig,
} from './aac';
import {
genCodecIdByAVCDecoderConfigurationRecord,
parseAVCDecoderConfigurationRecord,
} from './avc';
import type {ProbeInfo} from "@/media/mkv/enhance/probe.ts";
/**
 * Matroska video `CodecID` strings keyed by a short mnemonic
 * (see the Matroska codec specs for the string values).
 */
export const VideoCodecId = {
  VCM: 'V_MS/VFW/FOURCC',
  UNCOMPRESSED: 'V_UNCOMPRESSED',
  MPEG4_ISO_SP: 'V_MPEG4/ISO/SP',
  MPEG4_ISO_ASP: 'V_MPEG4/ISO/ASP',
  MPEG4_ISO_AP: 'V_MPEG4/ISO/AP',
  MPEG4_MS_V3: 'V_MPEG4/MS/V3',
  MPEG1: 'V_MPEG1',
  MPEG2: 'V_MPEG2',
  H264: 'V_MPEG4/ISO/AVC',
  HEVC: 'V_MPEGH/ISO/HEVC',
  AVS2: 'V_AVS2',
  AVS3: 'V_AVS3',
  RV10: 'V_REAL/RV10',
  RV20: 'V_REAL/RV20',
  RV30: 'V_REAL/RV30',
  RV40: 'V_REAL/RV40',
  QUICKTIME: 'V_QUICKTIME',
  THEORA: 'V_THEORA',
  // NOTE(review): key name has a typo (PROPRES vs PRORES); the CodecID
  // value is correct. Renaming the key would break existing references.
  PROPRES: 'V_PRORES',
  VP8: 'V_VP8',
  VP9: 'V_VP9',
  FFV1: 'V_FFV1',
  AV1: 'V_AV1',
} as const;

// Any known video CodecID, widened with `string` so unknown IDs still flow
// through to the UnsupportedCodecError paths instead of failing type checks.
export type VideoCodecIdType =
  | `${(typeof VideoCodecId)[keyof typeof VideoCodecId]}`
  | string;
/**
 * Matroska audio `CodecID` strings keyed by a short mnemonic
 * (see the Matroska codec specs for the string values).
 */
export const AudioCodecId = {
  MPEG_L3: 'A_MPEG/L3',
  MPEG_L2: 'A_MPEG/L2',
  MPEG_L1: 'A_MPEG/L1',
  PCM_INT_BIG: 'A_PCM/INT/BIG',
  PCM_INT_LIT: 'A_PCM/INT/LIT',
  PCM_FLOAT_IEEE: 'A_PCM/FLOAT/IEEE',
  MPC: 'A_MPC',
  AC3: 'A_AC3',
  AC3_BSID9: 'A_AC3/BSID9',
  AC3_BSID10: 'A_AC3/BSID10',
  ALAC: 'A_ALAC',
  DTS: 'A_DTS',
  DTS_EXPRESS: 'A_DTS/EXPRESS',
  DTS_LOSSLESS: 'A_DTS/LOSSLESS',
  VORBIS: 'A_VORBIS',
  OPUS: 'A_OPUS',
  FLAC: 'A_FLAC',
  EAC3: 'A_EAC3',
  REAL_14_4: 'A_REAL/14_4',
  REAL_28_8: 'A_REAL/28_8',
  REAL_COOK: 'A_REAL/COOK',
  REAL_SIPR: 'A_REAL/SIPR',
  REAL_RALF: 'A_REAL/RALF',
  REAL_ATRC: 'A_REAL/ATRC',
  MS_ACM: 'A_MS/ACM',
  AAC: 'A_AAC',
  AAC_MPEG2_MAIN: 'A_AAC/MPEG2/MAIN',
  AAC_MPEG2_LC: 'A_AAC/MPEG2/LC',
  AAC_MPEG2_LC_SBR: 'A_AAC/MPEG2/LC/SBR',
  AAC_MPEG2_SSR: 'A_AAC/MPEG2/SSR',
  AAC_MPEG4_MAIN: 'A_AAC/MPEG4/MAIN',
  AAC_MPEG4_LC: 'A_AAC/MPEG4/LC',
  // NOTE(review): key says SBR but the string is the MPEG4 *LC*/SBR variant;
  // the string value is what matters for matching.
  AAC_MPEG4_SBR: 'A_AAC/MPEG4/LC/SBR',
  AAC_MPEG4_SSR: 'A_AAC/MPEG4/SSR',
  AAC_MPEG4_LTP: 'A_AAC/MPEG4/LTP',
  QUICKTIME: 'A_QUICKTIME',
  QDMC: 'A_QUICKTIME/QDMC',
  QDM2: 'A_QUICKTIME/QDM2',
  TTA1: 'A_TTA1',
  WAVEPACK4: 'A_WAVPACK4',
  ATRAC: 'A_ATRAC/AT1',
} as const;

// Any known audio CodecID, widened with `string` so unknown IDs still flow
// through to the UnsupportedCodecError paths instead of failing type checks.
export type AudioCodecIdType =
  | `${(typeof AudioCodecId)[keyof typeof AudioCodecId]}`
  | string;
/**
 * Matroska subtitle `CodecID` strings keyed by a short mnemonic
 * (see the Matroska codec specs for the string values).
 */
export const SubtitleCodecId = {
  UTF8: 'S_TEXT/UTF8',
  SSA: 'S_TEXT/SSA',
  ASS: 'S_TEXT/ASS',
  WEBVTT: 'S_TEXT/WEBVTT',
  BMP: 'S_IMAGE/BMP',
  DVBSUB: 'S_DVBSUB',
  VOBSUB: 'S_VOBSUB',
  HDMV_PGS: 'S_HDMV/PGS',
  HDMV_TEXTST: 'S_HDMV/TEXTST',
  KATE: 'S_KATE',
  ARIBSUB: 'S_ARIBSUB',
} as const;

// Any known subtitle CodecID, widened with `string` for forward
// compatibility with IDs not in the map above.
export type SubtitleCodecIdType =
  | `${(typeof SubtitleCodecId)[keyof typeof SubtitleCodecId]}`
  | string;
/** WebCodecs VideoDecoderConfig extended with the internal codec family. */
export interface VideoDecoderConfigExt extends VideoDecoderConfig {
  codecType: VideoCodec,
}

/**
 * Map a Matroska video track entry onto a WebCodecs VideoDecoderConfig.
 *
 * @param track - track whose CodecID / CodecPrivate drive the mapping
 * @param _probeInfo - reserved for ffprobe-derived hints (currently unused)
 * @returns decoder config with CodecPrivate passed through as `description`
 * @throws UnsupportedCodecError for CodecIDs WebCodecs cannot decode, or for
 *   H.264 tracks missing the CodecPrivate needed to derive the codec string
 */
export function videoCodecIdToWebCodecs(
  track: TrackEntryType,
  _probeInfo?: ProbeInfo
): VideoDecoderConfigExt {
  const codecId = track.CodecID;
  const codecPrivate = track.CodecPrivate;
  const shareOptions = {
    description: codecPrivate
  }
  switch (codecId) {
    // NOTE(review): 'hevc', 'vp09', 'av1' and 'theora' below are codec
    // *family* names, not full RFC 6381 strings (e.g. 'hev1.1.6.L93.B0',
    // 'vp09.00.10.08'); some VideoDecoder.isConfigSupported implementations
    // may reject them — confirm against target browsers.
    case VideoCodecId.HEVC:
      return { ...shareOptions, codecType: VideoCodec.HEVC, codec: 'hevc' };
    case VideoCodecId.VP9:
      return { ...shareOptions, codecType: VideoCodec.VP9, codec: 'vp09' };
    case VideoCodecId.AV1:
      return { ...shareOptions, codecType: VideoCodec.AV1, codec: 'av1' };
    case VideoCodecId.H264:
      if (!codecPrivate) {
        // Fixed context string: this is the *video* decoder path (the
        // message previously said 'web codecs audio decoder').
        throw new UnsupportedCodecError(
          'h264(without codec_private profile)',
          'web codecs video decoder'
        );
      }
      return {
        ...shareOptions,
        codecType: VideoCodec.H264,
        // Derive the precise 'avc1.PPCCLL' string from the
        // AVCDecoderConfigurationRecord carried in CodecPrivate.
        codec: genCodecIdByAVCDecoderConfigurationRecord(
          parseAVCDecoderConfigurationRecord(codecPrivate)
        )
      };
    case VideoCodecId.THEORA:
      return { ...shareOptions, codecType: VideoCodec.Theora, codec: 'theora' };
    case VideoCodecId.VP8:
      return { ...shareOptions, codecType: VideoCodec.VP8, codec: 'vp8' };
    case VideoCodecId.MPEG4_ISO_SP:
      return { ...shareOptions, codecType: VideoCodec.MPEG4, codec: 'mp4v.01.3' };
    case VideoCodecId.MPEG4_ISO_ASP:
      return { ...shareOptions, codecType: VideoCodec.MPEG4, codec: 'mp4v.20.9' };
    case VideoCodecId.MPEG4_ISO_AP:
      return { ...shareOptions, codecType: VideoCodec.MPEG4, codec: 'mp4v.20.9' };
    default:
      throw new UnsupportedCodecError(codecId, 'web codecs video decoder');
  }
}
/** WebCodecs AudioDecoderConfig extended with the internal codec family. */
export interface AudioDecoderConfigExt extends AudioDecoderConfig {
  codecType: AudioCodec,
}

/**
 * Map a Matroska audio track entry onto a WebCodecs AudioDecoderConfig.
 *
 * Channel count and sample rate are taken from the track's Audio element;
 * CodecPrivate is passed through as `description`. For plain `A_AAC`, the
 * exact `mp4a.40.x` string is derived from the AudioSpecificConfig in
 * CodecPrivate when present, otherwise AAC-LC is assumed. PCM variants are
 * disambiguated by BitDepth.
 *
 * @param track - track whose CodecID / Audio metadata drive the mapping
 * @param _probeInfo - reserved for ffprobe-derived hints (currently unused)
 * @throws UnsupportedCodecError for CodecIDs (or PCM bit depths) WebCodecs
 *   cannot decode
 */
export function audioCodecIdToWebCodecs(
  track: TrackEntryType,
  _probeInfo?: ProbeInfo
): AudioDecoderConfigExt {
  const codecId = track.CodecID;
  const codecPrivate = track.CodecPrivate;
  const bitDepth = track.Audio?.BitDepth;
  // Number(undefined) is NaN — downstream WebCodecs config validation is
  // expected to reject tracks missing these fields.
  const numberOfChannels = Number(track.Audio?.Channels);
  const sampleRate = Number(track.Audio?.SamplingFrequency);
  const shareOptions = {
    numberOfChannels,
    sampleRate,
    description: codecPrivate
  }
  switch (track.CodecID) {
    // Explicit AAC profile CodecIDs map directly to mp4a.40.<AOT>.
    case AudioCodecId.AAC_MPEG4_MAIN:
    case AudioCodecId.AAC_MPEG2_MAIN:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: 'mp4a.40.1'
      };
    case AudioCodecId.AAC_MPEG2_LC:
    case AudioCodecId.AAC_MPEG4_LC:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: 'mp4a.40.2'
      };
    case AudioCodecId.AAC_MPEG2_SSR:
    case AudioCodecId.AAC_MPEG4_SSR:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: 'mp4a.40.3'
      };
    case AudioCodecId.AAC_MPEG4_LTP:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: 'mp4a.40.4'
      };
    case AudioCodecId.AAC_MPEG2_LC_SBR:
    case AudioCodecId.AAC_MPEG4_SBR:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: 'mp4a.40.5'
      };
    // Bare A_AAC: derive the AOT from AudioSpecificConfig when available,
    // else fall back to AAC-LC.
    case AudioCodecId.AAC:
      return {
        ...shareOptions,
        codecType: AudioCodec.AAC,
        codec: codecPrivate
          ? genCodecIdByAudioSpecificConfig(
              parseAudioSpecificConfig(codecPrivate)
            ) : 'mp4a.40.2',
      };
    case AudioCodecId.AC3:
    case AudioCodecId.AC3_BSID9:
      return {
        ...shareOptions,
        codecType: AudioCodec.AC3,
        codec: 'ac-3'
      };
    case AudioCodecId.EAC3:
    case AudioCodecId.AC3_BSID10:
      return {
        ...shareOptions,
        codecType: AudioCodec.EAC3,
        codec: 'ec-3'
      };
    case AudioCodecId.MPEG_L3:
      return {
        ...shareOptions,
        codecType: AudioCodec.MP3,
        codec: 'mp3'
      };
    case AudioCodecId.VORBIS:
      return { ...shareOptions, codecType: AudioCodec.Vorbis, codec: 'vorbis' }
    ;
    case AudioCodecId.FLAC:
      return { ...shareOptions, codecType: AudioCodec.FLAC, codec: 'flac' }
    ;
    case AudioCodecId.OPUS:
      return { ...shareOptions, codecType: AudioCodec.Opus, codec: 'opus' }
    ;
    case AudioCodecId.ALAC:
      return { ...shareOptions, codecType: AudioCodec.ALAC, codec: 'alac' }
    ;
    // Big-endian integer PCM, selected by bit depth.
    case AudioCodecId.PCM_INT_BIG:
      if (bitDepth === 16) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S16BE, codec: 'pcm-s16be' };
      }
      if (bitDepth === 24) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S24BE, codec: 'pcm-s24be' };
      }
      if (bitDepth === 32) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S32BE, codec: 'pcm-s32be' };
      }
      throw new UnsupportedCodecError(
        `${codecId}(${bitDepth}b)`,
        'web codecs audio decoder'
      );
    // Little-endian integer PCM, selected by bit depth.
    case AudioCodecId.PCM_INT_LIT:
      if (bitDepth === 16) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S16LE, codec: 'pcm-s16le' };
      }
      if (bitDepth === 24) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S24LE, codec: 'pcm-s24le' };
      }
      if (bitDepth === 32) {
        return { ...shareOptions, codecType: AudioCodec.PCM_S32LE, codec: 'pcm-s32le' };
      }
      throw new UnsupportedCodecError(
        `${codecId}(${bitDepth}b)`,
        'web codecs audio decoder'
      );
    case AudioCodecId.PCM_FLOAT_IEEE:
      // NOTE(review): assumes 32-bit float; BitDepth is not consulted here,
      // so a 64-bit float track would be mislabeled — confirm inputs.
      return { ...shareOptions, codecType: AudioCodec.PCM_F32LE, codec: 'pcm-f32le' };
    default:
      throw new UnsupportedCodecError(codecId, 'web codecs audio decoder');
  }
}

View File

@@ -1,21 +0,0 @@
import { type } from 'arktype';
import type {TrackEntryType} from "@/media/mkv/schema.ts";
// VP9 defines exactly four profiles (0-3).
export const VP9DecoderProfileSchema = type('0 | 1 | 2 | 3');

/**
 * Estimated VP9 decoder configuration. VP9 carries its real profile/level
 * in the bitstream rather than in CodecPrivate, so these fields are derived
 * from track metadata (see parseVP9DecoderConfigurationRecord).
 */
export const VP9DecoderConfigurationRecordSchema = type({
  profile: VP9DecoderProfileSchema,
  level: type.number,
  bitDepth: type.number,
});

export type VP9DecoderConfigurationRecordType =
  typeof VP9DecoderConfigurationRecordSchema.infer;
/**
 * Derive an approximate VP9 decoder configuration from Matroska track
 * metadata. The original implementation computed its inputs but never
 * returned anything; this version returns the estimated record.
 *
 * VP9 keeps its real profile/level inside the bitstream (not CodecPrivate),
 * so this is best-effort:
 * - profile: 0 for 8-bit, 2 for >8-bit input. 4:2:2/4:4:4 streams (which
 *   would be profile 1/3) are not distinguishable from track metadata.
 * - level: smallest VP9 level whose maximum luma picture size fits the
 *   track's dimensions (VP9 spec levels table), expressed in the x10 form
 *   used by 'vp09.PP.LL.DD' codec strings (e.g. 40 = level 4.0).
 *
 * NOTE(review): bit depth defaults to 10 when Colour.BitsPerChannel is
 * absent, matching the original code — confirm 8 would not be the better
 * default for typical content.
 */
export function parseVP9DecoderConfigurationRecord(track: TrackEntryType) {
  const pixelWidth = Number(track.Video?.PixelWidth);
  const pixelHeight = Number(track.Video?.PixelHeight);
  const pixels = pixelWidth * pixelHeight;
  const bitDepth = Number(track.Video?.Colour?.BitsPerChannel) || 10;
  // [maxLumaPictureSize, level] pairs from the VP9 levels table.
  const levelTable: [number, number][] = [
    [36864, 10],
    [73728, 11],
    [122880, 20],
    [245760, 21],
    [552960, 30],
    [983040, 31],
    [2228224, 40],
    [8912896, 50],
    [35651584, 60],
  ];
  const matched = levelTable.find(([maxPixels]) => pixels <= maxPixels);
  const level = matched ? matched[1] : 62;
  const profile = bitDepth > 8 ? 2 : 0;
  return { profile, level, bitDepth };
}

View File

@@ -1,3 +0,0 @@
/**
 * Side-band probing results used to resolve codecs that codec_private alone
 * cannot (filled by a local or remote ffprobe-style call; see
 * SegmentSystem.fetchProbeInfo). Currently an empty placeholder — fields are
 * expected to be added as probing is implemented.
 */
export interface ProbeInfo {
}

View File

@@ -1,507 +0,0 @@
import {
type EbmlClusterTagType,
type EbmlCuePointTagType,
type EbmlCuesTagType,
type EbmlInfoTagType,
type EbmlMasterTagType,
type EbmlSeekHeadTagType,
type EbmlSegmentTagType,
EbmlTagIdEnum,
EbmlTagPosition,
type EbmlTagsTagType,
type EbmlTagTagType,
type EbmlTagType,
type EbmlTrackEntryTagType,
type EbmlTracksTagType,
} from 'konoebml';
import { convertEbmlTagToComponent, type InferType } from './util';
import { isEqual, maxBy } from 'lodash-es';
import { ArkErrors, type Type } from 'arktype';
import {
ClusterSchema,
type ClusterType,
CuePointSchema,
type CuePointType,
type CueTrackPositionsType,
InfoSchema,
type InfoType,
SeekHeadSchema,
type SeekHeadType,
TagSchema,
type TagType,
TrackEntrySchema,
type TrackEntryType, TrackTypeRestrictionEnum,
} from './schema';
import {concatBufs} from "konoebml/lib/tools";
import {ParseCodecErrors, UnreachableOrLogicError, UnsupportedCodecError} from "@/media/base/errors.ts";
import type {ProbeInfo} from "@/media/mkv/enhance/probe.ts";
import {audioCodecIdToWebCodecs, videoCodecIdToWebCodecs} from "@/media/mkv/codecs";
import {Queue} from "mnemonist";
import {BehaviorSubject} from "rxjs";
// EBML element IDs (raw big-endian byte sequences) used to match SeekHead
// entries for the Info, Tracks, Cues and Tags master elements.
export const SEEK_ID_KAX_INFO = new Uint8Array([0x15, 0x49, 0xa9, 0x66]);
export const SEEK_ID_KAX_TRACKS = new Uint8Array([0x16, 0x54, 0xae, 0x6b]);
export const SEEK_ID_KAX_CUES = new Uint8Array([0x1c, 0x53, 0xbb, 0x6b]);
export const SEEK_ID_KAX_TAGS = new Uint8Array([0x12, 0x54, 0xc3, 0x67]);
export class SegmentSystem {
startTag: EbmlSegmentTagType;
headTags: EbmlTagType[] = [];
teeStream: ReadableStream<Uint8Array>
teeBufferTask: Promise<Uint8Array>;
firstCluster: EbmlClusterTagType | undefined;
probInfo?: ProbeInfo;
cue: CueSystem;
cluster: ClusterSystem;
seek: SeekSystem;
info: InfoSystem;
track: TrackSystem;
tag: TagSystem;
constructor(startNode: EbmlSegmentTagType, teeStream: ReadableStream<Uint8Array>) {
this.startTag = startNode;
this.cue = new CueSystem(this);
this.cluster = new ClusterSystem(this);
this.seek = new SeekSystem(this);
this.info = new InfoSystem(this);
this.track = new TrackSystem(this);
this.tag = new TagSystem(this);
this.teeStream = teeStream;
this.teeBufferTask = this.teeWaitingProbingData(teeStream);
}
private async teeWaitingProbingData (teeStream: ReadableStream<Uint8Array>): Promise<Uint8Array> {
const reader = teeStream.getReader();
const list: Uint8Array<ArrayBufferLike>[] = [];
while (true) {
try {
const { done, value } = await reader.read();
if (done) {
break;
}
list.push(value);
} catch (e: any) {
if (e?.name === 'AbortError') {
break;
}
throw e;
}
}
return concatBufs(...list)
}
get contentStartOffset() {
return this.startTag.startOffset + this.startTag.headerLength;
}
private seekLocal () {
const infoTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_INFO);
const tracksTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_TRACKS);
const cuesTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_CUES);
const tagsTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_TAGS);
if (cuesTag?.id === EbmlTagIdEnum.Cues) {
this.cue.prepareCuesWithTag(cuesTag);
}
if (infoTag?.id === EbmlTagIdEnum.Info) {
this.info.prepareWithInfoTag(infoTag);
}
if (tracksTag?.id === EbmlTagIdEnum.Tracks) {
this.track.prepareTracksWithTag(tracksTag);
}
if (tagsTag?.id === EbmlTagIdEnum.Tags) {
this.tag.prepareTagsWithTag(tagsTag);
}
}
scanMeta(tag: EbmlTagType) {
if (
tag.id === EbmlTagIdEnum.SeekHead &&
tag.position === EbmlTagPosition.End
) {
this.seek.addSeekHeadTag(tag);
}
this.headTags.push(tag);
this.seek.memoTag(tag);
if (tag.id === EbmlTagIdEnum.Cluster && !this.firstCluster) {
this.firstCluster = tag;
this.seekLocal();
}
return this;
}
async completeMeta () {
this.seekLocal();
await this.parseCodes();
return this;
}
/**
 * Resolve codec details by running ffprobe (locally or via a service) over
 * the buffered stream bytes.
 * NOTE(review): currently a stub — the payload is ignored and an empty
 * result is returned; implement before relying on the probe fallback path.
 */
async fetchProbeInfo (_payload: Uint8Array): Promise<ProbeInfo> {
  // call local or remote ff-probe
  return {}
}
/**
 * Resolve decoder configuration for every audio/video track.
 * Two-pass strategy: first try container metadata alone; if any track
 * fails, buffer the tee'd bytes, ask ffprobe for help, and retry all
 * candidates with the probe info. Errors are collected, not thrown.
 */
async parseCodes () {
  const candidates = this.track.tracks.filter(c => c.TrackType === TrackTypeRestrictionEnum.AUDIO || c.TrackType === TrackTypeRestrictionEnum.VIDEO);
  const parseErrors = new ParseCodecErrors();
  if (!this.probInfo) {
    // pass 1: no probe info yet
    for (const t of candidates) {
      try {
        await this.track.initTrack(t, undefined)
      } catch (e: unknown) {
        parseErrors.cause.push(e as Error)
      }
    }
    if (parseErrors.cause.length > 0) {
      // at least one track failed: fall back to probing the buffered bytes
      try {
        const teeBuffer = await this.teeBufferTask;
        this.probInfo = await this.fetchProbeInfo(teeBuffer);
      } catch (e) {
        parseErrors.cause.push(e as Error);
        return;
      }
    } else {
      // every track initialized without probing — done
      return;
    }
  }
  // pass 2: retry all candidates with probe info available
  for (const t of candidates) {
    try {
      await this.track.initTrack(t, this.probInfo)
    } catch (e) {
      parseErrors.cause.push(e as Error)
    }
  }
  if (parseErrors.cause.length > 0) {
    console.error(parseErrors);
  }
}
}
// A parsed schema component augmented with a read-only back-reference to
// the segment that owns it.
export type SegmentComponent<T> = T & {
  get segment(): SegmentSystem;
};
/** Attach a back-reference to the owning segment onto a parsed component. */
export function withSegment<T extends object>(
  component: T,
  segment: SegmentSystem
): SegmentComponent<T> {
  const bound = component as T & { segment: SegmentSystem };
  bound.segment = segment;
  return bound;
}
/**
 * Base class for per-segment subsystems (seek/info/cluster/track/cue/tag).
 * Subclasses supply an arktype schema used to validate EBML master tags
 * after conversion into plain component objects.
 */
export class SegmentComponentSystemTrait<
  E extends EbmlMasterTagType,
  S extends Type<any>,
> {
  segment: SegmentSystem;

  // Subclasses must override this with their arktype schema.
  get schema(): S {
    throw new Error('unimplemented!');
  }

  constructor(segment: SegmentSystem) {
    this.segment = segment;
  }

  /**
   * Convert an EBML master tag into a schema-validated component bound to
   * this segment.
   * @throws ArkErrors when validation fails (details are logged first)
   */
  componentFromTag(tag: E): SegmentComponent<InferType<S>> {
    const extracted = convertEbmlTagToComponent(tag);
    const result = this.schema(extracted) as
      | (InferType<S> & { segment: SegmentSystem })
      | ArkErrors;
    if (result instanceof ArkErrors) {
      const errors = result;
      console.error(
        'Parse component from tag error:',
        tag.toDebugRecord(),
        errors.flatProblemsByPath
      );
      throw errors;
    }
    result.segment = this.segment;
    return result;
  }
}
/**
 * Resolves Matroska SeekHead entries: maps seek-ids to absolute offsets
 * and memoizes parsed tags by their start offset for later lookup.
 */
export class SeekSystem extends SegmentComponentSystemTrait<
  EbmlSeekHeadTagType,
  typeof SeekHeadSchema
> {
  override get schema() {
    return SeekHeadSchema;
  }

  // All SeekHead elements seen so far; lookups currently use the first one.
  seekHeads: SeekHeadType[] = [];
  // Absolute start offset -> parsed tag at that offset.
  private offsetToTagMemo: Map<number, EbmlTagType> = new Map();

  /** Remember a tag so it can later be found by its start offset. */
  memoTag(tag: EbmlTagType) {
    this.offsetToTagMemo.set(tag.startOffset, tag);
  }

  /** Parse a SeekHead element and register it for seek-id lookups. */
  addSeekHeadTag(tag: EbmlSeekHeadTagType) {
    const seekHead = this.componentFromTag(tag);
    this.seekHeads.push(seekHead);
    return seekHead;
  }

  /**
   * SeekPosition values are relative to the segment payload start;
   * convert one to an absolute byte offset in the resource.
   */
  offsetFromSeekPosition(position: number): number {
    return position + this.segment.contentStartOffset;
  }

  /** Look up a previously memoized tag by absolute start offset. */
  seekTagByStartOffset(
    startOffset: number | undefined
  ): EbmlTagType | undefined {
    // explicit guard instead of the former `startOffset! >= 0` assertion
    return startOffset !== undefined && startOffset >= 0
      ? this.offsetToTagMemo.get(startOffset)
      : undefined;
  }

  /**
   * Resolve the absolute offset of a top-level element (Info, Tracks, ...)
   * through the first SeekHead; undefined when not present.
   */
  seekOffsetBySeekId(seekId: Uint8Array): number | undefined {
    const seekPosition = this.seekHeads[0]?.Seek?.find((c) =>
      isEqual(c.SeekID, seekId)
    )?.SeekPosition;
    return seekPosition != null && seekPosition >= 0
      ? this.offsetFromSeekPosition(seekPosition as number)
      : undefined;
  }

  /** Resolve a top-level element's tag through the first SeekHead. */
  seekTagBySeekId(seekId: Uint8Array): EbmlTagType | undefined {
    return this.seekTagByStartOffset(this.seekOffsetBySeekId(seekId));
  }

  // Absolute offset of the first Cluster; only valid after the meta scan.
  get firstClusterOffset() {
    if (!this.segment.firstCluster) {
      throw new UnreachableOrLogicError('first cluster not found');
    }
    return this.segment.firstCluster.startOffset;
  }
}
/** Holds the parsed segment Info element (timestamp scale, duration, ...). */
export class InfoSystem extends SegmentComponentSystemTrait<
  EbmlInfoTagType,
  typeof InfoSchema
> {
  override get schema() {
    return InfoSchema;
  }

  // Parsed segment information; assigned by prepareWithInfoTag.
  info!: SegmentComponent<InfoType>;

  /** Parse and store the segment Info element. */
  prepareWithInfoTag(tag: EbmlInfoTagType) {
    const parsed = this.componentFromTag(tag);
    this.info = parsed;
    return this;
  }
}
/** Buffers parsed Cluster elements in arrival order. */
export class ClusterSystem extends SegmentComponentSystemTrait<
  EbmlClusterTagType,
  typeof ClusterSchema
> {
  override get schema() {
    return ClusterSchema;
  }

  // Clusters parsed so far.
  clustersBuffer: SegmentComponent<ClusterType>[] = [];

  /** Parse a Cluster element, buffer it, and return the parsed component. */
  addClusterWithTag(tag: EbmlClusterTagType) {
    const parsed = this.componentFromTag(tag);
    this.clustersBuffer.push(parsed);
    return parsed;
  }
}
export interface GetTrackEntryOptions {
  // Ranking function; the highest value wins among matching tracks.
  priority?: (v: SegmentComponent<TrackEntryType>) => number;
  // Filter applied before ranking.
  predicate?: (v: SegmentComponent<TrackEntryType>) => boolean;
}
// Per-track decoder bundle: the WebCodecs decoder, its resolved
// configuration (if any), and a reactive queue of decoded frames.
export interface TrackState<Decoder, Config, Frame> {
  decoder: Decoder,
  configuration?: Config,
  frameBuffer$: BehaviorSubject<Queue<Frame>>
}
/**
 * Manages TrackEntry components and bootstraps one WebCodecs decoder per
 * audio/video track.
 */
export class TrackSystem extends SegmentComponentSystemTrait<
  EbmlTrackEntryTagType,
  typeof TrackEntrySchema
> {
  override get schema() {
    return TrackEntrySchema;
  }

  // All parsed TrackEntry components for this segment.
  tracks: SegmentComponent<TrackEntryType>[] = [];
  // Decoder state keyed by track-entry component identity.
  videoTrackState = new WeakMap<
    TrackEntryType,
    TrackState<VideoDecoder, VideoDecoderConfig, VideoFrame>
  >();
  audioTrackState = new WeakMap<
    TrackEntryType,
    TrackState<AudioDecoder, AudioDecoderConfig, AudioData>
  >();

  /**
   * Pick the best matching enabled track.
   * Default priority prefers forced tracks, then default-flagged tracks.
   * Options may now be omitted entirely (defaults to {}).
   */
  getTrackEntry({
    priority = (track) =>
      (Number(!!track.FlagForced) << 4) + Number(!!track.FlagDefault),
    predicate = (track) => track.FlagEnabled !== 0,
  }: GetTrackEntryOptions = {}) {
    return this.tracks
      .filter(predicate)
      .toSorted((a, b) => priority(b) - priority(a))
      .at(0);
  }

  /** Parse every TrackEntry child of the Tracks master element. */
  prepareTracksWithTag(tag: EbmlTracksTagType) {
    this.tracks = tag.children
      .filter((c) => c.id === EbmlTagIdEnum.TrackEntry)
      .map((c) => this.componentFromTag(c));
    return this;
  }

  /**
   * Build a WebCodecs decoder for an audio or video track.
   * @param track parsed track entry
   * @param probe optional ffprobe-derived info used as a codec fallback
   * @throws UnsupportedCodecError when the platform rejects the config
   */
  async initTrack(track: TrackEntryType, probe?: ProbeInfo) {
    if (track.TrackType === TrackTypeRestrictionEnum.AUDIO) {
      const configuration = audioCodecIdToWebCodecs(track, probe);
      // BUGFIX: isConfigSupported resolves to an object (always truthy);
      // the `supported` flag must be checked, and negated.
      const support = await AudioDecoder.isConfigSupported(configuration);
      if (!support.supported) {
        throw new UnsupportedCodecError(configuration.codec, 'audio decoder');
      }
      const queue$ = new BehaviorSubject(new Queue<AudioData>());
      this.audioTrackState.set(track, {
        configuration,
        decoder: new AudioDecoder({
          output: (audioData) => {
            const queue = queue$.getValue();
            queue.enqueue(audioData);
            queue$.next(queue);
          },
          error: (e) => {
            queue$.error(e);
          },
        }),
        frameBuffer$: queue$,
      });
    } else if (track.TrackType === TrackTypeRestrictionEnum.VIDEO) {
      const configuration = videoCodecIdToWebCodecs(track, probe);
      // BUGFIX: same truthiness bug as in the audio branch.
      const support = await VideoDecoder.isConfigSupported(configuration);
      if (!support.supported) {
        // BUGFIX: message previously said 'audio decoder' in this branch.
        throw new UnsupportedCodecError(configuration.codec, 'video decoder');
      }
      const queue$ = new BehaviorSubject(new Queue<VideoFrame>());
      this.videoTrackState.set(track, {
        configuration,
        decoder: new VideoDecoder({
          output: (videoFrame) => {
            const queue = queue$.getValue();
            queue.enqueue(videoFrame);
            queue$.next(queue);
          },
          error: (e) => {
            queue$.error(e);
          },
        }),
        frameBuffer$: queue$,
      });
    }
  }
}
/**
 * Holds parsed CuePoints and supports nearest-cue lookup for seeking.
 */
export class CueSystem extends SegmentComponentSystemTrait<
  EbmlCuePointTagType,
  typeof CuePointSchema
> {
  override get schema() {
    return CuePointSchema;
  }

  // CuePoints in file order (muxers write them sorted by CueTime).
  cues: SegmentComponent<CuePointType>[] = [];

  /** Parse every CuePoint child of the Cues master element. */
  prepareCuesWithTag(tag: EbmlCuesTagType) {
    this.cues = tag.children
      .filter((c) => c.id === EbmlTagIdEnum.CuePoint)
      .map(this.componentFromTag.bind(this));
    return this;
  }

  /**
   * Binary-search the cue whose CueTime is closest to seekTime.
   * Assumes cues are sorted ascending by CueTime.
   */
  findClosestCue(seekTime: number): CuePointType | undefined {
    const cues = this.cues;
    if (!cues || cues.length === 0) {
      return undefined;
    }
    let left = 0;
    let right = cues.length - 1;
    // clamp to the ends before searching
    if (seekTime <= cues[0].CueTime) {
      return cues[0];
    }
    if (seekTime >= cues[right].CueTime) {
      return cues[right];
    }
    while (left <= right) {
      const mid = Math.floor((left + right) / 2);
      if (cues[mid].CueTime === seekTime) {
        return cues[mid];
      }
      if (cues[mid].CueTime < seekTime) {
        left = mid + 1;
      } else {
        right = mid - 1;
      }
    }
    // left/right now bracket seekTime; pick the nearer neighbour
    const before = cues[right];
    const after = cues[left];
    return Math.abs((before.CueTime as number) - seekTime) <
      Math.abs((after.CueTime as number) - seekTime)
      ? before
      : after;
  }

  /**
   * Pick the CueTrackPositions for a given track number, falling back to
   * the entry with the largest CueClusterPosition when absent.
   */
  getCueTrackPositions(
    cuePoint: CuePointType,
    track?: number
  ): CueTrackPositionsType {
    let cueTrackPositions: CueTrackPositionsType | undefined;
    // explicit guard instead of the former `track! >= 0` assertion
    if (typeof track === 'number' && track >= 0) {
      cueTrackPositions = cuePoint.CueTrackPositions.find(
        (c) => c.CueTrack === track
      );
    }
    if (!cueTrackPositions) {
      cueTrackPositions = maxBy(
        cuePoint.CueTrackPositions,
        (c) => c.CueClusterPosition
      )!;
    }
    return cueTrackPositions;
  }

  // True once cues have been ingested (locally or from a remote range).
  get prepared(): boolean {
    return this.cues.length > 0;
  }
}
/** Holds the segment's parsed Tag elements (global metadata). */
export class TagSystem extends SegmentComponentSystemTrait<
  EbmlTagTagType,
  typeof TagSchema
> {
  override get schema() {
    return TagSchema;
  }

  // Parsed Tag components.
  tags: SegmentComponent<TagType>[] = [];

  /** Parse every Tag child of a Tags master element. */
  prepareTagsWithTag(tag: EbmlTagsTagType) {
    const tagChildren = tag.children.filter((c) => c.id === EbmlTagIdEnum.Tag);
    this.tags = tagChildren.map((c) => this.componentFromTag(c));
    return this;
  }

  // True once tagging metadata has been ingested.
  get prepared(): boolean {
    return this.tags.length > 0;
  }
}

View File

@@ -1,402 +0,0 @@
import {EbmlStreamDecoder, EbmlTagIdEnum, EbmlTagPosition, type EbmlTagType,} from 'konoebml';
import {
defer,
EMPTY,
filter,
finalize,
from,
isEmpty,
map,
merge,
Observable,
of,
reduce,
scan,
share,
shareReplay,
switchMap,
take,
takeWhile,
withLatestFrom,
} from 'rxjs';
import {createRangedStream, type CreateRangedStreamOptions} from '@/fetch';
import {type CueSystem, SEEK_ID_KAX_CUES, SEEK_ID_KAX_TAGS, type SegmentComponent, SegmentSystem,} from './model';
import {isTagIdPos, waitTick} from './util';
import type {ClusterType} from './schema';
// Ranged-stream options plus `tee`: when true, a second copy of the raw
// byte stream is exposed (used for buffering bytes for codec probing).
export interface CreateRangedEbmlStreamOptions
  extends CreateRangedStreamOptions {
  tee?: boolean;
}
/**
 * Fetch a byte range of `url` and decode it into an EBML tag stream.
 *
 * Emits a single object holding the shared tag observable (`ebml$`) plus
 * the underlying response/abort handles. Unsubscribing aborts the fetch.
 */
export function createRangedEbmlStream({
  url,
  byteStart = 0,
  byteEnd,
  tee = false
}: CreateRangedEbmlStreamOptions): Observable<{
  ebml$: Observable<EbmlTagType>;
  totalSize?: number;
  response: Response;
  body: ReadableStream<Uint8Array>;
  controller: AbortController;
  teeBody: ReadableStream<Uint8Array> | undefined;
}> {
  const stream$ = from(createRangedStream({ url, byteStart, byteEnd }));
  return stream$.pipe(
    switchMap(({ controller, body, totalSize, response }) => {
      // guards against double-complete and marks deliberate aborts
      let requestCompleted = false;
      let teeStream: ReadableStream<Uint8Array> | undefined;
      let stream: ReadableStream<Uint8Array>;
      if (tee) {
        [stream, teeStream] = body.tee();
      } else {
        stream = body;
      }
      const originRequest$ = new Observable<EbmlTagType>((subscriber) => {
        stream
          .pipeThrough(
            new EbmlStreamDecoder({
              streamStartOffset: byteStart,
              // Cluster children are large; do not collect them here
              collectChild: (child) => child.id !== EbmlTagIdEnum.Cluster,
              backpressure: {
                eventLoop: waitTick,
              },
            })
          )
          .pipeTo(
            new WritableStream({
              write: async (tag) => {
                // yield per tag to keep the event loop responsive
                await waitTick();
                subscriber.next(tag);
              },
              close: () => {
                if (!requestCompleted) {
                  requestCompleted = true;
                  subscriber.complete();
                }
              },
            })
          )
          .catch((error) => {
            // AbortError after completion is the expected teardown path
            if (requestCompleted && error?.name === 'AbortError') {
              return;
            }
            requestCompleted = true;
            subscriber.error(error);
          });
        // teardown: abort the underlying fetch on unsubscribe
        return () => {
          requestCompleted = true;
          controller.abort();
        };
      }).pipe(
        share({
          resetOnComplete: false,
          resetOnError: false,
          resetOnRefCountZero: true,
        })
      );
      // Subscribers arriving after completion get EMPTY instead of a refetch.
      const ebml$ = defer(() =>
        requestCompleted ? EMPTY : originRequest$
      ).pipe(
        share({
          resetOnError: false,
          resetOnComplete: true,
          resetOnRefCountZero: true,
        })
      );
      return of({
        ebml$,
        totalSize,
        response,
        body: stream,
        teeBody: teeStream,
        controller,
      });
    })
  );
}
// Controller options; the byte range is managed internally, so omitted.
export interface CreateEbmlControllerOptions
  extends Omit<CreateRangedEbmlStreamOptions, 'byteStart' | 'byteEnd'> {}
/**
 * Build the reactive controller for an EBML/Matroska resource.
 *
 * Performs a full-range (tee'd) fetch for the header area, then exposes:
 * - head$: the EBML header element
 * - segments$: one entry per Segment with metadata streams and seek helpers
 * The controller can issue additional ranged fetches for remote Cues/Tags
 * referenced by the SeekHead, and for cluster data when seeking.
 */
export function createEbmlController({
  url,
  ...options
}: CreateEbmlControllerOptions) {
  // Initial request from byte 0; tee'd so raw bytes stay available for probing.
  const metaRequest$ = createRangedEbmlStream({
    ...options,
    url,
    byteStart: 0,
    tee: true
  });
  const controller$ = metaRequest$.pipe(
    map(({ totalSize, ebml$, response, controller, teeBody }) => {
      // EBML header (first and only, hence take(1) + replay)
      const head$ = ebml$.pipe(
        filter(isTagIdPos(EbmlTagIdEnum.EBML, EbmlTagPosition.End)),
        take(1),
        shareReplay(1)
      );
      console.debug(
        `stream of video "${url}" created, total size is ${totalSize ?? 'unknown'}`
      );
      const segmentStart$ = ebml$.pipe(
        filter(isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.Start))
      );
      /**
       * The [Matroska v4](https://www.matroska.org/technical/elements.html)
       * spec says a file contains a single Segment, but some MKVs produced
       * by unusual tools emit several, so each start tag is handled.
       */
      const segments$ = segmentStart$.pipe(
        map((startTag) => {
          const segment = new SegmentSystem(startTag, teeBody!);
          const clusterSystem = segment.cluster;
          const seekSystem = segment.seek;
          // Header-area tags up to (and including) the first keyframe cluster.
          const meta$ = ebml$.pipe(
            scan((acc, tag) => {
              // avoid object recreation
              acc.hasKeyframe = acc.hasKeyframe || (tag.id === EbmlTagIdEnum.SimpleBlock && tag.keyframe) || (tag.id === EbmlTagIdEnum.BlockGroup && tag.children.every(c => c.id !== EbmlTagIdEnum.ReferenceBlock));
              acc.tag = tag;
              return acc;
            }, { hasKeyframe: false, tag: undefined as unknown as EbmlTagType }),
            takeWhile(
              ({ tag, hasKeyframe }) => {
                return !isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End)(tag) && !(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)(tag) && hasKeyframe);
              },
              true
            ),
            map(({ tag }) => tag),
            share({
              resetOnComplete: false,
              resetOnError: false,
              resetOnRefCountZero: true,
            })
          );
          // Segment after all header tags were scanned and codecs resolved.
          const withMeta$ = meta$.pipe(
            reduce((segment, meta) => segment.scanMeta(meta), segment),
            switchMap(() => segment.completeMeta()),
            take(1),
            shareReplay(1)
          );
          // If cues were not local, fetch them via their SeekHead offset.
          const withRemoteCues$ = withMeta$.pipe(
            switchMap((s) => {
              const cueSystem = s.cue;
              const seekSystem = s.seek;
              if (cueSystem.prepared) {
                return EMPTY;
              }
              const remoteCuesTagStartOffset =
                seekSystem.seekOffsetBySeekId(SEEK_ID_KAX_CUES);
              if (remoteCuesTagStartOffset! >= 0) {
                return createRangedEbmlStream({
                  ...options,
                  url,
                  byteStart: remoteCuesTagStartOffset,
                }).pipe(
                  switchMap((req) => req.ebml$),
                  filter(isTagIdPos(EbmlTagIdEnum.Cues, EbmlTagPosition.End)),
                  withLatestFrom(withMeta$),
                  map(([cues, withMeta]) => {
                    withMeta.cue.prepareCuesWithTag(cues);
                    return withMeta;
                  })
                );
              }
              return EMPTY;
            }),
            take(1),
            shareReplay(1)
          );
          // Segment whose cues were already present in the header area.
          const withLocalCues$ = withMeta$.pipe(
            switchMap((s) => (s.cue.prepared ? of(s) : EMPTY)),
            shareReplay(1)
          );
          // Same local/remote split for Tags metadata.
          const withRemoteTags$ = withMeta$.pipe(
            switchMap((s) => {
              const tagSystem = s.tag;
              const seekSystem = s.seek;
              if (tagSystem.prepared) {
                return EMPTY;
              }
              const remoteTagsTagStartOffset =
                seekSystem.seekOffsetBySeekId(SEEK_ID_KAX_TAGS);
              if (remoteTagsTagStartOffset! >= 0) {
                return createRangedEbmlStream({
                  ...options,
                  url,
                  byteStart: remoteTagsTagStartOffset,
                }).pipe(
                  switchMap((req) => req.ebml$),
                  filter(isTagIdPos(EbmlTagIdEnum.Tags, EbmlTagPosition.End)),
                  withLatestFrom(withMeta$),
                  map(([tags, withMeta]) => {
                    withMeta.tag.prepareTagsWithTag(tags);
                    return withMeta;
                  })
                );
              }
              return EMPTY;
            }),
            take(1),
            shareReplay(1)
          );
          const withLocalTags$ = withMeta$.pipe(
            switchMap((s) => (s.tag.prepared ? of(s) : EMPTY)),
            shareReplay(1)
          );
          // First of local/remote cues wins; withoutCues$ fires when neither.
          const withCues$ = merge(withLocalCues$, withRemoteCues$).pipe(
            take(1)
          );
          const withoutCues$ = withCues$.pipe(
            isEmpty(),
            switchMap((empty) => (empty ? withMeta$ : EMPTY))
          );
          const withTags$ = merge(withLocalTags$, withRemoteTags$).pipe(
            take(1)
          );
          const withoutTags$ = withTags$.pipe(
            isEmpty(),
            switchMap((empty) => (empty ? withMeta$ : EMPTY))
          );
          // Linear scan from the first cluster when no cue index exists.
          const seekWithoutCues = (
            seekTime: number
          ): Observable<SegmentComponent<ClusterType>> => {
            const request$ = withMeta$.pipe(
              switchMap(() =>
                createRangedEbmlStream({
                  ...options,
                  url,
                  byteStart: seekSystem.firstClusterOffset,
                })
              )
            );
            const cluster$ = request$.pipe(
              switchMap((req) => req.ebml$),
              filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
              map((tag) => clusterSystem.addClusterWithTag(tag))
            );
            if (seekTime === 0) {
              return cluster$;
            }
            return cluster$.pipe(
              scan(
                (acc, curr) => {
                  // avoid object recreation
                  acc.prev = acc.next;
                  acc.next = curr;
                  return acc;
                },
                ({ prev: undefined as (SegmentComponent<ClusterType> | undefined), next: undefined as SegmentComponent<ClusterType> | undefined })
              ),
              filter((c) => c.next?.Timestamp! > seekTime),
              map((c) => c.prev ?? c.next!)
            );
          };
          // Jump straight to the cluster referenced by the closest cue.
          const seekWithCues = (
            cueSystem: CueSystem,
            seekTime: number
          ): Observable<SegmentComponent<ClusterType>> => {
            if (seekTime === 0) {
              return seekWithoutCues(seekTime);
            }
            const cuePoint = cueSystem.findClosestCue(seekTime);
            if (!cuePoint) {
              return seekWithoutCues(seekTime);
            }
            return createRangedEbmlStream({
              ...options,
              url,
              byteStart: seekSystem.offsetFromSeekPosition(
                cueSystem.getCueTrackPositions(cuePoint)
                  .CueClusterPosition as number
              ),
            }).pipe(
              switchMap((req) => req.ebml$),
              filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
              map(clusterSystem.addClusterWithTag.bind(clusterSystem))
            );
          };
          // Public seek entry point: picks the cue/no-cue path dynamically.
          const seek = (
            seekTime: number
          ): Observable<SegmentComponent<ClusterType>> => {
            if (seekTime === 0) {
              const subscription = merge(withCues$, withoutCues$).subscribe();
              // if seekTime equals to 0 at start, reuse the initialize stream
              return seekWithoutCues(seekTime).pipe(
                finalize(() => {
                  subscription.unsubscribe();
                })
              );
            }
            return merge(
              withCues$.pipe(switchMap((s) => seekWithCues(s.cue, seekTime))),
              withoutCues$.pipe(switchMap((_) => seekWithoutCues(seekTime)))
            );
          };
          return {
            startTag,
            head$,
            segment,
            meta$,
            withMeta$,
            withCues$,
            withoutCues$,
            withTags$,
            withoutTags$,
            seekWithCues,
            seekWithoutCues,
            seek,
          };
        })
      );
      return {
        segments$,
        head$,
        totalSize,
        ebml$,
        controller,
        response,
      };
    }),
    shareReplay(1)
  );
  return {
    controller$,
    request$: metaRequest$,
  };
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,62 +0,0 @@
import type { Type } from 'arktype';
import { EbmlElementType, EbmlTagIdEnum, type EbmlTagType } from 'konoebml';
import { IdMultiSet } from './schema';
// Extract the inferred static type from an arktype Type instance.
export type InferType<T extends Type<any>> = T['infer'];
// Narrow a tagged union to the member(s) whose `id` equals K.
export type PredicateIdExtract<T, K> = Extract<T, { id: K }>;
// Narrow a union by its `position` discriminant.
export type PredicatePositionExtract<
  T extends { position: string },
  P,
> = P extends T['position'] ? T : never;
/**
 * Build a type-guard matching a tag id, optionally restricted to a
 * position ('*' matches any position).
 * BUGFIX: the type-level default of '*' previously had no runtime
 * counterpart (`pos?: P`), so omitting `pos` made the predicate always
 * return false; a runtime default is now supplied, matching isTagPos.
 */
export function isTagIdPos<
  I extends EbmlTagIdEnum,
  P extends PredicateIdExtract<EbmlTagType, I>['position'] | '*' = '*',
>(id: I, pos: P | '*' = '*') {
  return (tag: EbmlTagType): tag is PredicateIdExtract<EbmlTagType, I> =>
    tag.id === id && (pos === '*' || pos === tag.position);
}
/** Build a position-only guard; the default '*' matches every position. */
export function isTagPos<
  T extends { position: string },
  P extends T['position'],
>(pos: P | '*' = '*') {
  return (tag: T): tag is PredicatePositionExtract<T, P> => {
    if (pos === '*') {
      return true;
    }
    return pos === tag.position;
  };
}
/**
 * Recursively convert a parsed EBML tag into a plain component object.
 * Master elements become records keyed by element name; element ids in
 * IdMultiSet accumulate into arrays. SimpleBlock/Block tags are kept as-is
 * (their binary payload is needed downstream); other tags yield their data.
 */
export function convertEbmlTagToComponent(tag: EbmlTagType) {
  if (tag.type === EbmlElementType.Master) {
    const obj: Record<string, any> = {};
    const children = tag.children;
    for (const c of children) {
      const name = EbmlTagIdEnum[c.id];
      const converted = convertEbmlTagToComponent(c);
      if (IdMultiSet.has(c.id)) {
        // element may occur multiple times: collect into an array
        if (obj[name]) {
          obj[name].push(converted);
        } else {
          obj[name] = [converted];
        }
      } else {
        obj[name] = converted;
      }
    }
    return obj;
  }
  if (tag.id === EbmlTagIdEnum.SimpleBlock || tag.id === EbmlTagIdEnum.Block) {
    return tag;
  }
  return tag.data;
}
/**
 * Yield one macrotask turn (setTimeout 0); used as a cooperative
 * backpressure point in stream processing.
 * The previous clearTimeout inside the callback was a no-op (the timer
 * had already fired), so the handle bookkeeping is removed.
 */
export function waitTick() {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, 0);
  });
}

View File

@@ -18,9 +18,12 @@ import {
fromEvent,
filter,
} from 'rxjs';
import { createEbmlController } from './media/mkv/reactive';
import { TrackTypeRestrictionEnum, type ClusterType } from './media/mkv/schema';
import type { SegmentComponent } from './media/mkv/model';
import { createEbmlController } from '@konoplayer/matroska/reactive';
import {
TrackTypeRestrictionEnum,
type ClusterType,
} from '@konoplayer/matroska/schema';
import type { SegmentComponent } from '@konoplayer/matroska/model';
import { createRef, ref, type Ref } from 'lit/directives/ref.js';
import { Queue } from 'mnemonist';
@@ -113,7 +116,6 @@ export class VideoPipelineDemo extends LitElement {
description: videoTrack.CodecPrivate, // Uint8Array包含 VPS/SPS/PPS
});
// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: <explanation>
const sub = this.cluster$.subscribe((c) => {
if (!isFinalized) {
for (const b of (c.SimpleBlock || []).filter(
@@ -163,7 +165,7 @@ export class VideoPipelineDemo extends LitElement {
const numberOfChannels =
(audioTrack.Audio?.Channels as number) || 2;
const duration =
Math.round(Number(audioTrack.DefaultDuration / 1000)) ||
Math.round(Number(audioTrack.DefaultDuration) / 1000) ||
Math.round((1024 / sampleRate) * 1000000);
decoder.configure({

View File

@@ -2,19 +2,25 @@
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"composite": true,
"target": "ES2020",
"outDir": "./dist",
"experimentalDecorators": true,
"module": "ESNext",
"moduleResolution": "bundler",
"useDefineForClassFields": false,
"paths": {
"@/*": [
"./src/*"
"@konoplayer/core/*": [
"../../packages/core/src/*"
],
"@konoplayer/matroska/*": [
"../../packages/matroska/src/*"
]
}
},
"include": [
"src"
],
"references": [
{
"path": "../../packages/core"
},
{
"path": "../../packages/matroska"
}
]
}

View File

@@ -1,5 +1,5 @@
{
"name": "proxy",
"name": "@konoplayer/proxy",
"version": "0.1.0",
"private": true,
"scripts": {

View File

@@ -0,0 +1 @@
{"version":"3.0.9","results":[[":src/matroska/codecs/av1.spec.ts",{"duration":52.71331099999952,"failed":false}]]}

17
apps/test/package.json Normal file
View File

@@ -0,0 +1,17 @@
{
"name": "@konoplayer/test",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {},
"dependencies": {
"@konoplayer/core": "workspace:*",
"@konoplayer/matroska": "workspace:*",
"konoebml": "^0.1.2"
},
"devDependencies": {
"unplugin-swc": "^1.5.1",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.0.9"
}
}

2
apps/test/resources/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
video/huge/*
!video/huge/.gitkeep

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,47 @@
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
import { VideoCodecId } from '@konoplayer/matroska/codecs';
import {
parseAV1DecoderConfigurationRecord,
genCodecStringByAV1DecoderConfigurationRecord,
} from '@konoplayer/matroska/codecs/av1';
import { loadComponentFromRangedResource } from '../utils/data';
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
import { isTagIdPos } from '@konoplayer/matroska/util';
describe('AV1 code test', () => {
  it('should parse av1 meta from track entry', async () => {
    // Decode the whole Segment of a small AV1 sample into a component tree.
    const [segment] = await loadComponentFromRangedResource<SegmentType>({
      resource: 'video/test-av1.mkv',
      predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
      schema: SegmentSchema,
    });
    const av1Track = segment.Tracks?.TrackEntry.find(
      (t) => t.CodecID === VideoCodecId.AV1
    )!;
    expect(av1Track).toBeDefined();
    expect(av1Track.CodecPrivate).toBeDefined();
    // CodecPrivate carries the AV1 decoder configuration record.
    const meta = parseAV1DecoderConfigurationRecord(av1Track)!;
    expect(meta).toBeDefined();
    const codecStr = genCodecStringByAV1DecoderConfigurationRecord(meta);
    expect(meta.marker).toBe(1);
    expect(meta.version).toBe(1);
    expect(meta.seqProfile).toBe(0);
    expect(meta.seqLevelIdx0).toBe(1);
    expect(meta.seqTier0).toBe(0);
    expect(meta.highBitdepth).toBe(0);
    expect(meta.monochrome).toBe(0);
    expect(
      `${meta.chromaSubsamplingX}${meta.chromaSubsamplingY}${meta.chromaSamplePosition}`
    ).toBe('110');
    expect(meta.initialPresentationDelayMinus1).toBeUndefined();
    expect(codecStr).toBe('av01.0.01M.08.0.110');
  });
});

View File

@@ -0,0 +1,40 @@
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
import { VideoCodecId } from '@konoplayer/matroska/codecs';
import {
parseAVCDecoderConfigurationRecord,
genCodecStringByAVCDecoderConfigurationRecord,
} from '@konoplayer/matroska/codecs/avc';
import { loadComponentFromRangedResource } from '../utils/data';
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
import { isTagIdPos } from '@konoplayer/matroska/util';
describe('AVC code test', () => {
  it('should parse avc meta from track entry', async () => {
    // Decode the whole Segment of a small H.264 sample into a component tree.
    const [segment] = await loadComponentFromRangedResource<SegmentType>({
      resource: 'video/test-avc.mkv',
      predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
      schema: SegmentSchema,
    });
    const avcTrack = segment.Tracks?.TrackEntry.find(
      (t) => t.CodecID === VideoCodecId.H264
    )!;
    expect(avcTrack).toBeDefined();
    expect(avcTrack.CodecPrivate).toBeDefined();
    // CodecPrivate carries the AVCDecoderConfigurationRecord (avcC).
    const meta = parseAVCDecoderConfigurationRecord(avcTrack)!;
    expect(meta).toBeDefined();
    const codecStr = genCodecStringByAVCDecoderConfigurationRecord(meta);
    expect(meta.configurationVersion).toBe(1);
    expect(meta.avcProfileIndication).toBe(100);
    expect(meta.profileCompatibility).toBe(0);
    expect(meta.avcLevelIndication).toBe(30);
    expect(codecStr).toBe('avc1.64001e');
  });
});

View File

@@ -0,0 +1,106 @@
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
import { VideoCodecId } from '@konoplayer/matroska/codecs';
import {
parseHEVCDecoderConfigurationRecord,
genCodecStringByHEVCDecoderConfigurationRecord,
HEVCDecoderConfigurationRecordType,
} from '@konoplayer/matroska/codecs/hevc';
import { loadComponentFromRangedResource } from '../utils/data';
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
import { isTagIdPos } from '@konoplayer/matroska/util';
import { assert } from 'vitest';
describe('HEVC codec test', () => {
  it('should parse hevc meta from track entry', async () => {
    // Decode the whole Segment of a small HEVC sample into a component tree.
    const [segment] = await loadComponentFromRangedResource<SegmentType>({
      resource: 'video/test-hevc.mkv',
      predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
      schema: SegmentSchema,
    });
    const hevcTrack = segment.Tracks?.TrackEntry.find(
      (t) => t.CodecID === VideoCodecId.HEVC
    )!;
    expect(hevcTrack).toBeDefined();
    expect(hevcTrack.CodecPrivate).toBeDefined();
    const meta = parseHEVCDecoderConfigurationRecord(hevcTrack);
    expect(meta).toBeDefined();
    const codecStr = genCodecStringByHEVCDecoderConfigurationRecord(meta);
    expect(codecStr).toBe('hev1.1.6.L63.90');
  });

  it('should match chrome test suite', () => {
    // Build the minimal record fields the codec-string generator reads.
    function makeHEVCParameterSet(
      generalProfileSpace: number,
      generalProfileIDC: number,
      generalProfileCompatibilityFlags: number,
      generalTierFlag: number,
      generalConstraintIndicatorFlags: [
        number,
        number,
        number,
        number,
        number,
        number,
      ],
      generalLevelIDC: number
    ) {
      return {
        generalProfileSpace: generalProfileSpace,
        generalProfileIdc: generalProfileIDC,
        generalProfileCompatibilityFlags: generalProfileCompatibilityFlags,
        generalTierFlag: generalTierFlag,
        generalConstraintIndicatorFlags: Number(
          new DataView(
            new Uint8Array([0, 0, ...generalConstraintIndicatorFlags]).buffer
          ).getBigUint64(0, false)
        ),
        generalLevelIdc: generalLevelIDC,
      } as unknown as HEVCDecoderConfigurationRecordType;
    }
    // BUGFIX: these cases previously used chai-style `assert(value, message)`,
    // which only checks truthiness — the expected string was the *message*
    // and was never compared. Use expect(...).toBe(...) so the Chrome
    // test-suite expectations are actually verified.
    expect(
      genCodecStringByHEVCDecoderConfigurationRecord(
        makeHEVCParameterSet(0, 1, 0x60000000, 0, [0, 0, 0, 0, 0, 0], 93)
      )
    ).toBe('hev1.1.6.L93');
    expect(
      genCodecStringByHEVCDecoderConfigurationRecord(
        makeHEVCParameterSet(1, 4, 0x82000000, 1, [0, 0, 0, 0, 0, 0], 120)
      )
    ).toBe('hev1.A4.41.H120');
    expect(
      genCodecStringByHEVCDecoderConfigurationRecord(
        makeHEVCParameterSet(0, 1, 0x60000000, 0, [176, 0, 0, 0, 0, 0], 93)
      )
    ).toBe('hev1.1.6.L93.B0');
    expect(
      genCodecStringByHEVCDecoderConfigurationRecord(
        makeHEVCParameterSet(1, 4, 0x82000000, 1, [176, 35, 0, 0, 0, 0], 120)
      )
    ).toBe('hev1.A4.41.H120.B0.23');
    expect(
      genCodecStringByHEVCDecoderConfigurationRecord(
        makeHEVCParameterSet(
          2,
          1,
          0xf77db57b,
          1,
          [18, 52, 86, 120, 154, 188],
          254
        )
      )
    ).toBe('hev1.B1.DEADBEEF.H254.12.34.56.78.9A.BC');
  });
});

View File

@@ -0,0 +1,54 @@
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
import { VideoCodecId } from '@konoplayer/matroska/codecs';
import {
genCodecStringByVP9DecoderConfigurationRecord,
parseVP9DecoderConfigurationRecord,
VP9ColorSpaceEnum,
VP9Subsampling,
} from '@konoplayer/matroska/codecs/vp9';
import { loadComponentFromRangedResource } from '../utils/data';
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
import { isTagIdPos } from '@konoplayer/matroska/util';
describe('VP9 code test', () => {
  it('should parse vp9 meta from track entry and keyframe', async () => {
    // Decode the whole Segment of a small VP9 sample into a component tree.
    const [segment] = await loadComponentFromRangedResource<SegmentType>({
      resource: 'video/test-vp9.mkv',
      predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
      schema: SegmentSchema,
    });
    const vp9Track = segment.Tracks?.TrackEntry.find(
      (t) => t.CodecID === VideoCodecId.VP9
    )!;
    expect(vp9Track).toBeDefined();
    // VP9 in Matroska carries no CodecPrivate; config comes from the bitstream.
    expect(vp9Track.CodecPrivate).toBeFalsy();
    const keyframe = segment
      .Cluster!.flatMap((c) => c.SimpleBlock || [])
      .find((b) => b.keyframe && b.track === vp9Track.TrackNumber)!;
    expect(keyframe).toBeDefined();
    expect(keyframe.frames.length).toBe(1);
    // The uncompressed keyframe header yields the decoder configuration.
    const meta = parseVP9DecoderConfigurationRecord(
      vp9Track,
      keyframe.frames[0]
    )!;
    expect(meta).toBeDefined();
    expect(meta.bitDepth).toBe(8);
    expect(meta.subsampling).toBe(VP9Subsampling.YUV420);
    expect(meta.width).toBe(640);
    expect(meta.height).toBe(360);
    expect(meta.colorSpace).toBe(VP9ColorSpaceEnum.BT_601);
    expect(meta.profile).toBe(0);
    const codecStr = genCodecStringByVP9DecoderConfigurationRecord(meta);
    expect(codecStr).toBe('vp09.00.21.08');
  });
});

View File

@@ -0,0 +1,56 @@
import { Type } from 'arktype';
import { EbmlStreamDecoder, EbmlTagPosition, EbmlTagType } from 'konoebml';
import { convertEbmlTagToComponent } from '@konoplayer/matroska/util';
import fs from 'node:fs';
import { Readable } from 'node:stream';
import { TransformStream } from 'node:stream/web';
import path from 'node:path';
// Options for loading a byte range of a test resource file.
export interface LoadRangedResourceOptions<S extends Type<any> = any> {
  // Path relative to the test resources directory.
  resource: string;
  byteStart?: number;
  byteEnd?: number;
  // Optional arktype schema asserted against each matched component.
  schema?: S;
  // Selects which decoded tags become components.
  predicate?: (tag: EbmlTagType) => boolean;
}
/**
 * Read a (ranged) test resource file, decode it as an EBML tag stream,
 * convert each tag matching `predicate` into a component, and optionally
 * validate it against `schema`.
 * Default predicate: top-level non-Start tags.
 */
export async function loadComponentFromRangedResource<
  T,
  S extends Type<any> = any,
>({
  resource,
  byteStart,
  byteEnd,
  predicate = (tag) => !tag?.parent && tag.position !== EbmlTagPosition.Start,
  schema,
}: LoadRangedResourceOptions<S>): Promise<T[]> {
  // Bridge a node file stream into a web ReadableStream for the decoder.
  const input = Readable.toWeb(
    fs.createReadStream(
      path.join(import.meta.dirname, '..', '..', '..', 'resources', resource),
      {
        start: byteStart,
        end: byteEnd,
      }
    )
  );
  const output = input.pipeThrough(
    new EbmlStreamDecoder({
      streamStartOffset: byteStart,
      collectChild: true,
    }) as unknown as TransformStream<Uint8Array, EbmlTagType>
  );
  const result: T[] = [];
  for await (const t of output) {
    if (predicate(t)) {
      let component = convertEbmlTagToComponent(t) as T;
      if (schema) {
        // throws on validation failure, failing the test with details
        component = schema.assert(component);
      }
      result.push(component);
    }
  }
  return result;
}

30
apps/test/tsconfig.json Normal file
View File

@@ -0,0 +1,30 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"composite": true,
"outDir": "./dist",
"types": [
"vitest/globals",
"node"
],
"paths": {
"@konoplayer/core/*": [
"../../packages/core/src/*"
],
"@konoplayer/matroska/*": [
"../../packages/matroska/src/*"
]
}
},
"include": [
"src"
],
"references": [
{
"path": "../../packages/core"
},
{
"path": "../../packages/matroska"
}
]
}

View File

@@ -0,0 +1,33 @@
import swc from 'unplugin-swc';
import tsconfigPaths from 'vite-tsconfig-paths';
import { defineConfig } from 'vitest/config';
// Vitest configuration for the workspace test app.
export default defineConfig({
  cacheDir: '.vitest',
  test: {
    setupFiles: ['src/init-test.ts'],
    // happy-dom supplies browser-like globals the packages expect
    environment: 'happy-dom',
    include: ['src/**/*.spec.ts'],
    globals: true,
    restoreMocks: true,
    coverage: {
      // you can include other reporters, but 'json-summary' is required, json is recommended
      reporter: ['text', 'json-summary', 'json'],
      // If you want a coverage reports even if your tests are failing, include the reportOnFailure option
      reportOnFailure: true,
      // measure coverage of the workspace packages, not just this test app
      include: ['../../packages/core/src/**', '../../packages/matroska/src/**'],
    },
  },
  plugins: [
    tsconfigPaths(),
    swc.vite({
      include: /\.[mc]?[jt]sx?$/,
      // for git+ package only
      exclude: [
        /node_modules\/(?!@konoplayer|\.pnpm)/,
        /node_modules\/\.pnpm\/(?!@konoplayer)/,
      ] as any,
      tsconfigFile: './tsconfig.json',
    }),
  ],
});