Compare commits: 0b681d4fd1 ... ee4eee473d
9 commits (SHA1):
ee4eee473d
9896c4caec
8cc1a2bab1
39e17eb6a5
3c317627e7
39a4cf2773
42e36e3c68
53f2bc8ca7
54edfd2fdc
.vscode/extensions.json (vendored) · 6 lines changed
@@ -1,5 +1,9 @@
 {
   "recommendations": [
-    "runem.lit-plugin"
+    "runem.lit-plugin",
+    "vitest.explorer",
+    "biomejs.biome",
+    "hbenl.vscode-test-explorer",
+    "zerotaskx.rust-extension-pack"
   ]
 }
Cargo.lock (generated) · 409 lines changed
@ -2,24 +2,75 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.70.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
|
||||
|
||||
[[package]]
|
||||
name = "bytemuck"
|
||||
version = "1.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cexpr"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||
dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
@ -27,12 +78,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "demuxing"
|
||||
version = "0.1.0"
|
||||
name = "clang-sys"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
|
||||
dependencies = [
|
||||
"symphonia-format-mkv",
|
||||
"glob",
|
||||
"libc",
|
||||
"libloading",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
||||
|
||||
[[package]]
|
||||
name = "encoding_rs"
|
||||
version = "0.8.35"
|
||||
@ -42,18 +103,247 @@ dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "extended"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af9673d8203fcb076b19dfd17e38b3d4ae9f44959416ea532ce72415a6020365"
|
||||
|
||||
[[package]]
|
||||
name = "ffmpeg-sys-next"
|
||||
version = "7.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2bc3234d0a4b2f7d083699d0860c6c9dd83713908771b60f94a96f8704adfe45"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"cc",
|
||||
"libc",
|
||||
"num_cpus",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
|
||||
dependencies = [
|
||||
"either",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "konoplayer-ffmpeg"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ffmpeg-sys-next",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "konoplayer-symphonia"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"symphonia",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.171"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
|
||||
[[package]]
|
||||
name = "minimal-lexical"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "7.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "1.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkg-config"
|
||||
version = "0.3.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.94"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "symphonia"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "815c942ae7ee74737bb00f965fa5b5a2ac2ce7b6c01c0cc169bbeaf7abd5f5a9"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"symphonia-bundle-flac",
|
||||
"symphonia-codec-adpcm",
|
||||
"symphonia-codec-pcm",
|
||||
"symphonia-codec-vorbis",
|
||||
"symphonia-core",
|
||||
"symphonia-format-mkv",
|
||||
"symphonia-format-ogg",
|
||||
"symphonia-format-riff",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-bundle-flac"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72e34f34298a7308d4397a6c7fbf5b84c5d491231ce3dd379707ba673ab3bd97"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-adpcm"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c94e1feac3327cd616e973d5be69ad36b3945f16b06f19c6773fc3ac0b426a0f"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-pcm"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f395a67057c2ebc5e84d7bb1be71cce1a7ba99f64e0f0f0e303a03f79116f89b"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-vorbis"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a98765fb46a0a6732b007f7e2870c2129b6f78d87db7987e6533c8f164a9f30"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-core"
|
||||
version = "0.5.4"
|
||||
@ -61,7 +351,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "798306779e3dc7d5231bd5691f5a813496dc79d3f56bf82e25789f2094e022c3"
|
||||
dependencies = [
|
||||
"arrayvec",
|
||||
"bitflags",
|
||||
"bitflags 1.3.2",
|
||||
"bytemuck",
|
||||
"lazy_static",
|
||||
"log",
|
||||
@ -80,6 +370,30 @@ dependencies = [
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-format-ogg"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ada3505789516bcf00fc1157c67729eded428b455c27ca370e41f4d785bfa931"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-format-riff"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05f7be232f962f937f4b7115cbe62c330929345434c834359425e043bfd15f50"
|
||||
dependencies = [
|
||||
"extended",
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-metadata"
|
||||
version = "0.5.4"
|
||||
@ -101,3 +415,90 @@ dependencies = [
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
||||
|
||||
[[package]]
|
||||
name = "vcpkg"
|
||||
version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
@@ -1,3 +1,3 @@
 [workspace]
-members = ["packages/demuxing"]
-resolver = "2"
+members = ["packages/symphonia", "packages/ffmpeg"]
+resolver = "3"
README.md · 39 lines changed
@@ -1 +1,40 @@
# konoplayer

**A project initially launched just for fun, to watch animations in the widely used but poorly supported MKV format in browsers.**

## State of Prototype
- [x] Matroska support
  - [x] Parse EBML and demux (Done / TypeScript)
  - [x] Validate data against the Matroska v4 doc (Done / TypeScript)
  - [x] WebCodecs decoding + Canvas rendering (Prototyping / TypeScript)
  - [x] Parse track CodecID/CodecPrivate and generate the codec string (Partial / TypeScript)
    - Video:
      - [x] VP9
      - [x] VP8
      - [x] AVC
      - [x] HEVC
      - [x] AV1
    - Audio:
      - [x] AAC
      - [x] MP3
      - [x] AC3
      - [ ] OPUS (not tested, needs more work)
      - [ ] VORBIS (needs a fix)
      - [ ] EAC-3 (needs a fix)
      - [ ] PCM (needs testing)
      - [ ] ALAC (needs testing)
      - [ ] FLAC (needs testing)
- [ ] Wrap the video element with customElements (Prototyping / lit-html + TypeScript)
- [ ] Add a WebCodecs polyfill with ffmpeg or libav (Todo / WASM)
  - [x] Chrome/Edge/Android WebView: native WebCodecs support
  - [ ] Firefox
    - [x] VP8/VP9/AV1: native support
    - [x] AVC/HEVC 8-bit: native support
    - [ ] AVC/HEVC >= 10-bit: polyfill needed
    - [ ] Firefox for Android: not supported
  - [ ] Safari
    - [x] VP8/VP9/AV1: native support
    - [x] AVC/HEVC 8-bit: native support
    - [ ] AVC/HEVC >= 10-bit: polyfill needed on some devices
    - [ ] Audio decoder: polyfill needed
- [ ] Danmaku integration (Todo / TypeScript)
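The checklist above ties per-codec support to whether the browser's WebCodecs implementation accepts the codec string generated from a track's CodecID/CodecPrivate. As a rough illustration of that decision (not code from this repository), the sketch below probes a few candidate codec strings with `VideoDecoder.isConfigSupported`; the AVC/HEVC/AV1 strings are taken from the test expectations later in this diff, the VP9 string and the fallback wording are assumptions.

```typescript
// Sketch only: probe WebCodecs support for generated codec strings, to see
// where the planned ffmpeg/libav polyfill would have to take over.
async function probeVideoCodec(codec: string): Promise<boolean> {
  // VideoDecoder.isConfigSupported is the standard WebCodecs capability check.
  const { supported } = await VideoDecoder.isConfigSupported({
    codec,
    codedWidth: 1280,
    codedHeight: 720,
  });
  return Boolean(supported);
}

const candidates = ['avc1.64001e', 'hev1.1.6.L63.90', 'vp09.00.10.08', 'av01.0.01M.08.0.110'];

for (const codec of candidates) {
  probeVideoCodec(codec).then((ok) =>
    console.log(`${codec}: ${ok ? 'native WebCodecs decode' : 'polyfill needed'}`)
  );
}
```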
apps/mock/.gitignore (vendored) · 1 line changed
@@ -1 +0,0 @@
public/video-sample/huge/*
@@ -1,5 +1,5 @@
 {
-  "name": "mock",
+  "name": "@konoplayer/mock",
   "version": "0.1.0",
   "private": true,
   "scripts": {
apps/mock/public/.gitignore (vendored, new file) · 2 lines
@@ -0,0 +1,2 @@
video/huge/*
!video/huge/.gitkeep
3 changed binary files (not shown).
BIN  apps/mock/public/video/test-av1.mkv (new file, binary not shown)
BIN  apps/mock/public/video/test-avc.mkv (new file, binary not shown)
BIN  apps/mock/public/video/test-hevc.mkv (new file, binary not shown)
BIN  apps/mock/public/video/test-theora.mkv (new file, binary not shown)
BIN  apps/mock/public/video/test-vp8.mkv (new file, binary not shown)
BIN  apps/mock/public/video/test-vp9.mkv (new file, binary not shown)
@@ -4,10 +4,8 @@
    "composite": true,
    "module": "CommonJS",
    "moduleResolution": "node",
    "declaration": true,
    "emitDeclarationOnly": false,
    "emitDecoratorMetadata": true,
    "experimentalDecorators": true,
    "allowImportingTsExtensions": false,
    "outDir": "./dist",
    "rootDir": ".",
@@ -23,6 +21,6 @@
    "node_modules",
    "dist",
    "test",
-   "**/*spec.ts"
+   "**/*spec"
  ]
}
@@ -1,6 +1,6 @@
 {
-  "name": "playground",
-  "version": "1.0.0",
+  "name": "@konoplayer/playground",
+  "version": "0.1.0",
   "private": true,
   "type": "module",
   "scripts": {
@@ -9,11 +9,11 @@
     "preview": "rsbuild preview"
   },
   "dependencies": {
-    "konoebml": "0.1.1",
-    "lit": "^3.2.1"
+    "lit": "^3.2.1",
+    "@konoplayer/core": "workspace:*",
+    "@konoplayer/matroska": "workspace:*"
   },
   "devDependencies": {
-    "@rsbuild/core": "^1.2.14",
-    "typescript": "^5.8.2"
+    "@rsbuild/core": "^1.2.14"
   }
 }
@ -1,60 +0,0 @@
|
||||
export interface RangedStream {
|
||||
controller: AbortController;
|
||||
response: Response;
|
||||
body: ReadableStream<Uint8Array>;
|
||||
totalSize?: number;
|
||||
}
|
||||
|
||||
export async function createRangedStream(
|
||||
url: string,
|
||||
byteStart = 0,
|
||||
byteEnd?: number
|
||||
) {
|
||||
const controller = new AbortController();
|
||||
const signal = controller.signal;
|
||||
const headers = new Headers();
|
||||
headers.append(
|
||||
'Range',
|
||||
typeof byteEnd === 'number'
|
||||
? `bytes=${byteStart}-${byteEnd}`
|
||||
: `bytes=${byteStart}-`
|
||||
);
|
||||
|
||||
const response = await fetch(url, { signal, headers });
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('fetch video stream failed');
|
||||
}
|
||||
|
||||
const acceptRanges = response.headers.get('Accept-Ranges');
|
||||
|
||||
if (acceptRanges !== 'bytes') {
|
||||
throw new Error('video server does not support byte ranges');
|
||||
}
|
||||
|
||||
const body = response.body;
|
||||
|
||||
if (!(body instanceof ReadableStream)) {
|
||||
throw new Error('can not get readable stream from response.body');
|
||||
}
|
||||
|
||||
const contentRange = response.headers.get('Content-Range');
|
||||
|
||||
//
|
||||
// Content-Range Header Syntax:
|
||||
// Content-Range: <unit> <range-start>-<range-end>/<size>
|
||||
// Content-Range: <unit> <range-start>-<range-end>/*
|
||||
// Content-Range: <unit> */<size>
|
||||
//
|
||||
const totalSize = contentRange
|
||||
? Number.parseInt(contentRange.split('/')[1], 10)
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
controller,
|
||||
response,
|
||||
body,
|
||||
totalSize,
|
||||
};
|
||||
}
|
||||
|
@@ -3,7 +3,8 @@
 <head></head>

 <body>
-  <my-element />
-  <video-pipeline-demo src="/api/static/video-sample/test.webm" />
-  <!-- <video-pipeline-demo src="/api/static/video-sample/huge/animation.mkv" /> -->
+  <!-- <my-element />-->
+  <!-- <video-pipeline-demo src="/api/static/video/test-hevc.mkv" width="800" height="450"></video-pipeline-demo> -->
+  <video-pipeline-demo src="/api/static/video/huge/test8.mkv" width="800" height="450"></video-pipeline-demo>
+  <!-- <video-pipeline-demo src="/api/static/video/huge/[LoliHouse] Amagami-san Chi no Enmusubi - 23 [WebRip 1080p HEVC-10bit AAC SRTx2].mkv" width="800" height="450" /> -->
 </body>
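The `<video-pipeline-demo>` tag used above corresponds to the `VideoPipelineDemo` Lit element defined later in this diff; the registration step that binds the tag name to the class is not shown in this compare view. A hedged sketch of a typical registration and scripted usage, with the module path and the `customElements.define` call assumed:

```typescript
// Hypothetical entry point (import path and registration are assumptions).
import { VideoPipelineDemo } from './video-pipeline-demo';

if (!customElements.get('video-pipeline-demo')) {
  customElements.define('video-pipeline-demo', VideoPipelineDemo);
}

// Equivalent scripted usage of the markup above.
const demo = document.createElement('video-pipeline-demo');
demo.setAttribute('src', '/api/static/video/test-av1.mkv');
demo.setAttribute('width', '800');
demo.setAttribute('height', '450');
document.body.appendChild(demo);
```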
@ -1,282 +0,0 @@
|
||||
import {
|
||||
type EbmlClusterTagType,
|
||||
type EbmlCuePointTagType,
|
||||
type EbmlCuesTagType,
|
||||
type EbmlInfoTagType,
|
||||
type EbmlMasterTagType,
|
||||
type EbmlSeekHeadTagType,
|
||||
type EbmlSegmentTagType,
|
||||
EbmlTagIdEnum,
|
||||
EbmlTagPosition,
|
||||
type EbmlTagType,
|
||||
type EbmlTrackEntryTagType,
|
||||
type EbmlTracksTagType,
|
||||
} from 'konoebml';
|
||||
import {convertEbmlTagToComponent, type InferType,} from './util';
|
||||
import {isEqual, maxBy} from 'lodash-es';
|
||||
import {ArkErrors, type Type} from 'arktype';
|
||||
import {
|
||||
ClusterSchema,
|
||||
type ClusterType,
|
||||
CuePointSchema,
|
||||
type CuePointType,
|
||||
type CueTrackPositionsType,
|
||||
InfoSchema,
|
||||
type InfoType,
|
||||
SeekHeadSchema,
|
||||
type SeekHeadType,
|
||||
TrackEntrySchema,
|
||||
type TrackEntryType
|
||||
} from './schema';
|
||||
|
||||
export const SEEK_ID_KAX_INFO = new Uint8Array([0x15, 0x49, 0xa9, 0x66]);
|
||||
export const SEEK_ID_KAX_TRACKS = new Uint8Array([0x16, 0x54, 0xae, 0x6b]);
|
||||
export const SEEK_ID_KAX_CUES = new Uint8Array([0x1c, 0x53, 0xbb, 0x6b]);
|
||||
|
||||
export class SegmentSystem {
|
||||
startTag: EbmlSegmentTagType;
|
||||
headTags: EbmlTagType[] = [];
|
||||
|
||||
cue: CueSystem;
|
||||
cluster: ClusterSystem;
|
||||
seek: SeekSystem;
|
||||
info: InfoSystem;
|
||||
track: TrackSystem;
|
||||
|
||||
|
||||
constructor(startNode: EbmlSegmentTagType) {
|
||||
this.startTag = startNode;
|
||||
this.cue = new CueSystem(this);
|
||||
this.cluster = new ClusterSystem(this);
|
||||
this.seek = new SeekSystem(this);
|
||||
this.info = new InfoSystem(this);
|
||||
this.track = new TrackSystem(this);
|
||||
}
|
||||
|
||||
get dataStartOffset() {
|
||||
return this.startTag.startOffset + this.startTag.headerLength;
|
||||
}
|
||||
|
||||
get startOffset () {
|
||||
return this.startTag.startOffset;
|
||||
}
|
||||
|
||||
completeHeads () {
|
||||
const infoTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_INFO);
|
||||
const tracksTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_TRACKS);
|
||||
const cuesTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_CUES);
|
||||
|
||||
if (cuesTag?.id === EbmlTagIdEnum.Cues) {
|
||||
this.cue.prepareCuesWithTag(cuesTag)
|
||||
}
|
||||
if (infoTag?.id === EbmlTagIdEnum.Info) {
|
||||
this.info.prepareWithInfoTag(infoTag);
|
||||
}
|
||||
if (tracksTag?.id === EbmlTagIdEnum.Tracks) {
|
||||
this.track.prepareTracksWithTag(tracksTag);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
scanHead (tag: EbmlTagType) {
|
||||
if (
|
||||
tag.id === EbmlTagIdEnum.SeekHead &&
|
||||
tag.position === EbmlTagPosition.End
|
||||
) {
|
||||
this.seek.addSeekHeadTag(tag);
|
||||
}
|
||||
this.headTags.push(tag);
|
||||
this.seek.memoTag(tag);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class SegmentComponentSystemTrait<E extends EbmlMasterTagType, S extends Type<any>> {
|
||||
segment: SegmentSystem;
|
||||
|
||||
get schema(): S {
|
||||
throw new Error("unimplemented!")
|
||||
}
|
||||
|
||||
constructor(segment: SegmentSystem) {
|
||||
this.segment = segment;
|
||||
}
|
||||
|
||||
componentFromTag(tag: E): InferType<S> {
|
||||
const extracted = convertEbmlTagToComponent(tag);
|
||||
const result = this.schema(extracted);
|
||||
if (result instanceof ArkErrors) {
|
||||
const errors = result;
|
||||
console.error('Parse component from tag error:', tag.toDebugRecord(), errors.flatProblemsByPath)
|
||||
throw errors;
|
||||
}
|
||||
return result as InferType<S>
|
||||
}
|
||||
}
|
||||
|
||||
export class SeekSystem extends SegmentComponentSystemTrait<EbmlSeekHeadTagType, typeof SeekHeadSchema> {
|
||||
override get schema() {
|
||||
return SeekHeadSchema;
|
||||
}
|
||||
|
||||
seekHeads: SeekHeadType[] = [];
|
||||
offsetToTagMemo: Map<number, EbmlTagType> = new Map();
|
||||
|
||||
memoTag (tag: EbmlTagType) {
|
||||
this.offsetToTagMemo.set(tag.startOffset, tag);
|
||||
}
|
||||
|
||||
addSeekHeadTag (tag: EbmlSeekHeadTagType) {
|
||||
const seekHead = this.componentFromTag(tag);
|
||||
this.seekHeads.push(seekHead);
|
||||
return seekHead;
|
||||
}
|
||||
|
||||
offsetFromSeekPosition (position: number): number {
|
||||
return position + this.segment.startOffset;
|
||||
}
|
||||
|
||||
offsetFromSeekDataPosition (position: number) : number {
|
||||
return position + this.segment.dataStartOffset;
|
||||
}
|
||||
|
||||
seekTagByStartOffset (
|
||||
startOffset: number | undefined
|
||||
): EbmlTagType | undefined {
|
||||
return startOffset! >= 0
|
||||
? this.offsetToTagMemo.get(startOffset!)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
seekOffsetBySeekId(seekId: Uint8Array): number | undefined {
|
||||
const seekPosition = this.seekHeads[0]?.Seek?.find((c) => isEqual(c.SeekID, seekId))
|
||||
?.SeekPosition;
|
||||
return seekPosition! >= 0 ? this.offsetFromSeekPosition(seekPosition!) : undefined;
|
||||
}
|
||||
|
||||
seekTagBySeekId(seekId: Uint8Array): EbmlTagType | undefined {
|
||||
return this.seekTagByStartOffset(
|
||||
this.seekOffsetBySeekId(seekId)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class InfoSystem extends SegmentComponentSystemTrait<EbmlInfoTagType, typeof InfoSchema> {
|
||||
override get schema() {
|
||||
return InfoSchema;
|
||||
}
|
||||
|
||||
info!: InfoType;
|
||||
|
||||
prepareWithInfoTag (tag: EbmlInfoTagType) {
|
||||
this.info = this.componentFromTag(tag);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class ClusterSystem extends SegmentComponentSystemTrait<EbmlClusterTagType, typeof ClusterSchema> {
|
||||
override get schema() {
|
||||
return ClusterSchema
|
||||
}
|
||||
|
||||
clustersBuffer: ClusterType[] = [];
|
||||
|
||||
addClusterWithTag (tag: EbmlClusterTagType): ClusterType {
|
||||
const cluster = this.componentFromTag(tag);
|
||||
this.clustersBuffer.push(cluster);
|
||||
return cluster;
|
||||
}
|
||||
}
|
||||
|
||||
export class TrackSystem extends SegmentComponentSystemTrait<EbmlTrackEntryTagType, typeof TrackEntrySchema> {
|
||||
override get schema() {
|
||||
return TrackEntrySchema;
|
||||
}
|
||||
|
||||
tracks = new Map<number, TrackEntryType>();
|
||||
|
||||
prepareTracksWithTag (tag: EbmlTracksTagType) {
|
||||
this.tracks.clear();
|
||||
for (const c of tag.children) {
|
||||
if (c.id === EbmlTagIdEnum.TrackEntry) {
|
||||
const trackEntry = this.componentFromTag(c);
|
||||
this.tracks.set(trackEntry.TrackNumber, trackEntry);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class CueSystem extends SegmentComponentSystemTrait<
|
||||
EbmlCuePointTagType,
|
||||
typeof CuePointSchema
|
||||
> {
|
||||
override get schema () {
|
||||
return CuePointSchema
|
||||
};
|
||||
|
||||
cues: CuePointType[] = [];
|
||||
|
||||
|
||||
prepareCuesWithTag (tag: EbmlCuesTagType) {
|
||||
this.cues = tag.children
|
||||
.filter(c => c.id === EbmlTagIdEnum.CuePoint)
|
||||
.map(this.componentFromTag.bind(this));
|
||||
return this;
|
||||
}
|
||||
|
||||
findClosestCue(seekTime: number): CuePointType | undefined {
|
||||
const cues = this.cues;
|
||||
if (!cues || cues.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let left = 0;
|
||||
let right = cues.length - 1;
|
||||
|
||||
if (seekTime <= cues[0].CueTime) {
|
||||
return cues[0];
|
||||
}
|
||||
|
||||
if (seekTime >= cues[right].CueTime) {
|
||||
return cues[right];
|
||||
}
|
||||
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
|
||||
if (cues[mid].CueTime === seekTime) {
|
||||
return cues[mid];
|
||||
}
|
||||
|
||||
if (cues[mid].CueTime < seekTime) {
|
||||
left = mid + 1;
|
||||
} else {
|
||||
right = mid - 1;
|
||||
}
|
||||
}
|
||||
|
||||
const before = cues[right];
|
||||
const after = cues[left];
|
||||
return Math.abs(before.CueTime - seekTime) <
|
||||
Math.abs(after.CueTime - seekTime)
|
||||
? before
|
||||
: after;
|
||||
}
|
||||
|
||||
getCueTrackPositions (cuePoint: CuePointType, track?: number): CueTrackPositionsType {
|
||||
let cueTrackPositions: CueTrackPositionsType | undefined;
|
||||
if (track! >= 0) {
|
||||
cueTrackPositions = cuePoint.CueTrackPositions.find(c => c.CueTrack === track);
|
||||
}
|
||||
if (!cueTrackPositions) {
|
||||
cueTrackPositions = maxBy(cuePoint.CueTrackPositions, c => c.CueClusterPosition)!;
|
||||
}
|
||||
return cueTrackPositions;
|
||||
}
|
||||
|
||||
get prepared (): boolean {
|
||||
return this.cues.length > 0;
|
||||
}
|
||||
}
|
@ -1,319 +0,0 @@
|
||||
import {
|
||||
type EbmlTagType,
|
||||
EbmlStreamDecoder,
|
||||
EbmlTagIdEnum,
|
||||
EbmlTagPosition,
|
||||
} from 'konoebml';
|
||||
import {
|
||||
Observable,
|
||||
from,
|
||||
switchMap,
|
||||
share,
|
||||
defer,
|
||||
EMPTY,
|
||||
of,
|
||||
filter,
|
||||
finalize,
|
||||
isEmpty,
|
||||
map,
|
||||
merge,
|
||||
raceWith,
|
||||
reduce,
|
||||
scan,
|
||||
shareReplay,
|
||||
take,
|
||||
takeUntil,
|
||||
withLatestFrom,
|
||||
} from 'rxjs';
|
||||
import { createRangedStream } from '@/fetch';
|
||||
import { SegmentSystem, SEEK_ID_KAX_CUES, type CueSystem } from './model';
|
||||
import { isTagIdPos } from './util';
|
||||
import type { ClusterType } from "./schema";
|
||||
|
||||
export function createRangedEbmlStream(
|
||||
url: string,
|
||||
byteStart = 0,
|
||||
byteEnd?: number
|
||||
): Observable<{
|
||||
ebml$: Observable<EbmlTagType>;
|
||||
totalSize?: number;
|
||||
response: Response;
|
||||
body: ReadableStream<Uint8Array>;
|
||||
controller: AbortController;
|
||||
}> {
|
||||
const stream$ = from(createRangedStream(url, byteStart, byteEnd));
|
||||
|
||||
return stream$.pipe(
|
||||
switchMap(({ controller, body, totalSize, response }) => {
|
||||
let requestCompleted = false;
|
||||
const originRequest$ = new Observable<EbmlTagType>((subscriber) => {
|
||||
body
|
||||
.pipeThrough(
|
||||
new EbmlStreamDecoder({
|
||||
streamStartOffset: byteStart,
|
||||
collectChild: (child) => child.id !== EbmlTagIdEnum.Cluster,
|
||||
})
|
||||
)
|
||||
.pipeTo(
|
||||
new WritableStream({
|
||||
write: (tag) => subscriber.next(tag),
|
||||
close: () => {
|
||||
if (!requestCompleted) {
|
||||
subscriber.complete();
|
||||
}
|
||||
},
|
||||
})
|
||||
)
|
||||
.catch((error) => {
|
||||
if (requestCompleted && error?.name === 'AbortError') {
|
||||
return;
|
||||
}
|
||||
subscriber.error(error);
|
||||
});
|
||||
|
||||
return () => {
|
||||
requestCompleted = true;
|
||||
controller.abort();
|
||||
};
|
||||
}).pipe(
|
||||
share({
|
||||
resetOnComplete: false,
|
||||
resetOnError: false,
|
||||
resetOnRefCountZero: true,
|
||||
})
|
||||
);
|
||||
|
||||
const ebml$ = defer(() =>
|
||||
requestCompleted ? EMPTY : originRequest$
|
||||
).pipe(
|
||||
share({
|
||||
resetOnError: false,
|
||||
resetOnComplete: true,
|
||||
resetOnRefCountZero: true,
|
||||
})
|
||||
);
|
||||
|
||||
return of({
|
||||
ebml$,
|
||||
totalSize,
|
||||
response,
|
||||
body,
|
||||
controller,
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
export function createEbmlController(src: string) {
|
||||
const request$ = createRangedEbmlStream(src, 0);
|
||||
|
||||
const controller$ = request$.pipe(
|
||||
map(({ totalSize, ebml$, response, controller }) => {
|
||||
const head$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.EBML, EbmlTagPosition.End)),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
console.debug(
|
||||
`stream of video "${src}" created, total size is ${totalSize ?? 'unknown'}`
|
||||
);
|
||||
|
||||
const segmentStart$ = ebml$.pipe(
|
||||
filter((s) => s.position === EbmlTagPosition.Start),
|
||||
filter((tag) => tag.id === EbmlTagIdEnum.Segment)
|
||||
);
|
||||
|
||||
const segments$ = segmentStart$.pipe(
|
||||
map((startTag) => {
|
||||
const segment = new SegmentSystem(startTag);
|
||||
const clusterSystem = segment.cluster;
|
||||
const seekSystem = segment.seek;
|
||||
|
||||
const continuousReusedCluster$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
|
||||
filter((s) => s.id === EbmlTagIdEnum.Cluster),
|
||||
map(clusterSystem.addClusterWithTag.bind(clusterSystem))
|
||||
);
|
||||
|
||||
const segmentEnd$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End)),
|
||||
filter((tag) => tag.id === EbmlTagIdEnum.Segment),
|
||||
take(1)
|
||||
);
|
||||
|
||||
const clusterStart$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.Start)),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const meta$ = ebml$.pipe(
|
||||
takeUntil(clusterStart$.pipe(raceWith(segmentEnd$))),
|
||||
share({
|
||||
resetOnComplete: false,
|
||||
resetOnError: false,
|
||||
resetOnRefCountZero: true,
|
||||
})
|
||||
);
|
||||
|
||||
const withMeta$ = meta$.pipe(
|
||||
reduce((segment, meta) => segment.scanHead(meta), segment),
|
||||
map(segment.completeHeads.bind(segment)),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const withRemoteCues$ = withMeta$.pipe(
|
||||
switchMap((s) => {
|
||||
const cueSystem = s.cue;
|
||||
const seekSystem = s.seek;
|
||||
if (cueSystem.prepared) {
|
||||
return EMPTY;
|
||||
}
|
||||
const remoteCuesTagStartOffset = seekSystem.seekOffsetBySeekId(SEEK_ID_KAX_CUES);
|
||||
if (remoteCuesTagStartOffset! >= 0) {
|
||||
return createRangedEbmlStream(src, remoteCuesTagStartOffset).pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cues, EbmlTagPosition.End)),
|
||||
withLatestFrom(withMeta$),
|
||||
map(([cues, withMeta]) => {
|
||||
withMeta.cue.prepareCuesWithTag(cues);
|
||||
return withMeta;
|
||||
})
|
||||
);
|
||||
}
|
||||
return EMPTY;
|
||||
}),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const withLocalCues$ = withMeta$.pipe(
|
||||
switchMap((s) => s.cue.prepared ? of(s) : EMPTY),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const withCues$ = merge(withLocalCues$, withRemoteCues$).pipe(
|
||||
take(1)
|
||||
);
|
||||
|
||||
const withoutCues$ = withCues$.pipe(
|
||||
isEmpty(),
|
||||
switchMap((empty) => (empty ? withMeta$ : EMPTY))
|
||||
);
|
||||
|
||||
const seekWithoutCues = (seekTime: number): Observable<ClusterType> => {
|
||||
const cluster$ = continuousReusedCluster$.pipe(
|
||||
isEmpty(),
|
||||
switchMap((empty) => {
|
||||
return empty
|
||||
? clusterStart$.pipe(
|
||||
switchMap((startTag) =>
|
||||
createRangedEbmlStream(src, startTag.startOffset)
|
||||
),
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(
|
||||
isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)
|
||||
),
|
||||
map((tag) => clusterSystem.addClusterWithTag(tag))
|
||||
)
|
||||
: continuousReusedCluster$;
|
||||
})
|
||||
);
|
||||
if (seekTime === 0) {
|
||||
return cluster$;
|
||||
}
|
||||
|
||||
return cluster$.pipe(
|
||||
scan(
|
||||
(prev, curr) =>
|
||||
[prev?.[1], curr] as [
|
||||
ClusterType | undefined,
|
||||
ClusterType | undefined,
|
||||
],
|
||||
[undefined, undefined] as [
|
||||
ClusterType | undefined,
|
||||
ClusterType | undefined,
|
||||
]
|
||||
),
|
||||
filter((c) => c[1]?.Timestamp! > seekTime),
|
||||
map((c) => c[0] ?? c[1]!)
|
||||
);
|
||||
};
|
||||
|
||||
const seekWithCues = (
|
||||
cues: CueSystem,
|
||||
seekTime: number
|
||||
): Observable<ClusterType> => {
|
||||
if (seekTime === 0) {
|
||||
return seekWithoutCues(seekTime);
|
||||
}
|
||||
|
||||
const cuePoint = cues.findClosestCue(seekTime);
|
||||
|
||||
if (!cuePoint) {
|
||||
return seekWithoutCues(seekTime);
|
||||
}
|
||||
|
||||
return createRangedEbmlStream(
|
||||
src,
|
||||
seekSystem.offsetFromSeekDataPosition(cues.getCueTrackPositions(cuePoint).CueClusterPosition)
|
||||
).pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
|
||||
map(clusterSystem.addClusterWithTag.bind(clusterSystem))
|
||||
);
|
||||
};
|
||||
|
||||
const seek = (seekTime: number): Observable<ClusterType> => {
|
||||
if (seekTime === 0) {
|
||||
const subscription = merge(withCues$, withoutCues$).subscribe();
|
||||
|
||||
// if seekTime equals to 0 at start, reuse the initialize stream
|
||||
return seekWithoutCues(seekTime).pipe(
|
||||
finalize(() => {
|
||||
subscription.unsubscribe();
|
||||
})
|
||||
);
|
||||
}
|
||||
return merge(
|
||||
withCues$.pipe(
|
||||
switchMap((s) =>
|
||||
seekWithCues(s.cue, seekTime)
|
||||
)
|
||||
),
|
||||
withoutCues$.pipe(switchMap((_) => seekWithoutCues(seekTime)))
|
||||
);
|
||||
};
|
||||
|
||||
return {
|
||||
startTag,
|
||||
head$,
|
||||
segment,
|
||||
meta$,
|
||||
withMeta$,
|
||||
withCues$,
|
||||
withoutCues$,
|
||||
seekWithCues,
|
||||
seekWithoutCues,
|
||||
seek,
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
segments$,
|
||||
head$,
|
||||
totalSize,
|
||||
ebml$,
|
||||
controller,
|
||||
response,
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
controller$,
|
||||
request$,
|
||||
};
|
||||
}
|
@ -1,49 +1,358 @@
|
||||
import { html, css, LitElement } from 'lit';
|
||||
import { property } from 'lit/decorators.js';
|
||||
import { type Subscription, switchMap, take } from 'rxjs';
|
||||
import { createEbmlController } from './media/mkv/reactive';
|
||||
import {
|
||||
animationFrames,
|
||||
BehaviorSubject,
|
||||
combineLatest,
|
||||
EMPTY,
|
||||
map,
|
||||
Subject,
|
||||
switchMap,
|
||||
take,
|
||||
distinctUntilChanged,
|
||||
fromEvent,
|
||||
share,
|
||||
takeUntil,
|
||||
firstValueFrom,
|
||||
tap,
|
||||
throwIfEmpty,
|
||||
ReplaySubject,
|
||||
} from 'rxjs';
|
||||
import { createMatroska } from '@konoplayer/matroska/model';
|
||||
import { createRef, ref, type Ref } from 'lit/directives/ref.js';
|
||||
import { Queue } from 'mnemonist';
|
||||
|
||||
import type {
|
||||
AudioTrackContext,
|
||||
VideoTrackContext,
|
||||
} from '@konoplayer/matroska/systems';
|
||||
import {
|
||||
captureCanvasAsVideoSrcObject,
|
||||
createRenderingContext,
|
||||
renderBitmapAtRenderingContext,
|
||||
} from '@konoplayer/core/graphics';
|
||||
|
||||
export class VideoPipelineDemo extends LitElement {
|
||||
static styles = css``;
|
||||
|
||||
@property()
|
||||
src!: string;
|
||||
|
||||
subscripton?: Subscription;
|
||||
@property({ type: Number })
|
||||
width = 1280;
|
||||
|
||||
static styles = css``;
|
||||
@property({ type: Number })
|
||||
height = 720;
|
||||
|
||||
async prepareVideoPipeline() {
|
||||
if (!this.src) {
|
||||
destroyRef$ = new Subject<void>();
|
||||
|
||||
videoRef: Ref<HTMLVideoElement> = createRef();
|
||||
renderingContext = createRenderingContext();
|
||||
audioContext = new AudioContext({});
|
||||
|
||||
seeked$ = new ReplaySubject<number>(1);
|
||||
|
||||
videoFrameBuffer$ = new BehaviorSubject(new Queue<VideoFrame>());
|
||||
audioFrameBuffer$ = new BehaviorSubject(new Queue<AudioData>());
|
||||
|
||||
paused$ = new BehaviorSubject<boolean>(false);
|
||||
ended$ = new BehaviorSubject<boolean>(false);
|
||||
|
||||
currentTime$ = new BehaviorSubject<number>(0);
|
||||
duration$ = new BehaviorSubject<number>(0);
|
||||
frameRate$ = new BehaviorSubject<number>(30);
|
||||
|
||||
videoTrack$ = new BehaviorSubject<VideoTrackContext | undefined>(undefined);
|
||||
audioTrack$ = new BehaviorSubject<AudioTrackContext | undefined>(undefined);
|
||||
|
||||
private async preparePipeline() {
|
||||
const src = this.src;
|
||||
const destroyRef$ = this.destroyRef$;
|
||||
|
||||
if (!src) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { controller$ } = createEbmlController(this.src);
|
||||
|
||||
this.subscripton = controller$
|
||||
.pipe(
|
||||
switchMap(({ segments$ }) => segments$.pipe(take(1))),
|
||||
switchMap(({ seek }) => seek(0))
|
||||
)
|
||||
.subscribe((cluster) => console.log(cluster));
|
||||
|
||||
const videoDecoder = new VideoDecoder({
|
||||
output: (frame) => {},
|
||||
error: (e) => {
|
||||
e;
|
||||
const {
|
||||
segment: {
|
||||
seek,
|
||||
defaultVideoTrack$,
|
||||
defaultAudioTrack$,
|
||||
videoTrackDecoder,
|
||||
audioTrackDecoder,
|
||||
},
|
||||
totalSize,
|
||||
} = await firstValueFrom(
|
||||
createMatroska({
|
||||
url: src,
|
||||
}).pipe(throwIfEmpty(() => new Error('failed to extract matroska')))
|
||||
);
|
||||
|
||||
console.debug(`[MATROSKA]: loaded metadata, total size ${totalSize} bytes`);
|
||||
|
||||
const currentCluster$ = this.seeked$.pipe(
|
||||
switchMap((seekTime) => seek(seekTime)),
|
||||
share({
|
||||
resetOnRefCountZero: false,
|
||||
resetOnError: false,
|
||||
resetOnComplete: false,
|
||||
})
|
||||
);
|
||||
|
||||
defaultVideoTrack$
|
||||
.pipe(
|
||||
take(1),
|
||||
takeUntil(destroyRef$),
|
||||
tap((track) => console.debug('[MATROSKA]: video track loaded,', track))
|
||||
)
|
||||
.subscribe(this.videoTrack$.next.bind(this.videoTrack$));
|
||||
|
||||
defaultAudioTrack$
|
||||
.pipe(
|
||||
take(1),
|
||||
takeUntil(destroyRef$),
|
||||
tap((track) => console.debug('[MATROSKA]: audio track loaded,', track))
|
||||
)
|
||||
.subscribe(this.audioTrack$.next.bind(this.audioTrack$));
|
||||
|
||||
this.videoTrack$
|
||||
.pipe(
|
||||
takeUntil(this.destroyRef$),
|
||||
switchMap((track) =>
|
||||
track?.configuration
|
||||
? videoTrackDecoder(track, currentCluster$)
|
||||
: EMPTY
|
||||
),
|
||||
switchMap(({ frame$ }) => frame$)
|
||||
)
|
||||
.subscribe((frame) => {
|
||||
const buffer = this.videoFrameBuffer$.value;
|
||||
buffer.enqueue(frame);
|
||||
this.videoFrameBuffer$.next(buffer);
|
||||
});
|
||||
|
||||
this.audioTrack$
|
||||
.pipe(
|
||||
takeUntil(this.destroyRef$),
|
||||
switchMap((track) =>
|
||||
track?.configuration
|
||||
? audioTrackDecoder(track, currentCluster$)
|
||||
: EMPTY
|
||||
),
|
||||
switchMap(({ frame$ }) => frame$)
|
||||
)
|
||||
.subscribe((frame) => {
|
||||
const buffer = this.audioFrameBuffer$.value;
|
||||
buffer.enqueue(frame);
|
||||
this.audioFrameBuffer$.next(buffer);
|
||||
});
|
||||
|
||||
let playableStartTime = 0;
|
||||
const playable = combineLatest({
|
||||
paused: this.paused$,
|
||||
ended: this.ended$,
|
||||
audioBuffered: this.audioFrameBuffer$.pipe(
|
||||
map((q) => q.size >= 1),
|
||||
distinctUntilChanged()
|
||||
),
|
||||
videoBuffered: this.videoFrameBuffer$.pipe(
|
||||
map((q) => q.size >= 1),
|
||||
distinctUntilChanged()
|
||||
),
|
||||
}).pipe(
|
||||
takeUntil(this.destroyRef$),
|
||||
map(
|
||||
({ ended, paused, videoBuffered, audioBuffered }) =>
|
||||
!paused && !ended && !!(videoBuffered || audioBuffered)
|
||||
),
|
||||
tap((enabled) => {
|
||||
if (enabled) {
|
||||
playableStartTime = performance.now();
|
||||
}
|
||||
}),
|
||||
share()
|
||||
);
|
||||
|
||||
let nextAudioStartTime = 0;
|
||||
playable
|
||||
.pipe(
|
||||
tap(() => {
|
||||
nextAudioStartTime = 0;
|
||||
}),
|
||||
switchMap((enabled) => (enabled ? animationFrames() : EMPTY))
|
||||
)
|
||||
.subscribe(() => {
|
||||
const audioFrameBuffer = this.audioFrameBuffer$.getValue();
|
||||
const audioContext = this.audioContext;
|
||||
const nowTime = performance.now();
|
||||
const accTime = nowTime - playableStartTime;
|
||||
let audioChanged = false;
|
||||
while (audioFrameBuffer.size > 0) {
|
||||
const firstAudio = audioFrameBuffer.peek();
|
||||
if (firstAudio && firstAudio.timestamp / 1000 <= accTime) {
|
||||
const audioFrame = audioFrameBuffer.dequeue()!;
|
||||
audioChanged = true;
|
||||
if (audioContext) {
|
||||
const numberOfChannels = audioFrame.numberOfChannels;
|
||||
const sampleRate = audioFrame.sampleRate;
|
||||
const numberOfFrames = audioFrame.numberOfFrames;
|
||||
|
||||
const audioBuffer = audioContext.createBuffer(
|
||||
numberOfChannels,
|
||||
numberOfFrames,
|
||||
sampleRate
|
||||
);
|
||||
|
||||
// add fade-in-out
|
||||
const fadeLength = Math.min(50, audioFrame.numberOfFrames);
|
||||
for (let channel = 0; channel < numberOfChannels; channel++) {
|
||||
const channelData = new Float32Array(numberOfFrames);
|
||||
audioFrame.copyTo(channelData, {
|
||||
planeIndex: channel,
|
||||
frameCount: numberOfFrames,
|
||||
});
|
||||
for (let i = 0; i < fadeLength; i++) {
|
||||
channelData[i] *= i / fadeLength; // fade-in
|
||||
channelData[audioFrame.numberOfFrames - 1 - i] *=
|
||||
i / fadeLength; // fade-out
|
||||
}
|
||||
audioBuffer.copyToChannel(channelData, channel);
|
||||
}
|
||||
|
||||
/**
|
||||
* @TODO: ADD TIME SYNC
|
||||
*/
|
||||
const audioTime = audioFrame.timestamp / 1_000_000;
|
||||
|
||||
audioFrame.close();
|
||||
|
||||
if (audioContext.state === 'running') {
|
||||
const audioSource = audioContext.createBufferSource();
|
||||
audioSource.buffer = audioBuffer;
|
||||
audioSource.connect(audioContext.destination);
|
||||
const currentTime = audioContext.currentTime;
|
||||
nextAudioStartTime = Math.max(nextAudioStartTime, currentTime); // ensure the start time is never earlier than the current audio clock time
|
||||
audioSource.start(nextAudioStartTime);
|
||||
nextAudioStartTime += audioBuffer.duration;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (audioChanged) {
|
||||
this.audioFrameBuffer$.next(this.audioFrameBuffer$.getValue());
|
||||
}
|
||||
});
|
||||
|
||||
playable
|
||||
.pipe(switchMap((enabled) => (enabled ? animationFrames() : EMPTY)))
|
||||
.subscribe(async () => {
|
||||
const renderingContext = this.renderingContext;
|
||||
const videoFrameBuffer = this.videoFrameBuffer$.getValue();
|
||||
let videoChanged = false;
|
||||
const nowTime = performance.now();
|
||||
const accTime = nowTime - playableStartTime;
|
||||
while (videoFrameBuffer.size > 0) {
|
||||
const firstVideo = videoFrameBuffer.peek();
|
||||
if (firstVideo && firstVideo.timestamp / 1000 <= accTime) {
|
||||
const videoFrame = videoFrameBuffer.dequeue()!;
|
||||
videoChanged = true;
|
||||
if (renderingContext) {
|
||||
const bitmap = await createImageBitmap(videoFrame);
|
||||
renderBitmapAtRenderingContext(renderingContext, bitmap);
|
||||
}
|
||||
videoFrame.close();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (videoChanged) {
|
||||
this.videoFrameBuffer$.next(videoFrameBuffer);
|
||||
}
|
||||
});
|
||||
|
||||
fromEvent(document.body, 'click')
|
||||
.pipe(takeUntil(this.destroyRef$))
|
||||
.subscribe(async () => {
|
||||
const permissionStatus = await navigator.permissions.query({
|
||||
name: 'microphone',
|
||||
});
|
||||
if (permissionStatus.state === 'prompt') {
|
||||
await navigator.mediaDevices.getUserMedia({
|
||||
audio: true,
|
||||
});
|
||||
}
|
||||
this.audioContext.resume();
|
||||
this.audioFrameBuffer$.next(this.audioFrameBuffer$.getValue());
|
||||
});
|
||||
|
||||
const permissionStatus = await navigator.permissions.query({
|
||||
name: 'microphone',
|
||||
});
|
||||
if (permissionStatus.state === 'granted') {
|
||||
await navigator.mediaDevices.getUserMedia({
|
||||
audio: true,
|
||||
});
|
||||
this.audioContext.resume();
|
||||
}
|
||||
|
||||
this.seeked$.next(0);
|
||||
}
|
||||
|
||||
connectedCallback(): void {
|
||||
async connectedCallback() {
|
||||
super.connectedCallback();
|
||||
this.prepareVideoPipeline();
|
||||
await this.preparePipeline();
|
||||
}
|
||||
|
||||
disconnectedCallback(): void {
|
||||
super.disconnectedCallback();
|
||||
this.subscripton?.unsubscribe();
|
||||
this.destroyRef$.next(undefined);
|
||||
}
|
||||
|
||||
firstUpdated() {
|
||||
const video = this.videoRef.value;
|
||||
const context = this.renderingContext;
|
||||
const frameRate$ = this.frameRate$;
|
||||
const destroyRef$ = this.destroyRef$;
|
||||
const currentTime$ = this.currentTime$;
|
||||
const duration$ = this.duration$;
|
||||
const seeked$ = this.seeked$;
|
||||
|
||||
if (!video) {
|
||||
return;
|
||||
}
|
||||
const canvas = context.canvas as HTMLCanvasElement;
|
||||
|
||||
Object.defineProperty(video, 'duration', {
|
||||
get: () => duration$.value,
|
||||
set: (val: number) => {
|
||||
duration$.next(val);
|
||||
},
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
Object.defineProperty(video, 'currentTime', {
|
||||
get: () => currentTime$.value,
|
||||
set: (val: number) => {
|
||||
currentTime$.next(val);
|
||||
seeked$.next(val);
|
||||
},
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
frameRate$
|
||||
.pipe(takeUntil(destroyRef$), distinctUntilChanged())
|
||||
.subscribe((frameRate) => {
|
||||
canvas.width = this.width || 1;
|
||||
canvas.height = this.height || 1;
|
||||
captureCanvasAsVideoSrcObject(video, canvas, frameRate);
|
||||
});
|
||||
}
|
||||
|
||||
render() {
|
||||
return html`<video />`;
|
||||
return html`
|
||||
<video ref=${ref(this.videoRef)} width=${this.width} height=${this.height} autoplay muted></video>
|
||||
`;
|
||||
}
|
||||
}
|
||||
|
@@ -2,19 +2,25 @@
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "composite": true,
    "target": "ES2020",
    "outDir": "./dist",
    "experimentalDecorators": true,
    "module": "ESNext",
    "moduleResolution": "bundler",
    "useDefineForClassFields": false,
    "paths": {
      "@/*": [
        "./src/*"
      "@konoplayer/core/*": [
        "../../packages/core/src/*"
      ],
      "@konoplayer/matroska/*": [
        "../../packages/matroska/src/*"
      ]
    }
  },
  "include": [
    "src"
  ],
  "references": [
    {
      "path": "../../packages/core"
    },
    {
      "path": "../../packages/matroska"
    }
  ]
}
@@ -5,7 +5,7 @@
 }
 ```
 
-# ^https://konoplayer.com/api/static/*** resSpeed://1024K
+#^https://konoplayer.com/api/static/*** resSpeed://10240+
 ^https://konoplayer.com/api*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5001/api$1
-^https://konoplayer.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konoplayer.com/api
+^https://konoplayer.com/*** reqHeaders://{x-forwarded.json} http://127.0.0.1:5000/$1 excludeFilter://^https://konoplayer.com/api weinre://test
 ^wss://konoplayer.com/*** reqHeaders://{x-forwarded.json} ws://127.0.0.1:5000/$1 excludeFilter://^wss://konoplayer.com/api
@@ -1,5 +1,5 @@
 {
-  "name": "proxy",
+  "name": "@konoplayer/proxy",
   "version": "0.1.0",
   "private": true,
   "scripts": {
apps/test/.vitest/results.json (new file) · 1 line
@@ -0,0 +1 @@
{"version":"3.0.9","results":[[":src/matroska/codecs/av1.spec",{"duration":52.71331099999952,"failed":false}]]}
apps/test/package.json (new file) · 17 lines
@@ -0,0 +1,17 @@
{
  "name": "@konoplayer/test",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "scripts": {},
  "dependencies": {
    "@konoplayer/core": "workspace:*",
    "@konoplayer/matroska": "workspace:*",
    "konoebml": "^0.1.2"
  },
  "devDependencies": {
    "unplugin-swc": "^1.5.1",
    "vite-tsconfig-paths": "^5.1.4",
    "vitest": "^3.0.9"
  }
}
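No Vitest configuration appears in this compare view, so the sketch below is only a plausible wiring of the three dev dependencies declared above; the file name, `globals` setting, and include glob are assumptions, not taken from the repository.

```typescript
// vitest.config.ts (hypothetical): tsconfigPaths() resolves the @konoplayer/*
// path aliases from tsconfig, and unplugin-swc compiles the TypeScript for the
// test runner.
import { defineConfig } from 'vitest/config';
import tsconfigPaths from 'vite-tsconfig-paths';
import swc from 'unplugin-swc';

export default defineConfig({
  plugins: [tsconfigPaths(), swc.vite()],
  test: {
    // globals: true would explain the bare describe/it/expect calls used in
    // the spec files added later in this diff.
    globals: true,
    include: ['src/**/*.spec.ts'],
  },
});
```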
apps/test/resources/.gitignore (vendored, new file) · 2 lines
@@ -0,0 +1,2 @@
video/huge/*
!video/huge/.gitkeep
BIN  apps/test/resources/video/test-av1.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test-avc.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test-hevc.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test-theora.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test-vp8.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test-vp9.mkv (new file, binary not shown)
BIN  apps/test/resources/video/test.webm (new file, binary not shown)
apps/test/src/init-test.ts (new file, empty) · 0 lines
47
apps/test/src/matroska/codecs/av1.spec.ts
Normal file
47
apps/test/src/matroska/codecs/av1.spec.ts
Normal file
@ -0,0 +1,47 @@
|
||||
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
|
||||
import { VideoCodecId } from '@konoplayer/matroska/codecs';
|
||||
import {
|
||||
parseAV1DecoderConfigurationRecord,
|
||||
genCodecStringByAV1DecoderConfigurationRecord,
|
||||
} from '@konoplayer/matroska/codecs/av1';
|
||||
import { loadComponentFromRangedResource } from '../utils/data';
|
||||
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
|
||||
import { isTagIdPos } from '@konoplayer/matroska/util';
|
||||
|
||||
describe('AV1 code test', () => {
|
||||
it('should parse av1 meta from track entry', async () => {
|
||||
const [segment] = await loadComponentFromRangedResource<SegmentType>({
|
||||
resource: 'video/test-av1.mkv',
|
||||
predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
|
||||
schema: SegmentSchema,
|
||||
});
|
||||
|
||||
const av1Track = segment.Tracks?.TrackEntry.find(
|
||||
(t) => t.CodecID === VideoCodecId.AV1
|
||||
)!;
|
||||
|
||||
expect(av1Track).toBeDefined();
|
||||
|
||||
expect(av1Track.CodecPrivate).toBeDefined();
|
||||
|
||||
const meta = parseAV1DecoderConfigurationRecord(av1Track)!;
|
||||
|
||||
expect(meta).toBeDefined();
|
||||
|
||||
const codecStr = genCodecStringByAV1DecoderConfigurationRecord(meta);
|
||||
|
||||
expect(meta.marker).toBe(1);
|
||||
expect(meta.version).toBe(1);
|
||||
expect(meta.seqProfile).toBe(0);
|
||||
expect(meta.seqLevelIdx0).toBe(1);
|
||||
expect(meta.seqTier0).toBe(0);
|
||||
expect(meta.highBitdepth).toBe(0);
|
||||
expect(meta.monochrome).toBe(0);
|
||||
expect(
|
||||
`${meta.chromaSubsamplingX}${meta.chromaSubsamplingY}${meta.chromaSamplePosition}`
|
||||
).toBe('110');
|
||||
expect(meta.initialPresentationDelayMinus1).toBeUndefined();
|
||||
|
||||
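// av01.<profile>.<level><tier>.<bitDepth>.<monochrome>.<chromaXY+pos>: profile 0, seq_level_idx 1 Main tier, 8-bit, color, 4:2:0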
expect(codecStr).toBe('av01.0.01M.08.0.110');
|
||||
});
|
||||
});
|
40
apps/test/src/matroska/codecs/avc.spec.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
|
||||
import { VideoCodecId } from '@konoplayer/matroska/codecs';
|
||||
import {
|
||||
parseAVCDecoderConfigurationRecord,
|
||||
genCodecStringByAVCDecoderConfigurationRecord,
|
||||
} from '@konoplayer/matroska/codecs/avc';
|
||||
import { loadComponentFromRangedResource } from '../utils/data';
|
||||
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
|
||||
import { isTagIdPos } from '@konoplayer/matroska/util';
|
||||
|
||||
describe('AVC codec test', () => {
|
||||
it('should parse avc meta from track entry', async () => {
|
||||
const [segment] = await loadComponentFromRangedResource<SegmentType>({
|
||||
resource: 'video/test-avc.mkv',
|
||||
predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
|
||||
schema: SegmentSchema,
|
||||
});
|
||||
|
||||
const avcTrack = segment.Tracks?.TrackEntry.find(
|
||||
(t) => t.CodecID === VideoCodecId.H264
|
||||
)!;
|
||||
|
||||
expect(avcTrack).toBeDefined();
|
||||
|
||||
expect(avcTrack.CodecPrivate).toBeDefined();
|
||||
|
||||
const meta = parseAVCDecoderConfigurationRecord(avcTrack)!;
|
||||
|
||||
expect(meta).toBeDefined();
|
||||
|
||||
const codecStr = genCodecStringByAVCDecoderConfigurationRecord(meta);
|
||||
|
||||
expect(meta.configurationVersion).toBe(1);
|
||||
expect(meta.avcProfileIndication).toBe(100);
|
||||
expect(meta.profileCompatibility).toBe(0);
|
||||
expect(meta.avcLevelIndication).toBe(30);
|
||||
|
||||
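// avc1.PPCCLL in hex: profile_idc 0x64 (High), profile_compatibility 0x00, level_idc 0x1e (3.0)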
expect(codecStr).toBe('avc1.64001e');
|
||||
});
|
||||
});
|
106
apps/test/src/matroska/codecs/hevc.spec.ts
Normal file
@ -0,0 +1,106 @@
|
||||
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
|
||||
import { VideoCodecId } from '@konoplayer/matroska/codecs';
|
||||
import {
|
||||
parseHEVCDecoderConfigurationRecord,
|
||||
genCodecStringByHEVCDecoderConfigurationRecord,
|
||||
HEVCDecoderConfigurationRecordType,
|
||||
} from '@konoplayer/matroska/codecs/hevc';
|
||||
import { loadComponentFromRangedResource } from '../utils/data';
|
||||
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
|
||||
import { isTagIdPos } from '@konoplayer/matroska/util';
|
||||
import { assert } from 'vitest';
|
||||
|
||||
describe('HEVC codec test', () => {
|
||||
it('should parse hevc meta from track entry', async () => {
|
||||
const [segment] = await loadComponentFromRangedResource<SegmentType>({
|
||||
resource: 'video/test-hevc.mkv',
|
||||
predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
|
||||
schema: SegmentSchema,
|
||||
});
|
||||
|
||||
const hevcTrack = segment.Tracks?.TrackEntry.find(
|
||||
(t) => t.CodecID === VideoCodecId.HEVC
|
||||
)!;
|
||||
|
||||
expect(hevcTrack).toBeDefined();
|
||||
|
||||
expect(hevcTrack.CodecPrivate).toBeDefined();
|
||||
|
||||
const meta = parseHEVCDecoderConfigurationRecord(hevcTrack);
|
||||
|
||||
expect(meta).toBeDefined();
|
||||
|
||||
const codecStr = genCodecStringByHEVCDecoderConfigurationRecord(meta);
|
||||
|
||||
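// hev1.<profile_idc>.<reversed compat flags>.<tier + level_idc>.<constraint bytes>: Main profile, Main tier, level_idc 63, constraint byte 0x90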
expect(codecStr).toBe('hev1.1.6.L63.90');
|
||||
});
|
||||
|
||||
it('should match chrome test suite', () => {
|
||||
function makeHEVCParameterSet(
|
||||
generalProfileSpace: number,
|
||||
generalProfileIDC: number,
|
||||
generalProfileCompatibilityFlags: number,
|
||||
generalTierFlag: number,
|
||||
generalConstraintIndicatorFlags: [
|
||||
number,
|
||||
number,
|
||||
number,
|
||||
number,
|
||||
number,
|
||||
number,
|
||||
],
|
||||
generalLevelIDC: number
|
||||
) {
|
||||
return {
|
||||
generalProfileSpace: generalProfileSpace,
|
||||
generalProfileIdc: generalProfileIDC,
|
||||
generalProfileCompatibilityFlags: generalProfileCompatibilityFlags,
|
||||
generalTierFlag: generalTierFlag,
|
||||
generalConstraintIndicatorFlags: Number(
|
||||
new DataView(
|
||||
new Uint8Array([0, 0, ...generalConstraintIndicatorFlags]).buffer
|
||||
).getBigUint64(0, false)
|
||||
),
|
||||
generalLevelIdc: generalLevelIDC,
|
||||
} as unknown as HEVCDecoderConfigurationRecordType;
|
||||
}
|
||||
|
||||
assert(
|
||||
genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
makeHEVCParameterSet(0, 1, 0x60000000, 0, [0, 0, 0, 0, 0, 0], 93)
|
||||
),
|
||||
'hev1.1.6.L93'
|
||||
);
|
||||
assert(
|
||||
genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
makeHEVCParameterSet(1, 4, 0x82000000, 1, [0, 0, 0, 0, 0, 0], 120)
|
||||
),
|
||||
'hev1.A4.41.H120'
|
||||
);
|
||||
assert(
|
||||
genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
makeHEVCParameterSet(0, 1, 0x60000000, 0, [176, 0, 0, 0, 0, 0], 93)
|
||||
),
|
||||
'hev1.1.6.L93.B0'
|
||||
);
|
||||
assert(
|
||||
genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
makeHEVCParameterSet(1, 4, 0x82000000, 1, [176, 35, 0, 0, 0, 0], 120)
|
||||
),
|
||||
'hev1.A4.41.H120.B0.23'
|
||||
);
|
||||
assert(
|
||||
genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
makeHEVCParameterSet(
|
||||
2,
|
||||
1,
|
||||
0xf77db57b,
|
||||
1,
|
||||
[18, 52, 86, 120, 154, 188],
|
||||
254
|
||||
)
|
||||
),
|
||||
'hev1.B1.DEADBEEF.H254.12.34.56.78.9A.BC'
|
||||
);
|
||||
});
|
||||
});
|
54
apps/test/src/matroska/codecs/vp9.spec.ts
Normal file
@ -0,0 +1,54 @@
|
||||
import { SegmentSchema, SegmentType } from '@konoplayer/matroska/schema';
|
||||
import { VideoCodecId } from '@konoplayer/matroska/codecs';
|
||||
import {
|
||||
genCodecStringByVP9DecoderConfigurationRecord,
|
||||
parseVP9DecoderConfigurationRecord,
|
||||
VP9ColorSpaceEnum,
|
||||
VP9Subsampling,
|
||||
} from '@konoplayer/matroska/codecs/vp9';
|
||||
import { loadComponentFromRangedResource } from '../utils/data';
|
||||
import { EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
|
||||
import { isTagIdPos } from '@konoplayer/matroska/util';
|
||||
|
||||
describe('VP9 codec test', () => {
|
||||
it('should parse vp9 meta from track entry and keyframe', async () => {
|
||||
const [segment] = await loadComponentFromRangedResource<SegmentType>({
|
||||
resource: 'video/test-vp9.mkv',
|
||||
predicate: isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.End),
|
||||
schema: SegmentSchema,
|
||||
});
|
||||
|
||||
const vp9Track = segment.Tracks?.TrackEntry.find(
|
||||
(t) => t.CodecID === VideoCodecId.VP9
|
||||
)!;
|
||||
|
||||
expect(vp9Track).toBeDefined();
|
||||
|
||||
expect(vp9Track.CodecPrivate).toBeFalsy();
|
||||
|
||||
const keyframe = segment
|
||||
.Cluster!.flatMap((c) => c.SimpleBlock || [])
|
||||
.find((b) => b.keyframe && b.track === vp9Track.TrackNumber)!;
|
||||
|
||||
expect(keyframe).toBeDefined();
|
||||
expect(keyframe.frames.length).toBe(1);
|
||||
|
||||
const meta = parseVP9DecoderConfigurationRecord(
|
||||
vp9Track,
|
||||
keyframe.frames[0]
|
||||
)!;
|
||||
|
||||
expect(meta).toBeDefined();
|
||||
|
||||
expect(meta.bitDepth).toBe(8);
|
||||
expect(meta.subsampling).toBe(VP9Subsampling.YUV420);
|
||||
expect(meta.width).toBe(640);
|
||||
expect(meta.height).toBe(360);
|
||||
expect(meta.colorSpace).toBe(VP9ColorSpaceEnum.BT_601);
|
||||
expect(meta.profile).toBe(0);
|
||||
|
||||
const codecStr = genCodecStringByVP9DecoderConfigurationRecord(meta);
|
||||
|
||||
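// vp09.<profile>.<level>.<bitDepth>: profile 0, level 2.1, 8-bit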
expect(codecStr).toBe('vp09.00.21.08');
|
||||
});
|
||||
});
|
56
apps/test/src/matroska/utils/data.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { Type } from 'arktype';
|
||||
import { EbmlStreamDecoder, EbmlTagPosition, EbmlTagType } from 'konoebml';
|
||||
import { convertEbmlTagToComponent } from '@konoplayer/matroska/util';
|
||||
import fs from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
import { TransformStream } from 'node:stream/web';
|
||||
import path from 'node:path';
|
||||
|
||||
export interface LoadRangedResourceOptions<S extends Type<any> = any> {
|
||||
resource: string;
|
||||
byteStart?: number;
|
||||
byteEnd?: number;
|
||||
schema?: S;
|
||||
predicate?: (tag: EbmlTagType) => boolean;
|
||||
}
|
||||
|
||||
export async function loadComponentFromRangedResource<
|
||||
T,
|
||||
S extends Type<any> = any,
|
||||
>({
|
||||
resource,
|
||||
byteStart,
|
||||
byteEnd,
|
||||
predicate = (tag) => !tag?.parent && tag.position !== EbmlTagPosition.Start,
|
||||
schema,
|
||||
}: LoadRangedResourceOptions<S>): Promise<T[]> {
|
||||
const input = Readable.toWeb(
|
||||
fs.createReadStream(
|
||||
path.join(import.meta.dirname, '..', '..', '..', 'resources', resource),
|
||||
{
|
||||
start: byteStart,
|
||||
end: byteEnd,
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
const output = input.pipeThrough(
|
||||
new EbmlStreamDecoder({
|
||||
streamStartOffset: byteStart,
|
||||
collectChild: true,
|
||||
}) as unknown as TransformStream<Uint8Array, EbmlTagType>
|
||||
);
|
||||
|
||||
const result: T[] = [];
|
||||
|
||||
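// Convert every decoded tag that matches the predicate into a typed component, validating it when a schema is provided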
for await (const t of output) {
|
||||
if (predicate(t)) {
|
||||
let component = convertEbmlTagToComponent(t) as T;
|
||||
if (schema) {
|
||||
component = schema.assert(component);
|
||||
}
|
||||
result.push(component);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
30
apps/test/tsconfig.json
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"outDir": "./dist",
|
||||
"types": [
|
||||
"vitest/globals",
|
||||
"node"
|
||||
],
|
||||
"paths": {
|
||||
"@konoplayer/core/*": [
|
||||
"../../packages/core/src/*"
|
||||
],
|
||||
"@konoplayer/matroska/*": [
|
||||
"../../packages/matroska/src/*"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
],
|
||||
"references": [
|
||||
{
|
||||
"path": "../../packages/core"
|
||||
},
|
||||
{
|
||||
"path": "../../packages/matroska"
|
||||
}
|
||||
]
|
||||
}
|
33
apps/test/vitest.config.ts
Normal file
@ -0,0 +1,33 @@
|
||||
import swc from 'unplugin-swc';
|
||||
import tsconfigPaths from 'vite-tsconfig-paths';
|
||||
import { defineConfig } from 'vitest/config';
|
||||
|
||||
export default defineConfig({
|
||||
cacheDir: '.vitest',
|
||||
test: {
|
||||
setupFiles: ['src/init-test'],
|
||||
environment: 'happy-dom',
|
||||
include: ['src/**/*.spec'],
|
||||
globals: true,
|
||||
restoreMocks: true,
|
||||
coverage: {
|
||||
// you can include other reporters, but 'json-summary' is required, json is recommended
|
||||
reporter: ['text', 'json-summary', 'json'],
|
||||
// If you want coverage reports even when your tests are failing, include the reportOnFailure option
|
||||
reportOnFailure: true,
|
||||
include: ['../../packages/core/src/**', '../../packages/matroska/src/**'],
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
tsconfigPaths(),
|
||||
swc.vite({
|
||||
include: /\.[mc]?[jt]sx?$/,
|
||||
// needed only for packages installed from git (git+) sources
|
||||
exclude: [
|
||||
/node_modules\/(?!@konoplayer|\.pnpm)/,
|
||||
/node_modules\/\.pnpm\/(?!@konoplayer)/,
|
||||
] as any,
|
||||
tsconfigFile: './tsconfig.json',
|
||||
}),
|
||||
],
|
||||
});
|
37
biome.jsonc
@ -6,6 +6,7 @@
|
||||
"linter": {
|
||||
"rules": {
|
||||
"style": {
|
||||
"useSingleCaseStatement": "off",
|
||||
"noParameterProperties": "off",
|
||||
"noNonNullAssertion": "off"
|
||||
},
|
||||
@ -16,7 +17,13 @@
|
||||
"noSvgWithoutTitle": "off"
|
||||
},
|
||||
"complexity": {
|
||||
"noBannedTypes": "off"
|
||||
"noBannedTypes": "off",
|
||||
"noExcessiveCognitiveComplexity": {
|
||||
"level": "warn",
|
||||
"options": {
|
||||
"maxAllowedComplexity": 40
|
||||
}
|
||||
}
|
||||
},
|
||||
"nursery": {
|
||||
"noEnum": "off",
|
||||
@ -51,6 +58,34 @@
|
||||
],
|
||||
"linter": {
|
||||
"rules": {
|
||||
"suspicious": {
|
||||
"noConsole": "off",
|
||||
"noConsoleLog": "off"
|
||||
},
|
||||
"performance": {
|
||||
"useTopLevelRegex": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"include": [
|
||||
"apps/test/**"
|
||||
],
|
||||
"javascript": {
|
||||
"globals": [
|
||||
"describe",
|
||||
"beforeEach",
|
||||
"it",
|
||||
"expect",
|
||||
"afterEach"
|
||||
]
|
||||
},
|
||||
"linter": {
|
||||
"rules": {
|
||||
"style": {
|
||||
"useImportType": "off"
|
||||
},
|
||||
"suspicious": {
|
||||
"noConsole": "off"
|
||||
},
|
||||
|
7
justfile
@ -2,7 +2,10 @@ set windows-shell := ["pwsh.exe", "-c"]
|
||||
set dotenv-load := true
|
||||
|
||||
dev-playground:
|
||||
pnpm run --filter=playground dev
|
||||
pnpm run --filter=@konoplayer/playground dev
|
||||
|
||||
dev-proxy:
|
||||
pnpm run --filter proxy --filter mock dev
|
||||
pnpm run --filter=@konoplayer/proxy --filter=@konoplayer/mock dev
|
||||
|
||||
download-samples:
|
||||
pnpm run download-samples
|
11
package.json
@ -3,7 +3,8 @@
|
||||
"version": "0.0.1",
|
||||
"description": "A strange player, like the dumtruck, taking you to Isekai.",
|
||||
"scripts": {
|
||||
"codegen-mkv": "tsx --tsconfig=./tsconfig.scripts.json ./scripts/codegen-mkv.ts"
|
||||
"codegen-mkv": "tsx --tsconfig=./tsconfig.scripts.json ./scripts/codegen-mkv",
|
||||
"download-samples": "tsx --tsconfig=./tsconfig.scripts.json ./scripts/download-samples"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "lonelyhentxi",
|
||||
@ -15,17 +16,19 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/node": "^22.13.8",
|
||||
"@types/node": "^22.13.11",
|
||||
"@webgpu/types": "^0.1.59",
|
||||
"change-case": "^5.4.4",
|
||||
"happy-dom": "^17.4.4",
|
||||
"tsx": "^4.19.2",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "^5.8.2",
|
||||
"ultracite": "^4.1.15"
|
||||
"ultracite": "^4.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"arktype": "^2.1.10",
|
||||
"lodash-es": "^4.17.21",
|
||||
"media-codecs": "^2.0.2",
|
||||
"mnemonist": "^0.40.3",
|
||||
"rxjs": "^7.8.2",
|
||||
"type-fest": "^4.37.0"
|
||||
|
8
packages/core/package.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "@konoplayer/core",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {},
|
||||
"dependencies": {}
|
||||
}
|
40
packages/core/src/audition/index.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { map, Observable, Subject } from 'rxjs';
|
||||
|
||||
|
||||
// biome-ignore lint/correctness/noUndeclaredVariables: <explanation>
|
||||
export function createAudioDecodeStream(configuration: AudioDecoderConfig): Observable<{
|
||||
decoder: AudioDecoder;
|
||||
frame$: Observable<AudioData>;
|
||||
}> {
|
||||
const frame$ = new Subject<AudioData>()
|
||||
const decoder$ = new Observable<AudioDecoder>((subscriber) => {
|
||||
let isFinalized = false;
|
||||
const decoder = new AudioDecoder({
|
||||
output: (frame) => frame$.next(frame),
|
||||
error: (e) => {
|
||||
if (!isFinalized) {
|
||||
isFinalized = true;
|
||||
frame$.error(e);
|
||||
subscriber.error(e);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
decoder.configure(configuration);
|
||||
|
||||
subscriber.next(decoder);
|
||||
|
||||
return () => {
|
||||
if (!isFinalized) {
|
||||
isFinalized = true;
|
||||
frame$.complete();
|
||||
decoder.close();
|
||||
}
|
||||
};
|
||||
})
|
||||
|
||||
return decoder$.pipe(map((decoder) => ({
|
||||
decoder,
|
||||
frame$
|
||||
})));
|
||||
}
|
32
packages/core/src/codecs/audio-codecs.ts
Normal file
@ -0,0 +1,32 @@
|
||||
export enum AudioCodec {
|
||||
Unknown = 0,
|
||||
AAC = 1,
|
||||
MP3 = 2,
|
||||
PCM = 3,
|
||||
Vorbis = 4,
|
||||
FLAC = 5,
|
||||
AMR_NB = 6,
|
||||
AMR_WB = 7,
|
||||
PCM_MULAW = 8,
|
||||
GSM_MS = 9,
|
||||
PCM_S16BE = 10,
|
||||
PCM_S24BE = 11,
|
||||
Opus = 12,
|
||||
EAC3 = 13,
|
||||
PCM_ALAW = 14,
|
||||
ALAC = 15,
|
||||
AC3 = 16,
|
||||
MpegHAudio = 17,
|
||||
DTS = 18,
|
||||
DTSXP2 = 19,
|
||||
DTSE = 20,
|
||||
AC4 = 21,
|
||||
IAMF = 22,
|
||||
PCM_S32BE = 23,
|
||||
PCM_S32LE = 24,
|
||||
PCM_S24LE = 25,
|
||||
PCM_S16LE = 26,
|
||||
PCM_F32BE = 27,
|
||||
PCM_F32LE = 28,
|
||||
MaxValue = PCM_F32LE, // Must equal the last "real" codec above.
|
||||
}
|
2
packages/core/src/codecs/index.ts
Normal file
@ -0,0 +1,2 @@
|
||||
export { AudioCodec } from './audio-codecs';
|
||||
export { VideoCodec } from './video-codecs';
|
97
packages/core/src/codecs/video-codecs.ts
Normal file
@ -0,0 +1,97 @@
|
||||
export enum VideoCodec {
|
||||
Unknown = 0,
|
||||
H264 = 1,
|
||||
VC1 = 2,
|
||||
MPEG2 = 3,
|
||||
MPEG4 = 4,
|
||||
Theora = 5,
|
||||
VP8 = 6,
|
||||
VP9 = 7,
|
||||
HEVC = 8,
|
||||
DolbyVision = 9,
|
||||
AV1 = 10,
|
||||
MaxValue = AV1, // Must equal the last "real" codec above.
|
||||
}
|
||||
|
||||
export enum VideoCodecProfile {
|
||||
VIDEO_CODEC_PROFILE_UNKNOWN = -1,
|
||||
VIDEO_CODEC_PROFILE_MIN = VIDEO_CODEC_PROFILE_UNKNOWN,
|
||||
H264PROFILE_MIN = 0,
|
||||
H264PROFILE_BASELINE = H264PROFILE_MIN,
|
||||
H264PROFILE_MAIN = 1,
|
||||
H264PROFILE_EXTENDED = 2,
|
||||
H264PROFILE_HIGH = 3,
|
||||
H264PROFILE_HIGH10PROFILE = 4,
|
||||
H264PROFILE_HIGH422PROFILE = 5,
|
||||
H264PROFILE_HIGH444PREDICTIVEPROFILE = 6,
|
||||
H264PROFILE_SCALABLEBASELINE = 7,
|
||||
H264PROFILE_SCALABLEHIGH = 8,
|
||||
H264PROFILE_STEREOHIGH = 9,
|
||||
H264PROFILE_MULTIVIEWHIGH = 10,
|
||||
H264PROFILE_MAX = H264PROFILE_MULTIVIEWHIGH,
|
||||
VP8PROFILE_MIN = 11,
|
||||
VP8PROFILE_ANY = VP8PROFILE_MIN,
|
||||
VP8PROFILE_MAX = VP8PROFILE_ANY,
|
||||
VP9PROFILE_MIN = 12,
|
||||
VP9PROFILE_PROFILE0 = VP9PROFILE_MIN,
|
||||
VP9PROFILE_PROFILE1 = 13,
|
||||
VP9PROFILE_PROFILE2 = 14,
|
||||
VP9PROFILE_PROFILE3 = 15,
|
||||
VP9PROFILE_MAX = VP9PROFILE_PROFILE3,
|
||||
HEVCPROFILE_MIN = 16,
|
||||
HEVCPROFILE_MAIN = HEVCPROFILE_MIN,
|
||||
HEVCPROFILE_MAIN10 = 17,
|
||||
HEVCPROFILE_MAIN_STILL_PICTURE = 18,
|
||||
HEVCPROFILE_MAX = HEVCPROFILE_MAIN_STILL_PICTURE,
|
||||
DOLBYVISION_PROFILE0 = 19,
|
||||
// Deprecated: DOLBYVISION_PROFILE4 = 20,
|
||||
DOLBYVISION_PROFILE5 = 21,
|
||||
DOLBYVISION_PROFILE7 = 22,
|
||||
THEORAPROFILE_MIN = 23,
|
||||
THEORAPROFILE_ANY = THEORAPROFILE_MIN,
|
||||
THEORAPROFILE_MAX = THEORAPROFILE_ANY,
|
||||
AV1PROFILE_MIN = 24,
|
||||
AV1PROFILE_PROFILE_MAIN = AV1PROFILE_MIN,
|
||||
AV1PROFILE_PROFILE_HIGH = 25,
|
||||
AV1PROFILE_PROFILE_PRO = 26,
|
||||
AV1PROFILE_MAX = AV1PROFILE_PROFILE_PRO,
|
||||
DOLBYVISION_PROFILE8 = 27,
|
||||
DOLBYVISION_PROFILE9 = 28,
|
||||
HEVCPROFILE_EXT_MIN = 29,
|
||||
HEVCPROFILE_REXT = HEVCPROFILE_EXT_MIN,
|
||||
HEVCPROFILE_HIGH_THROUGHPUT = 30,
|
||||
HEVCPROFILE_MULTIVIEW_MAIN = 31,
|
||||
HEVCPROFILE_SCALABLE_MAIN = 32,
|
||||
HEVCPROFILE_3D_MAIN = 33,
|
||||
HEVCPROFILE_SCREEN_EXTENDED = 34,
|
||||
HEVCPROFILE_SCALABLE_REXT = 35,
|
||||
HEVCPROFILE_HIGH_THROUGHPUT_SCREEN_EXTENDED = 36,
|
||||
HEVCPROFILE_EXT_MAX = HEVCPROFILE_HIGH_THROUGHPUT_SCREEN_EXTENDED,
|
||||
VVCPROFILE_MIN = 37,
|
||||
VVCPROFILE_MAIN10 = VVCPROFILE_MIN,
|
||||
VVCPROFILE_MAIN12 = 38,
|
||||
VVCPROFILE_MAIN12_INTRA = 39,
|
||||
VVCPROIFLE_MULTILAYER_MAIN10 = 40,
|
||||
VVCPROFILE_MAIN10_444 = 41,
|
||||
VVCPROFILE_MAIN12_444 = 42,
|
||||
VVCPROFILE_MAIN16_444 = 43,
|
||||
VVCPROFILE_MAIN12_444_INTRA = 44,
|
||||
VVCPROFILE_MAIN16_444_INTRA = 45,
|
||||
VVCPROFILE_MULTILAYER_MAIN10_444 = 46,
|
||||
VVCPROFILE_MAIN10_STILL_PICTURE = 47,
|
||||
VVCPROFILE_MAIN12_STILL_PICTURE = 48,
|
||||
VVCPROFILE_MAIN10_444_STILL_PICTURE = 49,
|
||||
VVCPROFILE_MAIN12_444_STILL_PICTURE = 50,
|
||||
VVCPROFILE_MAIN16_444_STILL_PICTURE = 51,
|
||||
VVCPROFILE_MAX = VVCPROFILE_MAIN16_444_STILL_PICTURE,
|
||||
VIDEO_CODEC_PROFILE_MAX = VVCPROFILE_MAIN16_444_STILL_PICTURE,
|
||||
}
|
||||
|
||||
export type VideoCodecLevel = number; // uint32
|
||||
export const NoVideoCodecLevel: VideoCodecLevel = 0;
|
||||
|
||||
export type VideoCodecProfileLevel = {
|
||||
codec: VideoCodec;
|
||||
profile: VideoCodecProfile;
|
||||
level: VideoCodecLevel;
|
||||
};
|
39
packages/core/src/data/bit.ts
Normal file
@ -0,0 +1,39 @@
|
||||
export class BitReader {
|
||||
private data: Uint8Array;
|
||||
private byteOffset = 0;
|
||||
private bitOffset = 0;
|
||||
|
||||
constructor(data: Uint8Array) {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
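// Read numBits most-significant-bit first, advancing across byte boundaries as needed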
readBits(numBits: number): number {
|
||||
let value = 0;
|
||||
for (let i = 0; i < numBits; i++) {
|
||||
const bit = (this.data[this.byteOffset] >> (7 - this.bitOffset)) & 1;
|
||||
value = (value << 1) | bit;
|
||||
this.bitOffset++;
|
||||
if (this.bitOffset === 8) {
|
||||
this.bitOffset = 0;
|
||||
this.byteOffset++;
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
skipBits(numBits: number): void {
|
||||
this.bitOffset += numBits;
|
||||
while (this.bitOffset >= 8) {
|
||||
this.bitOffset -= 8;
|
||||
this.byteOffset++;
|
||||
}
|
||||
}
|
||||
|
||||
hasData(): boolean {
|
||||
return this.byteOffset < this.data.length;
|
||||
}
|
||||
|
||||
getRemainingBytes(): Uint8Array {
|
||||
return this.data.slice(this.byteOffset);
|
||||
}
|
||||
}
|
@ -5,11 +5,17 @@ export interface RangedStream {
|
||||
totalSize?: number;
|
||||
}
|
||||
|
||||
export async function createRangedStream(
|
||||
url: string,
|
||||
export interface CreateRangedStreamOptions {
|
||||
url: string;
|
||||
byteStart?: number;
|
||||
byteEnd?: number;
|
||||
}
|
||||
|
||||
export async function createRangedStream({
|
||||
url,
|
||||
byteStart = 0,
|
||||
byteEnd?: number
|
||||
) {
|
||||
byteEnd,
|
||||
}: CreateRangedStreamOptions) {
|
||||
const controller = new AbortController();
|
||||
const signal = controller.signal;
|
||||
const headers = new Headers();
|
6
packages/core/src/data/index.ts
Normal file
@ -0,0 +1,6 @@
|
||||
export {
|
||||
type RangedStream,
|
||||
type CreateRangedStreamOptions,
|
||||
createRangedStream,
|
||||
} from './fetch';
|
||||
export { BitReader } from './bit';
|
25
packages/core/src/errors.ts
Normal file
@ -0,0 +1,25 @@
|
||||
export class UnsupportedCodecError extends Error {
|
||||
constructor(codec: string, context: string) {
|
||||
super(`codec ${codec} is not supported in ${context} context`);
|
||||
}
|
||||
}
|
||||
|
||||
export class ParseCodecError extends Error {
|
||||
constructor(codec: string, detail: string) {
|
||||
super(`codec ${codec} private data parse failed: ${detail}`);
|
||||
}
|
||||
}
|
||||
|
||||
export class UnreachableOrLogicError extends Error {
|
||||
constructor(detail: string) {
|
||||
super(`unreachable or logic error: ${detail}`);
|
||||
}
|
||||
}
|
||||
|
||||
export class ParseCodecErrors extends Error {
|
||||
cause: Error[] = [];
|
||||
|
||||
constructor() {
|
||||
super('failed to parse codecs');
|
||||
}
|
||||
}
|
80
packages/core/src/graphics/index.ts
Normal file
@ -0,0 +1,80 @@
|
||||
import { map, Observable, Subject } from 'rxjs';
|
||||
|
||||
export type RenderingContext =
|
||||
| ImageBitmapRenderingContext
|
||||
| CanvasRenderingContext2D;
|
||||
|
||||
export function createRenderingContext(): RenderingContext {
|
||||
const canvas = document.createElement('canvas');
|
||||
const context =
|
||||
canvas.getContext('bitmaprenderer') || canvas.getContext('2d');
|
||||
if (!context) {
|
||||
throw new DOMException(
|
||||
'can not get rendering context of canvas',
|
||||
'CanvasException'
|
||||
);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
export function renderBitmapAtRenderingContext(
|
||||
context: RenderingContext,
|
||||
bitmap: ImageBitmap
|
||||
) {
|
||||
const canvas = context.canvas;
|
||||
if (bitmap.width !== canvas.width || bitmap.height !== canvas.height) {
|
||||
canvas.width = bitmap.width;
|
||||
canvas.height = bitmap.height;
|
||||
}
|
||||
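// bitmaprenderer takes ownership of the bitmap on transfer; the 2d fallback must draw it and release it explicitly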
if (context instanceof ImageBitmapRenderingContext) {
|
||||
context.transferFromImageBitmap(bitmap);
|
||||
} else {
|
||||
context.drawImage(bitmap, 0, 0, bitmap.width, bitmap.height);
|
||||
bitmap.close();
|
||||
}
|
||||
}
|
||||
|
||||
export function captureCanvasAsVideoSrcObject(
|
||||
video: HTMLVideoElement,
|
||||
canvas: HTMLCanvasElement,
|
||||
frameRate: number
|
||||
) {
|
||||
video.srcObject = canvas.captureStream(frameRate);
|
||||
}
|
||||
|
||||
export function createVideoDecodeStream(configuration: VideoDecoderConfig): Observable<{
|
||||
decoder: VideoDecoder;
|
||||
frame$: Observable<VideoFrame>;
|
||||
}> {
|
||||
const frame$ = new Subject<VideoFrame>()
|
||||
const decoder$ = new Observable<VideoDecoder>((subscriber) => {
|
||||
let isFinalized = false;
|
||||
const decoder = new VideoDecoder({
|
||||
output: (frame) => frame$.next(frame),
|
||||
error: (e) => {
|
||||
if (!isFinalized) {
|
||||
isFinalized = true;
|
||||
frame$.error(e);
|
||||
subscriber.error(e);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
decoder.configure(configuration);
|
||||
|
||||
subscriber.next(decoder);
|
||||
|
||||
return () => {
|
||||
if (!isFinalized) {
|
||||
isFinalized = true;
|
||||
frame$.complete();
|
||||
decoder.close();
|
||||
}
|
||||
};
|
||||
})
|
||||
|
||||
return decoder$.pipe(map((decoder) => ({
|
||||
decoder,
|
||||
frame$
|
||||
})));
|
||||
}
|
10
packages/core/tsconfig.json
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"outDir": "./dist"
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
]
|
||||
}
|
7
packages/ffmpeg/Cargo.toml
Normal file
@ -0,0 +1,7 @@
|
||||
[package]
|
||||
name = "konoplayer-ffmpeg"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
ffmpeg-sys-next = { version = "7.1.0", features = ["avcodec"] }
|
85
packages/ffmpeg/src/lib.rs
Normal file
@ -0,0 +1,85 @@
|
||||
use ffmpeg_sys_next as ffmpeg;
|
||||
|
||||
fn get_webcodecs_codec(codec_id: &str, codec_private: &[u8]) -> Result<String, String> {
|
||||
unsafe {
|
||||
// Map the Matroska CodecID to an FFmpeg AVCodecID
|
||||
let codec = match codec_id {
|
||||
"V_VP8" => ffmpeg::AVCodecID::AV_CODEC_ID_VP8,
|
||||
"V_VP9" => ffmpeg::AVCodecID::AV_CODEC_ID_VP9,
|
||||
"V_MPEG4/ISO/AVC" => ffmpeg::AVCodecID::AV_CODEC_ID_H264,
|
||||
"V_MPEGH/ISO/HEVC" => ffmpeg::AVCodecID::AV_CODEC_ID_HEVC,
|
||||
_ => return Err(format!("Unsupported CodecID: {}", codec_id)),
|
||||
};
|
||||
|
||||
let av_codec = ffmpeg::avcodec_find_decoder(codec);
|
||||
if av_codec.is_null() {
|
||||
return Err("Codec not found".to_string());
|
||||
}
|
||||
|
||||
let context = ffmpeg::avcodec_alloc_context3(av_codec);
|
||||
if context.is_null() {
|
||||
return Err("Failed to allocate context".to_string());
|
||||
}
|
||||
|
||||
// Set the CodecPrivate data as the decoder's extradata
|
||||
(*context).extradata = codec_private.as_ptr() as *mut u8;
|
||||
(*context).extradata_size = codec_private.len() as i32;
|
||||
|
||||
// Derive codec parameters
|
||||
match codec_id {
|
||||
"V_VP9" => {
|
||||
// VP9: assume default values; real parsing would require inspecting the frame data
|
||||
Ok("vp09.00.10.08".to_string())
|
||||
}
|
||||
"V_MPEG4/ISO/AVC" => {
|
||||
let profile = (*context).profile; // profile reported by FFmpeg
|
||||
let level = (*context).level;
|
||||
Ok(format!("avc1.{:02x}00{:02x}", profile, level))
|
||||
}
|
||||
"V_MPEGH/ISO/HEVC" => {
|
||||
let profile = (*context).profile;
|
||||
let level = (*context).level;
|
||||
Ok(format!("hev1.{}.0.{}.B0", profile, level))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(left: u64, right: u64) -> u64 {
|
||||
left + right
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
let result = add(2, 2);
|
||||
assert_eq!(result, 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn main() {
|
||||
let codec_id = "V_MPEGH/ISO/HEVC";
|
||||
let codec_private = vec![
|
||||
1, 2, 32, 0, 0, 0, 144, 0, 0, 0, 0, 0, 120, 240, 0, 252, 253, 250, 250, 0, 0, 15, 4,
|
||||
160, 0, 1, 0, 25, 64, 1, 12, 1, 255, 255, 2, 32, 0, 0, 3, 0, 144, 0, 0, 3, 0, 0, 3, 0,
|
||||
120, 153, 138, 2, 64, 161, 0, 1, 0, 44, 66, 1, 1, 2, 32, 0, 0, 3, 0, 144, 0, 0, 3, 0,
|
||||
0, 3, 0, 120, 160, 3, 192, 128, 16, 228, 217, 102, 98, 174, 70, 194, 166, 160, 32, 32,
|
||||
60, 32, 0, 0, 125, 32, 0, 11, 184, 1, 162, 0, 1, 0, 9, 68, 1, 193, 114, 138, 86, 113,
|
||||
178, 64, 167, 0, 1, 0, 121, 78, 1, 5, 116, 44, 162, 222, 9, 181, 23, 71, 219, 187, 85,
|
||||
164, 254, 127, 194, 252, 78, 120, 50, 54, 53, 32, 45, 32, 45, 32, 72, 46, 50, 54, 53,
|
||||
47, 72, 69, 86, 67, 32, 99, 111, 100, 101, 99, 32, 45, 32, 67, 111, 112, 121, 114, 105,
|
||||
103, 104, 116, 32, 50, 48, 49, 51, 45, 50, 48, 49, 56, 32, 40, 99, 41, 32, 77, 117,
|
||||
108, 116, 105, 99, 111, 114, 101, 119, 97, 114, 101, 44, 32, 73, 110, 99, 32, 45, 32,
|
||||
104, 116, 116, 112, 58, 47, 47, 120, 50, 54, 53, 46, 111, 114, 103, 32, 45, 32, 111,
|
||||
112, 116, 105, 111, 110, 115, 58, 32, 128,
|
||||
];
|
||||
match get_webcodecs_codec(codec_id, &codec_private) {
|
||||
Ok(codec) => println!("WebCodecs codec: {}", codec),
|
||||
Err(e) => eprintln!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
11
packages/matroska/package.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "@konoplayer/matroska",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {},
|
||||
"dependencies": {
|
||||
"@konoplayer/core": "workspace:*",
|
||||
"konoebml": "^0.1.2"
|
||||
}
|
||||
}
|
124
packages/matroska/src/codecs/aac.ts
Normal file
@ -0,0 +1,124 @@
|
||||
import { ParseCodecError } from '@konoplayer/core/errors';
|
||||
import { ArkErrors, type } from 'arktype';
|
||||
|
||||
export const AAC_CODEC_TYPE = 'AAC';
|
||||
|
||||
export const AudioObjectTypeSchema = type('1 | 2 | 3 | 4 | 5 | 29 | 67 | 23');
|
||||
|
||||
export const SamplingFrequencyIndexSchema = type(
|
||||
'1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12'
|
||||
);
|
||||
|
||||
export const ChannelConfigurationSchema = type('1 | 2 | 3 | 4 | 5 | 6 | 7');
|
||||
|
||||
export const AudioSpecificConfigSchema = type({
|
||||
audioObjectType: AudioObjectTypeSchema, // AAC profiles: Main, LC, SSR, LTP, HE, HE v2
|
||||
samplingFrequencyIndex: SamplingFrequencyIndexSchema.optional(), // Sampling rate index
|
||||
channelConfiguration: ChannelConfigurationSchema, // Channel config (1-7)
|
||||
sbrPresent: type.boolean.optional(), // Optional: Indicates SBR presence
|
||||
psPresent: type.boolean.optional(), // Optional: Indicates PS presence (for HE-AAC v2)
|
||||
});
|
||||
|
||||
export type AudioSpecificConfigType = typeof AudioSpecificConfigSchema.infer;
|
||||
|
||||
/**
|
||||
* Parse AudioSpecificConfig from codec_private Uint8Array
|
||||
* @param codecPrivate - Uint8Array containing codec_private data
|
||||
* @returns Parsed AudioSpecificConfig or throws an error if invalid
|
||||
*/
|
||||
export function parseAudioSpecificConfig(
|
||||
codecPrivate: Uint8Array
|
||||
): AudioSpecificConfigType {
|
||||
if (codecPrivate.length < 2) {
|
||||
throw new ParseCodecError(AAC_CODEC_TYPE, 'codec_private data too short');
|
||||
}
|
||||
|
||||
// Create a DataView for bit-level manipulation
|
||||
const view = new DataView(
|
||||
codecPrivate.buffer,
|
||||
codecPrivate.byteOffset,
|
||||
codecPrivate.byteLength
|
||||
);
|
||||
let byteOffset = 0;
|
||||
let bitOffset = 0;
|
||||
|
||||
// Helper function to read specific number of bits
|
||||
function readBits(bits: number): number {
|
||||
let value = 0;
|
||||
for (let i = 0; i < bits; i++) {
|
||||
const byte = view.getUint8(byteOffset);
|
||||
const bit = (byte >> (7 - bitOffset)) & 1;
|
||||
value = (value << 1) | bit;
|
||||
bitOffset++;
|
||||
if (bitOffset === 8) {
|
||||
bitOffset = 0;
|
||||
byteOffset++;
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
// Read 5 bits for audioObjectType
|
||||
const audioObjectType = readBits(5);
|
||||
|
||||
// Read 4 bits for samplingFrequencyIndex
|
||||
const samplingFrequencyIndex = readBits(4);
|
||||
|
||||
// Read 4 bits for channelConfiguration
|
||||
const channelConfiguration = readBits(4);
|
||||
|
||||
// Check for SBR/PS extension (if audioObjectType indicates HE-AAC)
|
||||
let sbrPresent = false;
|
||||
let psPresent = false;
|
||||
if (audioObjectType === 5 || audioObjectType === 29) {
|
||||
sbrPresent = true;
|
||||
if (audioObjectType === 29) {
|
||||
psPresent = true; // HE-AAC v2 includes Parametric Stereo
|
||||
}
|
||||
// Skip extension-specific bits if present (simplified here)
|
||||
// In real cases, additional parsing may be needed
|
||||
}
|
||||
|
||||
// Construct the result object
|
||||
const config: AudioSpecificConfigType = {
|
||||
audioObjectType:
|
||||
audioObjectType as AudioSpecificConfigType['audioObjectType'],
|
||||
samplingFrequencyIndex:
|
||||
samplingFrequencyIndex as AudioSpecificConfigType['samplingFrequencyIndex'],
|
||||
channelConfiguration:
|
||||
channelConfiguration as AudioSpecificConfigType['channelConfiguration'],
|
||||
...(sbrPresent && { sbrPresent }),
|
||||
...(psPresent && { psPresent }),
|
||||
};
|
||||
|
||||
// Validate with arktype
|
||||
const validation = AudioSpecificConfigSchema(config);
|
||||
if (validation instanceof ArkErrors) {
|
||||
const error = new ParseCodecError(
|
||||
AAC_CODEC_TYPE,
|
||||
'Invalid AudioSpecificConfig'
|
||||
);
|
||||
error.cause = validation;
|
||||
throw error;
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
export function genCodecIdByAudioSpecificConfig(
|
||||
config: AudioSpecificConfigType
|
||||
) {
|
||||
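// MP4 audio codec string: mp4a.40.<audioObjectType>, e.g. mp4a.40.2 for AAC-LC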
return `mp4a.40.${config.audioObjectType}`;
|
||||
}
|
||||
|
||||
export function samplesPerFrameByAACAudioObjectType(audioObjectType: number) {
|
||||
switch (audioObjectType) {
|
||||
case 5:
|
||||
case 29:
|
||||
return 2048;
|
||||
case 23:
|
||||
return 512;
|
||||
default:
|
||||
return 1024;
|
||||
}
|
||||
}
|
167
packages/matroska/src/codecs/av1.ts
Normal file
@ -0,0 +1,167 @@
|
||||
import { BitReader } from '@konoplayer/core/data';
|
||||
import { type } from 'arktype';
|
||||
import type { TrackEntryType } from '../schema';
|
||||
import { ParseCodecError } from '@konoplayer/core/errors';
|
||||
|
||||
export const AV1_CODEC_TYPE = 'AV1';
|
||||
|
||||
export const AV1DecoderConfigurationRecordSchema = type({
|
||||
marker: type.number, // 1 bit, must be 1
|
||||
version: type.number, // 7 bits, must be 1
|
||||
seqProfile: type.number, // 3 bits, seq profile (0-7)
|
||||
seqLevelIdx0: type.number, // 5 bits, seq level (0-31)
|
||||
seqTier0: type.number, // 1 bit, tier (0 or 1)
|
||||
highBitdepth: type.number, // 1 bit, high or low
|
||||
twelveBit: type.number, // 1 bit, if 12-bit
|
||||
monochrome: type.number, // 1 bit, if monochrome
|
||||
chromaSubsamplingX: type.number, // 1 bit, sub sampling X
|
||||
chromaSubsamplingY: type.number, // 1 bit, sub sampling Y
|
||||
chromaSamplePosition: type.number, // 2 bits
|
||||
initialPresentationDelayPresent: type.number, // 1 bit
|
||||
initialPresentationDelayMinus1: type.number.optional(), // 4 bits, optional
|
||||
configOBUs: type.instanceOf(Uint8Array<ArrayBufferLike>), // remain OBU data
|
||||
});
|
||||
|
||||
export type AV1DecoderConfigurationRecordType =
|
||||
typeof AV1DecoderConfigurationRecordSchema.infer;
|
||||
|
||||
/**
|
||||
* [webkit impl](https://github.com/movableink/webkit/blob/7e43fe7000b319ce68334c09eed1031642099726/Source/WebCore/platform/graphics/AV1Utilities.cpp#L48)
|
||||
*/
|
||||
export function parseAV1DecoderConfigurationRecord(
|
||||
track: TrackEntryType
|
||||
): AV1DecoderConfigurationRecordType {
|
||||
const codecPrivate = track.CodecPrivate;
|
||||
|
||||
if (!codecPrivate) {
|
||||
throw new ParseCodecError(
|
||||
AV1_CODEC_TYPE,
|
||||
'CodecPrivate of AV1 Track is missing'
|
||||
);
|
||||
}
|
||||
|
||||
if (codecPrivate.length < 4) {
|
||||
throw new ParseCodecError(
|
||||
AV1_CODEC_TYPE,
|
||||
'Input data too short for AV1DecoderConfigurationRecord'
|
||||
);
|
||||
}
|
||||
|
||||
const reader = new BitReader(codecPrivate);
|
||||
|
||||
// Byte 0
|
||||
const marker = reader.readBits(1);
|
||||
const version = reader.readBits(7);
|
||||
if (marker !== 1 || version !== 1) {
|
||||
throw new ParseCodecError(
|
||||
AV1_CODEC_TYPE,
|
||||
`Invalid marker (${marker}) or version (${version})`
|
||||
);
|
||||
}
|
||||
|
||||
const seqProfile = reader.readBits(3);
|
||||
const seqLevelIdx0 = reader.readBits(5);
|
||||
|
||||
// Byte 1
|
||||
const seqTier0 = reader.readBits(1);
|
||||
const highBitdepth = reader.readBits(1);
|
||||
const twelveBit = reader.readBits(1);
|
||||
const monochrome = reader.readBits(1);
|
||||
const chromaSubsamplingX = reader.readBits(1);
|
||||
const chromaSubsamplingY = reader.readBits(1);
|
||||
const chromaSamplePosition = reader.readBits(2);
|
||||
|
||||
// Byte 2
|
||||
const reserved1 = reader.readBits(3);
|
||||
if (reserved1 !== 0) {
|
||||
throw new ParseCodecError(
|
||||
AV1_CODEC_TYPE,
|
||||
`Reserved bits must be 0, got ${reserved1}`
|
||||
);
|
||||
}
|
||||
const initialPresentationDelayPresent = reader.readBits(1);
|
||||
let initialPresentationDelayMinus1: number | undefined;
|
||||
if (initialPresentationDelayPresent) {
|
||||
initialPresentationDelayMinus1 = reader.readBits(4);
|
||||
} else {
|
||||
const reserved2 = reader.readBits(4);
|
||||
if (reserved2 !== 0) {
|
||||
throw new ParseCodecError(
|
||||
AV1_CODEC_TYPE,
|
||||
`Reserved bits must be 0, got ${reserved2}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// remain bytes as configOBUs
|
||||
const configOBUs = reader.getRemainingBytes();
|
||||
|
||||
return {
|
||||
marker,
|
||||
version,
|
||||
seqProfile,
|
||||
seqLevelIdx0,
|
||||
seqTier0,
|
||||
highBitdepth,
|
||||
twelveBit,
|
||||
monochrome,
|
||||
chromaSubsamplingX,
|
||||
chromaSubsamplingY,
|
||||
chromaSamplePosition,
|
||||
initialPresentationDelayPresent,
|
||||
initialPresentationDelayMinus1,
|
||||
configOBUs,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* [webkit impl](https://github.com/movableink/webkit/blob/7e43fe7000b319ce68334c09eed1031642099726/Source/WebCore/platform/graphics/AV1Utilities.cpp#L197)
|
||||
*/
|
||||
export function genCodecStringByAV1DecoderConfigurationRecord(
|
||||
config: AV1DecoderConfigurationRecordType
|
||||
): string {
|
||||
const parts: string[] = [];
|
||||
|
||||
// Prefix
|
||||
parts.push('av01');
|
||||
|
||||
// Profile
|
||||
parts.push(config.seqProfile.toString());
|
||||
|
||||
// Level and Tier
|
||||
const levelStr = config.seqLevelIdx0.toString().padStart(2, '0');
|
||||
const tierStr = config.seqTier0 === 0 ? 'M' : 'H';
|
||||
parts.push(`${levelStr}${tierStr}`);
|
||||
|
||||
// Bit Depth
|
||||
let bitDepthStr: string;
|
||||
if (config.highBitdepth === 0) {
|
||||
bitDepthStr = '08'; // 8-bit
|
||||
} else if (config.twelveBit === 0) {
|
||||
bitDepthStr = '10'; // 10-bit
|
||||
} else {
|
||||
bitDepthStr = '12'; // 12-bit
|
||||
}
|
||||
parts.push(bitDepthStr);
|
||||
|
||||
// Monochrome
|
||||
parts.push(config.monochrome.toString());
|
||||
|
||||
// Chroma Subsampling
|
||||
const chromaSubsampling = `${config.chromaSubsamplingX}${config.chromaSubsamplingY}${config.chromaSamplePosition}`;
|
||||
parts.push(chromaSubsampling);
|
||||
|
||||
// Initial Presentation Delay(optional)
|
||||
if (
|
||||
config.initialPresentationDelayPresent === 1 &&
|
||||
config.initialPresentationDelayMinus1 !== undefined
|
||||
) {
|
||||
const delay = (config.initialPresentationDelayMinus1 + 1)
|
||||
.toString()
|
||||
.padStart(2, '0');
|
||||
parts.push(delay);
|
||||
}
|
||||
|
||||
// joined
|
||||
return parts.join('.');
|
||||
}
|
148
packages/matroska/src/codecs/avc.ts
Normal file
@ -0,0 +1,148 @@
|
||||
import { ParseCodecError } from '@konoplayer/core/errors';
|
||||
import { type } from 'arktype';
|
||||
import type { TrackEntryType } from '../schema';
|
||||
|
||||
export const AVC_CODEC_TYPE = 'h264(AVC)';
|
||||
|
||||
export const AVCDecoderConfigurationRecordSchema = type({
|
||||
configurationVersion: type.number, // Configuration version, typically 1
|
||||
avcProfileIndication: type.number, // AVC profile
|
||||
profileCompatibility: type.number, // Profile compatibility
|
||||
avcLevelIndication: type.number, // AVC level
|
||||
lengthSizeMinusOne: type.number, // NAL unit length field size minus 1
|
||||
sps: type
|
||||
.instanceOf(Uint8Array<ArrayBufferLike>)
|
||||
.array()
|
||||
.atLeastLength(1), // Sequence Parameter Sets (SPS)
|
||||
pps: type
|
||||
.instanceOf(Uint8Array<ArrayBufferLike>)
|
||||
.array()
|
||||
.atLeastLength(1), // Picture Parameter Sets (PPS)
|
||||
});
|
||||
|
||||
export type AVCDecoderConfigurationRecordType =
|
||||
typeof AVCDecoderConfigurationRecordSchema.infer;
|
||||
|
||||
/**
|
||||
*
|
||||
* @see [webkit](https://github.com/movableink/webkit/blob/7e43fe7000b319ce68334c09eed1031642099726/Source/WebCore/platform/graphics/HEVCUtilities.cpp#L84)
|
||||
*/
|
||||
export function parseAVCDecoderConfigurationRecord(
|
||||
track: TrackEntryType
|
||||
): AVCDecoderConfigurationRecordType {
|
||||
// ISO/IEC 14496-10:2014
|
||||
// 7.3.2.1.1 Sequence parameter set data syntax
|
||||
const codecPrivate = track.CodecPrivate;
|
||||
|
||||
if (!codecPrivate) {
|
||||
throw new ParseCodecError(
|
||||
AVC_CODEC_TYPE,
|
||||
'CodecPrivate of AVC Track is missing'
|
||||
);
|
||||
}
|
||||
|
||||
// AVCDecoderConfigurationRecord is at a minimum 24 bytes long
|
||||
if (codecPrivate.length < 24) {
|
||||
throw new ParseCodecError(
|
||||
AVC_CODEC_TYPE,
|
||||
'Input data too short for AVCDecoderConfigurationRecord'
|
||||
);
|
||||
}
|
||||
|
||||
const view = new DataView(codecPrivate.buffer, codecPrivate.byteOffset, codecPrivate.byteLength);
|
||||
let offset = 0;
|
||||
|
||||
const readUint8 = (move: boolean) => {
|
||||
const result = view.getUint8(offset);
|
||||
if (move) {
|
||||
offset += 1;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const readUint16 = (move: boolean) => {
|
||||
const result = view.getUint16(offset, false);
|
||||
if (move) {
|
||||
offset += 2;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const configurationVersion = readUint8(true);
|
||||
const avcProfileIndication = readUint8(true);
|
||||
const profileCompatibility = readUint8(true);
|
||||
const avcLevelIndication = readUint8(true);
|
||||
|
||||
// Read lengthSizeMinusOne (first 6 bits are reserved, typically 0xFF, last 2 bits are the value)
|
||||
const lengthSizeMinusOne = readUint8(true) & 0x03;
|
||||
|
||||
// Read number of SPS (first 3 bits are reserved, typically 0xE0, last 5 bits are SPS count)
|
||||
const numOfSPS = readUint8(true) & 0x1f;
|
||||
const sps: Uint8Array[] = [];
|
||||
|
||||
// Parse SPS
|
||||
for (let i = 0; i < numOfSPS; i++) {
|
||||
if (offset + 2 > codecPrivate.length) {
|
||||
throw new ParseCodecError(AVC_CODEC_TYPE, 'Invalid SPS length');
|
||||
}
|
||||
|
||||
const spsLength = readUint16(true);
|
||||
|
||||
if (offset + spsLength > codecPrivate.length) {
|
||||
throw new ParseCodecError(
|
||||
AVC_CODEC_TYPE,
|
||||
'SPS data exceeds buffer length'
|
||||
);
|
||||
}
|
||||
|
||||
sps.push(codecPrivate.subarray(offset, offset + spsLength));
|
||||
offset += spsLength;
|
||||
}
|
||||
|
||||
// Read number of PPS
|
||||
if (offset >= codecPrivate.length) {
|
||||
throw new ParseCodecError(AVC_CODEC_TYPE, 'No space for PPS count');
|
||||
}
|
||||
const numOfPPS = readUint8(true);
|
||||
const pps: Uint8Array[] = [];
|
||||
|
||||
// Parse PPS
|
||||
for (let i = 0; i < numOfPPS; i++) {
|
||||
if (offset + 2 > codecPrivate.length) {
|
||||
throw new ParseCodecError(AVC_CODEC_TYPE, 'Invalid PPS length');
|
||||
}
|
||||
|
||||
const ppsLength = readUint16(true);
|
||||
|
||||
if (offset + ppsLength > codecPrivate.length) {
|
||||
throw new ParseCodecError(
|
||||
AVC_CODEC_TYPE,
|
||||
'PPS data exceeds buffer length'
|
||||
);
|
||||
}
|
||||
|
||||
pps.push(codecPrivate.subarray(offset, offset + ppsLength));
|
||||
offset += ppsLength;
|
||||
}
|
||||
|
||||
return {
|
||||
configurationVersion,
|
||||
avcProfileIndication,
|
||||
profileCompatibility,
|
||||
avcLevelIndication,
|
||||
lengthSizeMinusOne,
|
||||
sps,
|
||||
pps,
|
||||
};
|
||||
}
|
||||
|
||||
export function genCodecStringByAVCDecoderConfigurationRecord(
|
||||
config: AVCDecoderConfigurationRecordType
|
||||
): string {
|
||||
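// avc1.PPCCLL: profile_idc, profile_compatibility and level_idc, each rendered as two hex digits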
const profileHex = config.avcProfileIndication.toString(16).padStart(2, '0');
|
||||
const profileCompatHex = config.profileCompatibility
|
||||
.toString(16)
|
||||
.padStart(2, '0');
|
||||
const levelHex = config.avcLevelIndication.toString(16).padStart(2, '0');
|
||||
return `avc1.${profileHex}${profileCompatHex}${levelHex}`;
|
||||
}
|
214
packages/matroska/src/codecs/hevc.ts
Normal file
@ -0,0 +1,214 @@
|
||||
import { ParseCodecError } from '@konoplayer/core/errors';
|
||||
import { ArkErrors, type } from 'arktype';
|
||||
import type { TrackEntryType } from '../schema';
|
||||
|
||||
export const HEVC_CODEC_TYPE = 'h265(HEVC)';
|
||||
|
||||
export const HEVCDecoderConfigurationRecordArraySchema = type({
|
||||
arrayCompleteness: type.number,
|
||||
nalUnitType: type.number,
|
||||
numNalus: type.number,
|
||||
nalUnit: type.instanceOf(Uint8Array<ArrayBufferLike>).array(),
|
||||
});
|
||||
|
||||
export type HEVCDecoderConfigurationRecordArrayType =
|
||||
typeof HEVCDecoderConfigurationRecordArraySchema.infer;
|
||||
|
||||
// Define the schema for HEVCDecoderConfigurationRecord
|
||||
export const HEVCDecoderConfigurationRecordSchema = type({
|
||||
configurationVersion: type.number, // Must be 1
|
||||
generalProfileSpace: type.number,
|
||||
generalTierFlag: type.number,
|
||||
generalProfileIdc: type.number,
|
||||
generalProfileCompatibilityFlags: type.number,
|
||||
generalConstraintIndicatorFlags: type.number,
|
||||
generalLevelIdc: type.number,
|
||||
minSpatialSegmentationIdc: type.number,
|
||||
parallelismType: type.number,
|
||||
chromaFormat: type.number,
|
||||
bitDepthLumaMinus8: type.number,
|
||||
bitDepthChromaMinus8: type.number,
|
||||
avgFrameRate: type.number,
|
||||
constantFrameRate: type.number,
|
||||
numTemporalLayers: type.number,
|
||||
temporalIdNested: type.number,
|
||||
lengthSizeMinusOne: type.number,
|
||||
numOfArrays: type.number,
|
||||
nalUnits: HEVCDecoderConfigurationRecordArraySchema.array(),
|
||||
});
|
||||
|
||||
export type HEVCDecoderConfigurationRecordType =
|
||||
typeof HEVCDecoderConfigurationRecordSchema.infer;
|
||||
|
||||
export function parseHEVCDecoderConfigurationRecord(
|
||||
track: TrackEntryType
|
||||
): HEVCDecoderConfigurationRecordType {
|
||||
const codecPrivate = track.CodecPrivate;
|
||||
if (!codecPrivate) {
|
||||
throw new ParseCodecError(
|
||||
HEVC_CODEC_TYPE,
|
||||
'CodecPrivate of HEVC Track is missing'
|
||||
);
|
||||
}
|
||||
const view = new DataView(codecPrivate.buffer, codecPrivate.byteOffset, codecPrivate.byteLength);
|
||||
let offset = 0;
|
||||
|
||||
const readUint8 = (move: boolean) => {
|
||||
const result = view.getUint8(offset);
|
||||
if (move) {
|
||||
offset += 1;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const readUint16 = (move: boolean) => {
|
||||
const result = view.getUint16(offset, false);
|
||||
if (move) {
|
||||
offset += 2;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const readUint48 = (move: boolean) => {
|
||||
const result =
|
||||
view.getUint16(offset, false) * 2 ** 32 +
|
||||
view.getUint32(offset + 2, false);
|
||||
if (move) {
|
||||
offset += 6;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const readUint32 = (move: boolean) => {
|
||||
const result = view.getUint32(offset, false);
|
||||
if (move) {
|
||||
offset += 4;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
// Read and validate basic fields
|
||||
const config: HEVCDecoderConfigurationRecordType = {
|
||||
configurationVersion: readUint8(true),
|
||||
generalProfileSpace: (readUint8(false) & 0xc0) >> 6,
|
||||
generalTierFlag: (readUint8(false) & 0x20) >> 5,
|
||||
generalProfileIdc: readUint8(true) & 0x1f,
|
||||
generalProfileCompatibilityFlags: readUint32(true),
|
||||
generalConstraintIndicatorFlags: readUint48(true),
|
||||
generalLevelIdc: readUint8(true),
|
||||
minSpatialSegmentationIdc: readUint16(true) & 0x0fff,
|
||||
parallelismType: readUint8(true) & 0x03,
|
||||
chromaFormat: readUint8(true) & 0x03,
|
||||
bitDepthLumaMinus8: readUint8(true) & 0x07,
|
||||
bitDepthChromaMinus8: readUint8(true) & 0x07,
|
||||
avgFrameRate: readUint16(true),
|
||||
constantFrameRate: (readUint8(false) & 0xc0) >> 6,
|
||||
numTemporalLayers: (readUint8(false) & 0x38) >> 3,
|
||||
temporalIdNested: (readUint8(false) & 0x04) >> 2,
|
||||
lengthSizeMinusOne: readUint8(true) & 0x03,
|
||||
numOfArrays: readUint8(true),
|
||||
nalUnits: [],
|
||||
};
|
||||
|
||||
// Parse NAL unit arrays
|
||||
const arrays = config.nalUnits;
|
||||
|
||||
for (let i = 0; i < config.numOfArrays; i++) {
|
||||
const array: HEVCDecoderConfigurationRecordArrayType = {
|
||||
arrayCompleteness: (readUint8(false) & 0x80) >> 7,
|
||||
nalUnitType: readUint8(true) & 0x3f,
|
||||
numNalus: readUint16(true),
|
||||
nalUnit: [] as Uint8Array<ArrayBufferLike>[],
|
||||
};
|
||||
|
||||
for (let j = 0; j < array.numNalus; j++) {
|
||||
const nalUnitLength = readUint16(true);
|
||||
array.nalUnit.push(codecPrivate.subarray(offset, offset + nalUnitLength));
|
||||
offset += nalUnitLength;
|
||||
}
|
||||
arrays.push(array);
|
||||
}
|
||||
|
||||
// Validate using arktype
|
||||
const validation = HEVCDecoderConfigurationRecordSchema(config);
|
||||
if (validation instanceof ArkErrors) {
|
||||
const error = new ParseCodecError(
|
||||
HEVC_CODEC_TYPE,
|
||||
'Invalid HEVC configuration record'
|
||||
);
|
||||
error.cause = validation;
|
||||
throw error;
|
||||
}
|
||||
|
||||
return validation;
|
||||
}
|
||||
|
||||
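// The codec string writes general_profile_compatibility_flags with its 32 bits in reverse order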
function reverseBits32(value: number): number {
|
||||
let result = 0;
|
||||
for (let i = 0; i < 32; i++) {
|
||||
result = (result << 1) | ((value >> i) & 1);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see[webkit implementation](https://github.com/movableink/webkit/blob/7e43fe7000b319ce68334c09eed1031642099726/Source/WebCore/platform/graphics/HEVCUtilities.cpp#L204)
|
||||
*/
|
||||
export function genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
config: HEVCDecoderConfigurationRecordType
|
||||
) {
|
||||
const result: string[] = [];
|
||||
|
||||
// prefix
|
||||
result.push(`hev${config.configurationVersion}`);
|
||||
|
||||
// Profile Space
|
||||
if (config.generalProfileSpace > 0) {
|
||||
const profileSpaceChar = String.fromCharCode(
|
||||
'A'.charCodeAt(0) + config.generalProfileSpace - 1
|
||||
);
|
||||
result.push(profileSpaceChar + config.generalProfileIdc.toString());
|
||||
} else {
|
||||
result.push(config.generalProfileIdc.toString());
|
||||
}
|
||||
|
||||
// Profile Compatibility Flags
|
||||
const compatFlags = reverseBits32(config.generalProfileCompatibilityFlags)
|
||||
.toString(16)
|
||||
.toUpperCase();
|
||||
result.push(compatFlags);
|
||||
|
||||
// Tier Flag and Level IDC
|
||||
const tierPrefix = config.generalTierFlag ? 'H' : 'L';
|
||||
result.push(tierPrefix + config.generalLevelIdc.toString());
|
||||
|
||||
// Constraint Indicator Flags
|
||||
let constraintBytes: number[];
|
||||
if (Array.isArray(config.generalConstraintIndicatorFlags)) {
|
||||
constraintBytes = config.generalConstraintIndicatorFlags as number[];
|
||||
} else {
|
||||
// split 48 bit integer into 6 byte
|
||||
const flags = BigInt(config.generalConstraintIndicatorFlags);
|
||||
constraintBytes = [];
|
||||
for (let i = 5; i >= 0; i--) {
|
||||
constraintBytes.push(Number((flags >> BigInt(8 * i)) & BigInt(0xff)));
|
||||
}
|
||||
}
|
||||
|
||||
// find last non-zero byte
|
||||
const lastNonZeroIndex = constraintBytes.reduce(
|
||||
(last, byte, i) => (byte ? i : last),
|
||||
-1
|
||||
);
|
||||
if (lastNonZeroIndex >= 0) {
|
||||
for (let i = 0; i <= lastNonZeroIndex; i++) {
|
||||
const byteHex = constraintBytes[i]
|
||||
.toString(16)
|
||||
.padStart(2, '0')
|
||||
.toUpperCase();
|
||||
result.push(byteHex);
|
||||
}
|
||||
}
|
||||
|
||||
return result.join('.');
|
||||
}
|
436
packages/matroska/src/codecs/index.ts
Normal file
@ -0,0 +1,436 @@
|
||||
import {
|
||||
ParseCodecError,
|
||||
UnsupportedCodecError,
|
||||
} from '@konoplayer/core/errors';
|
||||
import { VideoCodec, AudioCodec } from '@konoplayer/core/codecs';
|
||||
import type { TrackEntryType } from '../schema';
|
||||
import {
|
||||
genCodecIdByAudioSpecificConfig,
|
||||
parseAudioSpecificConfig,
|
||||
samplesPerFrameByAACAudioObjectType,
|
||||
} from './aac';
|
||||
import {
|
||||
genCodecStringByAVCDecoderConfigurationRecord,
|
||||
parseAVCDecoderConfigurationRecord,
|
||||
} from './avc';
|
||||
import {
|
||||
genCodecStringByAV1DecoderConfigurationRecord,
|
||||
parseAV1DecoderConfigurationRecord,
|
||||
} from './av1';
|
||||
import {
|
||||
genCodecStringByHEVCDecoderConfigurationRecord,
|
||||
parseHEVCDecoderConfigurationRecord,
|
||||
} from './hevc';
|
||||
import {
|
||||
genCodecStringByVP9DecoderConfigurationRecord,
|
||||
parseVP9DecoderConfigurationRecord,
|
||||
VP9_CODEC_TYPE,
|
||||
} from './vp9';
|
||||
|
||||
export const VideoCodecId = {
|
||||
VCM: 'V_MS/VFW/FOURCC',
|
||||
UNCOMPRESSED: 'V_UNCOMPRESSED',
|
||||
MPEG4_ISO_SP: 'V_MPEG4/ISO/SP',
|
||||
MPEG4_ISO_ASP: 'V_MPEG4/ISO/ASP',
|
||||
MPEG4_ISO_AP: 'V_MPEG4/ISO/AP',
|
||||
MPEG4_MS_V3: 'V_MPEG4/MS/V3',
|
||||
MPEG1: 'V_MPEG1',
|
||||
MPEG2: 'V_MPEG2',
|
||||
H264: 'V_MPEG4/ISO/AVC',
|
||||
HEVC: 'V_MPEGH/ISO/HEVC',
|
||||
AVS2: 'V_AVS2',
|
||||
AVS3: 'V_AVS3',
|
||||
RV10: 'V_REAL/RV10',
|
||||
RV20: 'V_REAL/RV20',
|
||||
RV30: 'V_REAL/RV30',
|
||||
RV40: 'V_REAL/RV40',
|
||||
QUICKTIME: 'V_QUICKTIME',
|
||||
THEORA: 'V_THEORA',
|
||||
PROPRES: 'V_PRORES',
|
||||
VP8: 'V_VP8',
|
||||
VP9: 'V_VP9',
|
||||
FFV1: 'V_FFV1',
|
||||
AV1: 'V_AV1',
|
||||
} as const;
|
||||
|
||||
export type VideoCodecIdType =
|
||||
| `${(typeof VideoCodecId)[keyof typeof VideoCodecId]}`
|
||||
| string;
|
||||
|
||||
export const AudioCodecId = {
|
||||
MPEG_L3: 'A_MPEG/L3',
|
||||
MPEG_L2: 'A_MPEG/L2',
|
||||
MPEG_L1: 'A_MPEG/L1',
|
||||
PCM_INT_BIG: 'A_PCM/INT/BIG',
|
||||
PCM_INT_LIT: 'A_PCM/INT/LIT',
|
||||
PCM_FLOAT_IEEE: 'A_PCM/FLOAT/IEEE',
|
||||
MPC: 'A_MPC',
|
||||
AC3: 'A_AC3',
|
||||
AC3_BSID9: 'A_AC3/BSID9',
|
||||
AC3_BSID10: 'A_AC3/BSID10',
|
||||
ALAC: 'A_ALAC',
|
||||
DTS: 'A_DTS',
|
||||
DTS_EXPRESS: 'A_DTS/EXPRESS',
|
||||
DTS_LOSSLESS: 'A_DTS/LOSSLESS',
|
||||
VORBIS: 'A_VORBIS',
|
||||
OPUS: 'A_OPUS',
|
||||
FLAC: 'A_FLAC',
|
||||
EAC3: 'A_EAC3',
|
||||
REAL_14_4: 'A_REAL/14_4',
|
||||
REAL_28_8: 'A_REAL/28_8',
|
||||
REAL_COOK: 'A_REAL/COOK',
|
||||
REAL_SIPR: 'A_REAL/SIPR',
|
||||
REAL_RALF: 'A_REAL/RALF',
|
||||
REAL_ATRC: 'A_REAL/ATRC',
|
||||
MS_ACM: 'A_MS/ACM',
|
||||
AAC: 'A_AAC',
|
||||
AAC_MPEG2_MAIN: 'A_AAC/MPEG2/MAIN',
|
||||
AAC_MPEG2_LC: 'A_AAC/MPEG2/LC',
|
||||
AAC_MPEG2_LC_SBR: 'A_AAC/MPEG2/LC/SBR',
|
||||
AAC_MPEG2_SSR: 'A_AAC/MPEG2/SSR',
|
||||
AAC_MPEG4_MAIN: 'A_AAC/MPEG4/MAIN',
|
||||
AAC_MPEG4_LC: 'A_AAC/MPEG4/LC',
|
||||
AAC_MPEG4_SBR: 'A_AAC/MPEG4/LC/SBR',
|
||||
AAC_MPEG4_SSR: 'A_AAC/MPEG4/SSR',
|
||||
AAC_MPEG4_LTP: 'A_AAC/MPEG4/LTP',
|
||||
QUICKTIME: 'A_QUICKTIME',
|
||||
QDMC: 'A_QUICKTIME/QDMC',
|
||||
QDM2: 'A_QUICKTIME/QDM2',
|
||||
TTA1: 'A_TTA1',
|
||||
WAVEPACK4: 'A_WAVPACK4',
|
||||
ATRAC: 'A_ATRAC/AT1',
|
||||
} as const;
|
||||
|
||||
export type AudioCodecIdType =
|
||||
| `${(typeof AudioCodecId)[keyof typeof AudioCodecId]}`
|
||||
| string;
|
||||
|
||||
export const SubtitleCodecId = {
|
||||
UTF8: 'S_TEXT/UTF8',
|
||||
SSA: 'S_TEXT/SSA',
|
||||
ASS: 'S_TEXT/ASS',
|
||||
WEBVTT: 'S_TEXT/WEBVTT',
|
||||
BMP: 'S_IMAGE/BMP',
|
||||
DVBSUB: 'S_DVBSUB',
|
||||
VOBSUB: 'S_VOBSUB',
|
||||
HDMV_PGS: 'S_HDMV/PGS',
|
||||
HDMV_TEXTST: 'S_HDMV/TEXTST',
|
||||
KATE: 'S_KATE',
|
||||
ARIBSUB: 'S_ARIBSUB',
|
||||
} as const;
|
||||
|
||||
export type SubtitleCodecIdType =
|
||||
| `${(typeof SubtitleCodecId)[keyof typeof SubtitleCodecId]}`
|
||||
| string;
|
||||
|
||||
export interface VideoDecoderConfigExt extends VideoDecoderConfig {
|
||||
codecType: VideoCodec;
|
||||
}
|
||||
|
||||
export function videoCodecIdRequirePeekingKeyframe(codecId: VideoCodecIdType) {
|
||||
return codecId === VideoCodecId.VP9;
|
||||
}
|
||||
|
||||
export function videoCodecIdToWebCodecs(
|
||||
track: TrackEntryType,
|
||||
keyframe: Uint8Array | undefined
|
||||
): VideoDecoderConfigExt {
|
||||
const codecId = track.CodecID;
|
||||
const codecPrivate = track.CodecPrivate;
|
||||
const shareOptions = {
|
||||
description: codecPrivate,
|
||||
};
|
||||
switch (codecId) {
|
||||
case VideoCodecId.HEVC:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.HEVC,
|
||||
codec: genCodecStringByHEVCDecoderConfigurationRecord(
|
||||
parseHEVCDecoderConfigurationRecord(track)
|
||||
),
|
||||
};
|
||||
case VideoCodecId.VP9:
|
||||
if (!keyframe) {
|
||||
throw new ParseCodecError(
|
||||
VP9_CODEC_TYPE,
|
||||
'keyframe is required to parse VP9 codec'
|
||||
);
|
||||
}
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.VP9,
|
||||
codec: genCodecStringByVP9DecoderConfigurationRecord(
|
||||
parseVP9DecoderConfigurationRecord(track, keyframe)
|
||||
),
|
||||
};
|
||||
case VideoCodecId.AV1:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.AV1,
|
||||
codec: genCodecStringByAV1DecoderConfigurationRecord(
|
||||
parseAV1DecoderConfigurationRecord(track)
|
||||
),
|
||||
};
|
||||
case VideoCodecId.H264:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.H264,
|
||||
codec: genCodecStringByAVCDecoderConfigurationRecord(
|
||||
parseAVCDecoderConfigurationRecord(track)
|
||||
),
|
||||
};
|
||||
case VideoCodecId.THEORA:
|
||||
return { ...shareOptions, codecType: VideoCodec.Theora, codec: 'theora' };
|
||||
case VideoCodecId.VP8:
|
||||
return { ...shareOptions, codecType: VideoCodec.VP8, codec: 'vp8' };
|
||||
case VideoCodecId.MPEG4_ISO_SP:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.MPEG4,
|
||||
codec: 'mp4v.01.3',
|
||||
};
|
||||
case VideoCodecId.MPEG4_ISO_ASP:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.MPEG4,
|
||||
codec: 'mp4v.20.9',
|
||||
};
|
||||
case VideoCodecId.MPEG4_ISO_AP:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: VideoCodec.MPEG4,
|
||||
codec: 'mp4v.20.9',
|
||||
};
|
||||
default:
|
||||
throw new UnsupportedCodecError(codecId, 'web codecs video decoder');
|
||||
}
|
||||
}
|
||||
|
||||
export interface AudioDecoderConfigExt extends AudioDecoderConfig {
|
||||
codecType: AudioCodec;
|
||||
samplesPerFrame?: number;
|
||||
}
|
||||
|
||||
export function isAudioCodecIdRequirePeekingKeyframe(_track: TrackEntryType) {
|
||||
return false;
|
||||
}
|
||||
|
||||
export function audioCodecIdToWebCodecs(
|
||||
track: TrackEntryType,
|
||||
_keyframe: Uint8Array | undefined
|
||||
): AudioDecoderConfigExt {
|
||||
const codecId = track.CodecID;
|
||||
const codecPrivate = track.CodecPrivate;
|
||||
const bitDepth = track.Audio?.BitDepth;
|
||||
const numberOfChannels = Number(track.Audio?.Channels);
|
||||
const sampleRate = Number(track.Audio?.SamplingFrequency);
|
||||
|
||||
const shareOptions = {
|
||||
numberOfChannels,
|
||||
sampleRate,
|
||||
description: codecPrivate,
|
||||
};
|
||||
|
||||
switch (track.CodecID) {
|
||||
case AudioCodecId.AAC_MPEG4_MAIN:
|
||||
case AudioCodecId.AAC_MPEG2_MAIN:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.1',
|
||||
samplesPerFrame: 1024,
|
||||
};
|
||||
case AudioCodecId.AAC_MPEG2_LC:
|
||||
case AudioCodecId.AAC_MPEG4_LC:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.2',
|
||||
samplesPerFrame: 1024,
|
||||
};
|
||||
case AudioCodecId.AAC_MPEG2_SSR:
|
||||
case AudioCodecId.AAC_MPEG4_SSR:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.3',
|
||||
samplesPerFrame: 1024,
|
||||
};
|
||||
case AudioCodecId.AAC_MPEG4_LTP:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.4',
|
||||
samplesPerFrame: 1024,
|
||||
};
|
||||
case AudioCodecId.AAC_MPEG2_LC_SBR:
|
||||
case AudioCodecId.AAC_MPEG4_SBR:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.5',
|
||||
samplesPerFrame: 2048,
|
||||
};
|
||||
case AudioCodecId.AAC:
|
||||
if (codecPrivate) {
|
||||
const config = parseAudioSpecificConfig(codecPrivate);
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: genCodecIdByAudioSpecificConfig(config),
|
||||
samplesPerFrame: samplesPerFrameByAACAudioObjectType(
|
||||
config.audioObjectType
|
||||
),
|
||||
};
|
||||
}
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AAC,
|
||||
codec: 'mp4a.40.2',
|
||||
samplesPerFrame: 1024,
|
||||
};
|
||||
case AudioCodecId.AC3:
|
||||
case AudioCodecId.AC3_BSID9:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.AC3,
|
||||
codec: 'ac-3',
|
||||
samplesPerFrame: 1536,
|
||||
};
|
||||
case AudioCodecId.EAC3:
|
||||
case AudioCodecId.AC3_BSID10:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.EAC3,
|
||||
codec: 'ec-3',
|
||||
// TODO: FIXME
|
||||
// parse frame header
|
||||
// samples per frame = numblkscod * 256
|
||||
// most time numblkscod = 6
|
||||
// samplesPerFrame: 1536,
|
||||
};
|
||||
case AudioCodecId.MPEG_L3:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.MP3,
|
||||
codec: 'mp3',
|
||||
samplesPerFrame: 1152,
|
||||
};
|
||||
case AudioCodecId.VORBIS:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.Vorbis,
|
||||
codec: 'vorbis',
|
||||
/**
|
||||
* TODO: FIXME
|
||||
* read codec private
* parse setup header
|
||||
* ShortBlockSize = 2 ^ blocksize_0
|
||||
* LongBlockSize = 2 ^ blocksize_1
|
||||
*/
|
||||
samplesPerFrame: 2048,
|
||||
};
|
||||
case AudioCodecId.FLAC:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.FLAC,
|
||||
codec: 'flac',
|
||||
/**
|
||||
* TODO: FIXME
|
||||
* read codec private
|
||||
* get block size
|
||||
*/
|
||||
// samplesPerFrame: 4096,
|
||||
};
|
||||
case AudioCodecId.OPUS:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.Opus,
|
||||
codec: 'opus',
|
||||
/**
|
||||
* TODO: FIXME
|
||||
* Read TOC header from frame data
|
||||
*/
|
||||
// samplesPerFrame: 960,
|
||||
};
|
||||
case AudioCodecId.ALAC:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.ALAC,
|
||||
codec: 'alac',
|
||||
/**
|
||||
* TODO: FIXME
|
||||
* parse private data and get frame length
|
||||
* 00 00 10 00 // Frame Length (4096)
|
||||
00 00 00 00 // Compatible Version (0)
|
||||
00 10 // Bit Depth (16-bit)
|
||||
40 00 // PB (like 40)
|
||||
00 00 // MB (like 0)
|
||||
00 00 // KB (like 0)
|
||||
00 02 // Channels (2)
|
||||
00 00 AC 44 // Sample Rate (44100Hz)
|
||||
*/
|
||||
// samplesPerFrame: 4096,
|
||||
};
|
||||
case AudioCodecId.PCM_INT_BIG:
|
||||
if (bitDepth === 16) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S16BE,
|
||||
codec: 'pcm-s16be',
|
||||
};
|
||||
}
|
||||
if (bitDepth === 24) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S24BE,
|
||||
codec: 'pcm-s24be',
|
||||
};
|
||||
}
|
||||
if (bitDepth === 32) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S32BE,
|
||||
codec: 'pcm-s32be',
|
||||
};
|
||||
}
|
||||
throw new UnsupportedCodecError(
|
||||
`${codecId}(${bitDepth}b)`,
|
||||
'web codecs audio decoder'
|
||||
);
|
||||
case AudioCodecId.PCM_INT_LIT:
|
||||
if (bitDepth === 16) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S16LE,
|
||||
codec: 'pcm-s16le',
|
||||
};
|
||||
}
|
||||
if (bitDepth === 24) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S24LE,
|
||||
codec: 'pcm-s24le',
|
||||
};
|
||||
}
|
||||
if (bitDepth === 32) {
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_S32LE,
|
||||
codec: 'pcm-s32le',
|
||||
};
|
||||
}
|
||||
throw new UnsupportedCodecError(
|
||||
`${codecId}(${bitDepth}b)`,
|
||||
'web codecs audio decoder'
|
||||
);
|
||||
case AudioCodecId.PCM_FLOAT_IEEE:
|
||||
return {
|
||||
...shareOptions,
|
||||
codecType: AudioCodec.PCM_F32LE,
|
||||
codec: 'pcm-f32le',
|
||||
};
|
||||
default:
|
||||
throw new UnsupportedCodecError(codecId, 'web codecs audio decoder');
|
||||
}
|
||||
}
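
For illustration only (not part of this change), a minimal sketch of wiring the mapping above into a WebCodecs decoder; the helper name and callback shapes are assumptions:

// Hypothetical helper: configure a WebCodecs VideoDecoder from a track entry.
// Codecs flagged by videoCodecIdRequirePeekingKeyframe (e.g. VP9) must have a
// keyframe peeked first so the decoder configuration can be derived from it.
function configureVideoDecoder(
  track: TrackEntryType,
  keyframe: Uint8Array | undefined,
  output: (frame: VideoFrame) => void,
  error: (err: unknown) => void
): VideoDecoder {
  const config = videoCodecIdToWebCodecs(track, keyframe);
  const decoder = new VideoDecoder({ output, error });
  decoder.configure(config); // extra fields such as codecType are simply ignored
  return decoder;
}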
|
232
packages/matroska/src/codecs/vp9.ts
Normal file
@ -0,0 +1,232 @@
|
||||
import { type } from 'arktype';
|
||||
import type { TrackEntryType } from '../schema';
|
||||
import { BitReader } from '@konoplayer/core/data';
|
||||
import { ParseCodecError } from '@konoplayer/core/errors';
|
||||
|
||||
export const VP9_CODEC_TYPE = 'vp9';
|
||||
|
||||
export enum VP9ColorSpaceEnum {
|
||||
UNKNOWN = 0,
|
||||
BT_601 = 1, // eq bt_470bg
|
||||
BT_709 = 2,
|
||||
SMPTE_170 = 3,
|
||||
SMPTE_240 = 4,
|
||||
BT_2020 = 5,
|
||||
RESERVED = 6,
|
||||
SRGB = 7,
|
||||
}
|
||||
|
||||
export enum VP9YUVRange {
|
||||
STUDIO_SWING = 0,
|
||||
FULL_SWING = 1,
|
||||
}
|
||||
|
||||
export enum VP9Subsampling {
|
||||
UNKNOWN = 0,
|
||||
YUV420 = 1,
|
||||
YUV422 = 2,
|
||||
YUV440 = 3,
|
||||
YUV444 = 4,
|
||||
}
|
||||
|
||||
export const VP9PerformenceLevel = [
|
||||
{ level: '10', maxSampleRate: 829440, maxResolution: 36864 }, // Level 1
|
||||
{ level: '11', maxSampleRate: 2764800, maxResolution: 73728 }, // Level 1.1
|
||||
{ level: '20', maxSampleRate: 4608000, maxResolution: 122880 }, // Level 2
|
||||
{ level: '21', maxSampleRate: 9216000, maxResolution: 245760 }, // Level 2.1
|
||||
{ level: '30', maxSampleRate: 20736000, maxResolution: 552960 }, // Level 3
|
||||
{ level: '31', maxSampleRate: 36864000, maxResolution: 983040 }, // Level 3.1
|
||||
{ level: '40', maxSampleRate: 83558400, maxResolution: 2228224 }, // Level 4
|
||||
{ level: '41', maxSampleRate: 160432128, maxResolution: 2228224 }, // Level 4.1
|
||||
{ level: '50', maxSampleRate: 311951360, maxResolution: 8912896 }, // Level 5
|
||||
{ level: '51', maxSampleRate: 588251136, maxResolution: 8912896 }, // Level 5.1
|
||||
{ level: '52', maxSampleRate: 1176502272, maxResolution: 8912896 }, // Level 5.2
|
||||
{ level: '60', maxSampleRate: 1176502272, maxResolution: 35651584 }, // Level 6
|
||||
{ level: '61', maxSampleRate: 2353004544, maxResolution: 35651584 }, // Level 6.1
|
||||
{ level: '62', maxSampleRate: 4706009088, maxResolution: 35651584 }, // Level 6.2
|
||||
];
|
||||
|
||||
export const VP9DecoderConfigurationRecordSchema = type({
|
||||
profile: type.number, // 0 | 1 | 2 | 3,
|
||||
bitDepth: type.number, // 8 | 10 | 12
|
||||
colorSpace: type.number,
|
||||
subsampling: type.number, // 420 | 422 | 444
|
||||
width: type.number,
|
||||
height: type.number,
|
||||
yuvRangeFlag: type.number.optional(),
|
||||
hasScaling: type.boolean,
|
||||
renderWidth: type.number,
|
||||
renderHeight: type.number,
|
||||
frameRate: type.number, // frame per second
|
||||
estimateLevel: type.string,
|
||||
});
|
||||
|
||||
export type VP9DecoderConfigurationRecordType =
|
||||
typeof VP9DecoderConfigurationRecordSchema.infer;
|
||||
|
||||
export function parseVP9DecoderConfigurationRecord(
|
||||
track: TrackEntryType,
|
||||
keyframe: Uint8Array
|
||||
): VP9DecoderConfigurationRecordType {
|
||||
const reader = new BitReader(keyframe);
|
||||
const frameRate = 1_000_000_000 / Number(track.DefaultDuration) || 30;
|
||||
|
||||
// Frame Marker: 2 bits, must be 0b10
|
||||
const frameMarker = reader.readBits(2);
|
||||
if (frameMarker !== 2) {
|
||||
throw new ParseCodecError(VP9_CODEC_TYPE, 'invalid frame marker');
|
||||
}
|
||||
|
||||
// Profile: 2 bits
|
||||
const version = reader.readBits(1);
|
||||
const high = reader.readBits(1);
|
||||
|
||||
const profile = (high << 1) + version;
|
||||
|
||||
let reservedZero = 0;
|
||||
if (profile === 3) {
|
||||
reservedZero = reader.readBits(1);
|
||||
if (reservedZero !== 0) {
|
||||
throw new ParseCodecError(
|
||||
VP9_CODEC_TYPE,
|
||||
'Invalid reserved zero bit for profile 3'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Show Existing Frame: 1 bit
|
||||
const showExistingFrame = reader.readBits(1);
|
||||
if (showExistingFrame === 1) {
|
||||
throw new ParseCodecError(VP9_CODEC_TYPE, 'not a keyframe to parse');
|
||||
}
|
||||
|
||||
// Frame Type: 1 bit (0 = keyframe)
|
||||
const frameType = reader.readBits(1);
|
||||
if (frameType !== 0) {
|
||||
throw new ParseCodecError(VP9_CODEC_TYPE, 'not a keyframe to parse');
|
||||
}
|
||||
|
||||
// Show Frame and Error Resilient
|
||||
const _showFrame = reader.readBits(1);
|
||||
const _errorResilient = reader.readBits(1);
|
||||
|
||||
// Sync Code: 3 bytes (0x49, 0x83, 0x42)
|
||||
const syncCode =
|
||||
(reader.readBits(8) << 16) | (reader.readBits(8) << 8) | reader.readBits(8);
|
||||
if (syncCode !== 0x498342) {
|
||||
throw new ParseCodecError(VP9_CODEC_TYPE, 'Invalid sync code');
|
||||
}
|
||||
|
||||
// Bit Depth
|
||||
let bitDepth: number;
|
||||
if (profile >= 2) {
|
||||
const tenOrTwelveBit = reader.readBits(1);
|
||||
bitDepth = tenOrTwelveBit === 0 ? 10 : 12;
|
||||
} else {
|
||||
bitDepth = 8;
|
||||
}
|
||||
|
||||
const colorSpace = reader.readBits(3);
|
||||
|
||||
let subsamplingX: number;
|
||||
let subsamplingY: number;
|
||||
let yuvRangeFlag: number | undefined;
|
||||
if (colorSpace !== VP9ColorSpaceEnum.SRGB) {
|
||||
yuvRangeFlag = reader.readBits(1);
|
||||
if (profile === 1 || profile === 3) {
|
||||
subsamplingX = reader.readBits(1);
|
||||
subsamplingY = reader.readBits(1);
|
||||
reservedZero = reader.readBits(1);
|
||||
} else {
|
||||
subsamplingX = 1;
|
||||
subsamplingY = 1;
|
||||
}
|
||||
} else {
|
||||
if (profile !== 1 && profile !== 3) {
|
||||
throw new ParseCodecError(
|
||||
VP9_CODEC_TYPE,
|
||||
'VP9 profile with sRGB ColorSpace must be 1 or 3'
|
||||
);
|
||||
}
|
||||
subsamplingX = 0;
|
||||
subsamplingY = 0;
|
||||
reservedZero = reader.readBits(1);
|
||||
}
|
||||
|
||||
let subsampling: VP9Subsampling;
|
||||
|
||||
if (!subsamplingX && subsamplingY) {
|
||||
subsampling = VP9Subsampling.YUV440;
|
||||
} else if (subsamplingX && !subsamplingY) {
|
||||
subsampling = VP9Subsampling.YUV422;
|
||||
} else if (subsamplingX && subsamplingY) {
|
||||
subsampling = VP9Subsampling.YUV420;
|
||||
} else if (!subsamplingX && !subsamplingY) {
|
||||
subsampling = VP9Subsampling.YUV444;
|
||||
} else {
|
||||
subsampling = VP9Subsampling.UNKNOWN;
|
||||
}
|
||||
|
||||
// Frame Size (resolution)
|
||||
const widthMinus1 = reader.readBits(16);
|
||||
const heightMinus1 = reader.readBits(16);
|
||||
const hasScaling = !!reader.readBits(1);
|
||||
let renderWidthMinus1 = widthMinus1;
|
||||
let renderHeightMinus1 = heightMinus1;
|
||||
if (hasScaling) {
|
||||
renderWidthMinus1 = reader.readBits(16);
|
||||
renderHeightMinus1 = reader.readBits(16);
|
||||
}
|
||||
|
||||
const width = widthMinus1 + 1;
|
||||
const height = heightMinus1 + 1;
|
||||
|
||||
const sampleRate = width * height * frameRate;
|
||||
const resolution = width * height;
|
||||
|
||||
let estimateLevel = '62';
|
||||
for (const { level, maxSampleRate, maxResolution } of VP9PerformenceLevel) {
|
||||
if (sampleRate <= maxSampleRate && resolution <= maxResolution) {
|
||||
// check the additional profile and bitDepth requirements for this level
|
||||
if (profile >= 2 && bitDepth > 8 && Number.parseFloat(level) < 20) {
|
||||
continue;
|
||||
}
|
||||
estimateLevel = level;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
profile,
|
||||
bitDepth,
|
||||
colorSpace,
|
||||
subsampling,
|
||||
yuvRangeFlag,
|
||||
width,
|
||||
height,
|
||||
hasScaling,
|
||||
renderWidth: renderWidthMinus1 + 1,
|
||||
renderHeight: renderHeightMinus1 + 1,
|
||||
frameRate,
|
||||
estimateLevel,
|
||||
};
|
||||
}
|
||||
|
||||
// The format of the 'vp09' codec string is specified in the webm GitHub repo:
|
||||
// <https://github.com/webmproject/vp9-dash/blob/master/VPCodecISOMediaFileFormatBinding.md#codecs-parameter-string>
|
||||
//
|
||||
// The codecs parameter string for the VP codec family is as follows:
|
||||
// <sample entry 4CC>.<profile>.<level>.<bitDepth>.<chromaSubsampling>.
|
||||
// <colourPrimaries>.<transferCharacteristics>.<matrixCoefficients>.
|
||||
// <videoFullRangeFlag>
|
||||
// All parameter values are expressed as double-digit decimals.
|
||||
// sample entry 4CC, profile, level, and bitDepth are all mandatory fields.
|
||||
export function genCodecStringByVP9DecoderConfigurationRecord(
|
||||
config: VP9DecoderConfigurationRecordType
|
||||
): string {
|
||||
const profileStr = config.profile.toString().padStart(2, '0');
|
||||
const bitDepthStr = config.bitDepth.toString().padStart(2, '0');
|
||||
const levelStr = config.estimateLevel;
|
||||
|
||||
return `vp09.${profileStr}.${levelStr}.${bitDepthStr}`;
|
||||
}
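
As a worked example of the codec string format described above (values assumed, not taken from a real file): a 1920x1080, profile 0, 8-bit stream at ~30 fps lands on level '40' in the table above and yields 'vp09.00.40.08':

const example: VP9DecoderConfigurationRecordType = {
  profile: 0,
  bitDepth: 8,
  colorSpace: VP9ColorSpaceEnum.BT_709,
  subsampling: VP9Subsampling.YUV420,
  yuvRangeFlag: VP9YUVRange.STUDIO_SWING,
  hasScaling: false,
  width: 1920,
  height: 1080,
  renderWidth: 1920,
  renderHeight: 1080,
  frameRate: 30,
  estimateLevel: '40',
};
// => 'vp09.00.40.08'
console.log(genCodecStringByVP9DecoderConfigurationRecord(example));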
|
0
packages/matroska/src/index.ts
Normal file
14
packages/matroska/src/model/cluster.ts
Normal file
@ -0,0 +1,14 @@
|
||||
import type { ClusterType } from '../schema';
|
||||
|
||||
export function* clusterBlocks(cluster: ClusterType) {
|
||||
if (cluster.SimpleBlock) {
|
||||
for (const simpleBlock of cluster.SimpleBlock) {
|
||||
yield simpleBlock;
|
||||
}
|
||||
}
|
||||
if (cluster.BlockGroup) {
|
||||
for (const block of cluster.BlockGroup) {
|
||||
yield block;
|
||||
}
|
||||
}
|
||||
}
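
A small usage sketch (illustrative only, assuming a parsed ClusterType):

// Count every block-carrying element of a cluster: SimpleBlock entries first,
// then BlockGroup entries, matching the generator order above.
function countBlocks(cluster: ClusterType): number {
  let count = 0;
  for (const _block of clusterBlocks(cluster)) {
    count += 1;
  }
  return count;
}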
|
70
packages/matroska/src/model/index.ts
Normal file
@ -0,0 +1,70 @@
|
||||
import { type EbmlEBMLTagType, EbmlTagIdEnum, EbmlTagPosition } from 'konoebml';
|
||||
import {
|
||||
switchMap,
|
||||
filter,
|
||||
take,
|
||||
shareReplay,
|
||||
map,
|
||||
combineLatest,
|
||||
of, type Observable, delayWhen, throwIfEmpty,
|
||||
} from 'rxjs';
|
||||
import { isTagIdPos } from '../util';
|
||||
import { createRangedEbmlStream, type CreateRangedEbmlStreamOptions } from './resource';
|
||||
import { type MatroskaSegmentModel, createMatroskaSegment } from './segment';
|
||||
|
||||
export type CreateMatroskaOptions = Omit<
|
||||
CreateRangedEbmlStreamOptions,
|
||||
'byteStart' | 'byteEnd'
|
||||
>;
|
||||
|
||||
export interface MatroskaModel {
|
||||
totalSize?: number;
|
||||
initResponse: Response;
|
||||
head: EbmlEBMLTagType;
|
||||
segment: MatroskaSegmentModel;
|
||||
}
|
||||
|
||||
export function createMatroska(options: CreateMatroskaOptions): Observable<MatroskaModel> {
|
||||
const metadataRequest$ = createRangedEbmlStream({
|
||||
...options,
|
||||
byteStart: 0,
|
||||
});
|
||||
|
||||
return metadataRequest$.pipe(
|
||||
switchMap(({ totalSize, ebml$, response }) => {
|
||||
|
||||
/**
|
||||
* while the [matroska v4](https://www.matroska.org/technical/elements.html) spec says there is only one segment per file,
* some mkv files produced by unusual tools may emit several
|
||||
*/
|
||||
const segment$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Segment, EbmlTagPosition.Start)),
|
||||
map((startTag) => createMatroskaSegment({
|
||||
startTag,
|
||||
matroskaOptions: options,
|
||||
ebml$,
|
||||
})),
|
||||
delayWhen(
|
||||
({ loadedMetadata$ }) => loadedMetadata$
|
||||
),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const head$ = ebml$.pipe(
|
||||
filter(isTagIdPos(EbmlTagIdEnum.EBML, EbmlTagPosition.End)),
|
||||
take(1),
|
||||
shareReplay(1),
|
||||
throwIfEmpty(() => new Error("failed to find head tag"))
|
||||
);
|
||||
|
||||
return combineLatest({
|
||||
segment: segment$,
|
||||
head: head$,
|
||||
totalSize: of(totalSize),
|
||||
initResponse: of(response),
|
||||
});
|
||||
}),
|
||||
shareReplay(1)
|
||||
);
|
||||
}
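
For illustration, a minimal consumer sketch (assumes a range-request-capable URL and that switchMap is imported from rxjs; not part of this change):

createMatroska({ url: 'https://example.com/video.mkv' })
  .pipe(
    // seek(0) reuses the initial stream and emits clusters from the start
    switchMap(({ segment }) => segment.seek(0))
  )
  .subscribe((cluster) => {
    console.log('cluster at', cluster.Timestamp);
  });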
|
89
packages/matroska/src/model/resource.ts
Normal file
@ -0,0 +1,89 @@
|
||||
import {
|
||||
type CreateRangedStreamOptions,
|
||||
createRangedStream,
|
||||
} from '@konoplayer/core/data';
|
||||
import { type EbmlTagType, EbmlStreamDecoder, EbmlTagIdEnum } from 'konoebml';
|
||||
import { Observable, from, switchMap, share, of } from 'rxjs';
|
||||
import { waitTick } from '../util';
|
||||
|
||||
export interface CreateRangedEbmlStreamOptions extends CreateRangedStreamOptions {
|
||||
refCount?: boolean
|
||||
}
|
||||
|
||||
export function createRangedEbmlStream({
|
||||
url,
|
||||
byteStart = 0,
|
||||
byteEnd
|
||||
}: CreateRangedEbmlStreamOptions): Observable<{
|
||||
ebml$: Observable<EbmlTagType>;
|
||||
totalSize?: number;
|
||||
response: Response;
|
||||
body: ReadableStream<Uint8Array>;
|
||||
controller: AbortController;
|
||||
}> {
|
||||
const stream$ = from(createRangedStream({ url, byteStart, byteEnd }));
|
||||
|
||||
return stream$.pipe(
|
||||
switchMap(({ controller, body, totalSize, response }) => {
|
||||
let requestCompleted = false;
|
||||
|
||||
const ebml$ = new Observable<EbmlTagType>((subscriber) => {
|
||||
if (requestCompleted) {
|
||||
subscriber.complete();
|
||||
}
|
||||
body
|
||||
.pipeThrough(
|
||||
new EbmlStreamDecoder({
|
||||
streamStartOffset: byteStart,
|
||||
collectChild: (child) => child.id !== EbmlTagIdEnum.Cluster,
|
||||
backpressure: {
|
||||
eventLoop: waitTick,
|
||||
},
|
||||
})
|
||||
)
|
||||
.pipeTo(
|
||||
new WritableStream({
|
||||
write: async (tag) => {
|
||||
await waitTick();
|
||||
subscriber.next(tag);
|
||||
},
|
||||
close: () => {
|
||||
if (!requestCompleted) {
|
||||
requestCompleted = true;
|
||||
subscriber.complete();
|
||||
}
|
||||
},
|
||||
})
|
||||
)
|
||||
.catch((error) => {
|
||||
if (requestCompleted && error?.name === 'AbortError') {
|
||||
return;
|
||||
}
|
||||
requestCompleted = true;
|
||||
subscriber.error(error);
|
||||
});
|
||||
|
||||
return () => {
|
||||
if (!requestCompleted) {
|
||||
requestCompleted = true;
|
||||
controller.abort();
|
||||
}
|
||||
};
|
||||
}).pipe(
|
||||
share({
|
||||
resetOnComplete: false,
|
||||
resetOnError: false,
|
||||
resetOnRefCountZero: true,
|
||||
})
|
||||
);
|
||||
|
||||
return of({
|
||||
totalSize,
|
||||
response,
|
||||
body,
|
||||
controller,
|
||||
ebml$
|
||||
});
|
||||
})
|
||||
);
|
||||
}
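
A hedged example of consuming the ranged stream directly (URL and offsets are placeholders; switchMap is assumed to be imported from rxjs):

createRangedEbmlStream({ url: 'https://example.com/video.mkv', byteStart: 0 })
  .pipe(switchMap(({ ebml$ }) => ebml$))
  .subscribe((tag) => {
    // each emission is a decoded EBML tag; Cluster children are not collected
    console.log(tag.id);
  });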
|
427
packages/matroska/src/model/segment.ts
Normal file
@ -0,0 +1,427 @@
|
||||
import { createAudioDecodeStream } from '@konoplayer/core/audition';
|
||||
import { createVideoDecodeStream } from '@konoplayer/core/graphics';
|
||||
import {
|
||||
type EbmlSegmentTagType,
|
||||
type EbmlTagType,
|
||||
EbmlTagIdEnum,
|
||||
EbmlTagPosition,
|
||||
} from 'konoebml';
|
||||
import {
|
||||
type Observable,
|
||||
scan,
|
||||
takeWhile,
|
||||
share,
|
||||
map,
|
||||
switchMap,
|
||||
shareReplay,
|
||||
EMPTY,
|
||||
filter,
|
||||
withLatestFrom,
|
||||
take,
|
||||
of,
|
||||
merge,
|
||||
isEmpty,
|
||||
finalize,
|
||||
delayWhen,
|
||||
from,
|
||||
} from 'rxjs';
|
||||
import type { CreateMatroskaOptions } from '.';
|
||||
import { type ClusterType, TrackTypeRestrictionEnum } from '../schema';
|
||||
import {
|
||||
SegmentSystem,
|
||||
type SegmentComponent,
|
||||
type VideoTrackContext,
|
||||
type AudioTrackContext,
|
||||
SEEK_ID_KAX_CUES,
|
||||
SEEK_ID_KAX_TAGS,
|
||||
type CueSystem,
|
||||
} from '../systems';
|
||||
import {
|
||||
standardTrackPredicate,
|
||||
standardTrackPriority,
|
||||
} from '../systems/track';
|
||||
import { isTagIdPos } from '../util';
|
||||
import { createRangedEbmlStream } from './resource';
|
||||
|
||||
export interface CreateMatroskaSegmentOptions {
|
||||
matroskaOptions: CreateMatroskaOptions;
|
||||
startTag: EbmlSegmentTagType;
|
||||
ebml$: Observable<EbmlTagType>;
|
||||
}
|
||||
|
||||
export interface MatroskaSegmentModel {
|
||||
startTag: EbmlSegmentTagType;
|
||||
segment: SegmentSystem;
|
||||
loadedMetadata$: Observable<SegmentSystem>;
|
||||
loadedTags$: Observable<SegmentSystem>;
|
||||
loadedCues$: Observable<SegmentSystem>;
|
||||
seek: (seekTime: number) => Observable<SegmentComponent<ClusterType>>;
|
||||
videoTrackDecoder: (
|
||||
track: VideoTrackContext,
|
||||
cluster$: Observable<ClusterType>
|
||||
) => Observable<{
|
||||
track: VideoTrackContext;
|
||||
decoder: VideoDecoder;
|
||||
frame$: Observable<VideoFrame>;
|
||||
}>;
|
||||
audioTrackDecoder: (
|
||||
track: AudioTrackContext,
|
||||
cluster$: Observable<ClusterType>
|
||||
) => Observable<{
|
||||
track: AudioTrackContext;
|
||||
decoder: AudioDecoder;
|
||||
frame$: Observable<AudioData>;
|
||||
}>;
|
||||
defaultVideoTrack$: Observable<VideoTrackContext | undefined>;
|
||||
defaultAudioTrack$: Observable<AudioTrackContext | undefined>;
|
||||
}
|
||||
|
||||
export function createMatroskaSegment({
|
||||
matroskaOptions,
|
||||
startTag,
|
||||
ebml$,
|
||||
}: CreateMatroskaSegmentOptions): MatroskaSegmentModel {
|
||||
const segment = new SegmentSystem(startTag);
|
||||
const clusterSystem = segment.cluster;
|
||||
const seekSystem = segment.seek;
|
||||
|
||||
const metaScan$ = ebml$.pipe(
|
||||
scan(
|
||||
(acc, tag) => {
|
||||
const segment = acc.segment;
|
||||
segment.scanMeta(tag);
|
||||
acc.tag = tag;
|
||||
acc.canComplete = segment.canCompleteMeta();
|
||||
return acc;
|
||||
},
|
||||
{
|
||||
segment,
|
||||
tag: undefined as unknown as EbmlTagType,
|
||||
canComplete: false,
|
||||
}
|
||||
),
|
||||
takeWhile(({ canComplete }) => !canComplete, true),
|
||||
delayWhen(({ segment }) => from(segment.completeMeta())),
|
||||
share({
|
||||
resetOnComplete: false,
|
||||
resetOnError: false,
|
||||
resetOnRefCountZero: true,
|
||||
})
|
||||
);
|
||||
|
||||
const loadedMetadata$ = metaScan$.pipe(
|
||||
filter(({ canComplete }) => canComplete),
|
||||
map(({ segment }) => segment),
|
||||
take(1),
|
||||
shareReplay(1),
|
||||
);
|
||||
|
||||
const loadedRemoteCues$ = loadedMetadata$.pipe(
|
||||
switchMap((s) => {
|
||||
const cueSystem = s.cue;
|
||||
const seekSystem = s.seek;
|
||||
if (cueSystem.prepared) {
|
||||
return EMPTY;
|
||||
}
|
||||
const remoteCuesTagStartOffset =
|
||||
seekSystem.seekOffsetBySeekId(SEEK_ID_KAX_CUES);
|
||||
if (remoteCuesTagStartOffset! >= 0) {
|
||||
return createRangedEbmlStream({
|
||||
...matroskaOptions,
|
||||
byteStart: remoteCuesTagStartOffset,
|
||||
}).pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cues, EbmlTagPosition.End)),
|
||||
withLatestFrom(loadedMetadata$),
|
||||
map(([cues, withMeta]) => {
|
||||
withMeta.cue.prepareCuesWithTag(cues);
|
||||
return withMeta;
|
||||
})
|
||||
);
|
||||
}
|
||||
return EMPTY;
|
||||
}),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const loadedLocalCues$ = loadedMetadata$.pipe(
|
||||
switchMap((s) => (s.cue.prepared ? of(s) : EMPTY)),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const loadedEmptyCues$ = merge(loadedLocalCues$, loadedRemoteCues$).pipe(
|
||||
isEmpty(),
|
||||
switchMap((empty) => (empty ? loadedMetadata$ : EMPTY))
|
||||
);
|
||||
|
||||
const loadedCues$ = merge(
|
||||
loadedLocalCues$,
|
||||
loadedRemoteCues$,
|
||||
loadedEmptyCues$
|
||||
).pipe(take(1));
|
||||
|
||||
const loadedRemoteTags$ = loadedMetadata$.pipe(
|
||||
switchMap((s) => {
|
||||
const tagSystem = s.tag;
|
||||
const seekSystem = s.seek;
|
||||
if (tagSystem.prepared) {
|
||||
return EMPTY;
|
||||
}
|
||||
|
||||
const remoteTagsTagStartOffset =
|
||||
seekSystem.seekOffsetBySeekId(SEEK_ID_KAX_TAGS);
|
||||
if (remoteTagsTagStartOffset! >= 0) {
|
||||
return createRangedEbmlStream({
|
||||
...matroskaOptions,
|
||||
byteStart: remoteTagsTagStartOffset,
|
||||
}).pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Tags, EbmlTagPosition.End)),
|
||||
withLatestFrom(loadedMetadata$),
|
||||
map(([tags, withMeta]) => {
|
||||
withMeta.tag.prepareTagsWithTag(tags);
|
||||
return withMeta;
|
||||
})
|
||||
);
|
||||
}
|
||||
return EMPTY;
|
||||
}),
|
||||
take(1),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const loadedLocalTags$ = loadedMetadata$.pipe(
|
||||
switchMap((s) => (s.tag.prepared ? of(s) : EMPTY)),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const loadedEmptyTags$ = merge(loadedRemoteTags$, loadedLocalTags$).pipe(
|
||||
isEmpty(),
|
||||
switchMap((empty) => (empty ? loadedMetadata$ : EMPTY))
|
||||
);
|
||||
|
||||
const loadedTags$ = merge(
|
||||
loadedLocalTags$,
|
||||
loadedRemoteTags$,
|
||||
loadedEmptyTags$
|
||||
).pipe(take(1));
|
||||
|
||||
const seekWithoutCues = (
|
||||
seekTime: number
|
||||
): Observable<SegmentComponent<ClusterType>> => {
|
||||
const request$ = loadedMetadata$.pipe(
|
||||
switchMap(() =>
|
||||
createRangedEbmlStream({
|
||||
...matroskaOptions,
|
||||
byteStart: seekSystem.firstClusterOffset,
|
||||
})
|
||||
)
|
||||
);
|
||||
const cluster$ = request$.pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
|
||||
map((tag) => clusterSystem.addClusterWithTag(tag))
|
||||
);
|
||||
|
||||
if (seekTime === 0) {
|
||||
return cluster$;
|
||||
}
|
||||
|
||||
return cluster$.pipe(
|
||||
scan(
|
||||
(acc, curr) => {
|
||||
// avoid object recreation
|
||||
acc.prev = acc.next;
|
||||
acc.next = curr;
|
||||
return acc;
|
||||
},
|
||||
{
|
||||
prev: undefined as SegmentComponent<ClusterType> | undefined,
|
||||
next: undefined as SegmentComponent<ClusterType> | undefined,
|
||||
}
|
||||
),
|
||||
filter((c) => c.next?.Timestamp! > seekTime),
|
||||
map((c) => c.prev ?? c.next!)
|
||||
);
|
||||
};
|
||||
|
||||
const seekWithCues = (
|
||||
cueSystem: CueSystem,
|
||||
seekTime: number
|
||||
): Observable<SegmentComponent<ClusterType>> => {
|
||||
if (seekTime === 0) {
|
||||
return seekWithoutCues(seekTime);
|
||||
}
|
||||
|
||||
const cuePoint = cueSystem.findClosestCue(seekTime);
|
||||
|
||||
if (!cuePoint) {
|
||||
return seekWithoutCues(seekTime);
|
||||
}
|
||||
|
||||
return createRangedEbmlStream({
|
||||
...matroskaOptions,
|
||||
byteStart: seekSystem.offsetFromSeekPosition(
|
||||
cueSystem.getCueTrackPositions(cuePoint).CueClusterPosition as number
|
||||
),
|
||||
}).pipe(
|
||||
switchMap((req) => req.ebml$),
|
||||
filter(isTagIdPos(EbmlTagIdEnum.Cluster, EbmlTagPosition.End)),
|
||||
map(clusterSystem.addClusterWithTag.bind(clusterSystem))
|
||||
);
|
||||
};
|
||||
|
||||
const seek = (
|
||||
seekTime: number
|
||||
): Observable<SegmentComponent<ClusterType>> => {
|
||||
if (seekTime === 0) {
|
||||
const subscription = loadedCues$.subscribe();
|
||||
|
||||
// if seekTime equals 0 at start, reuse the initial stream
|
||||
return seekWithoutCues(seekTime).pipe(
|
||||
finalize(() => {
|
||||
subscription.unsubscribe();
|
||||
})
|
||||
);
|
||||
}
|
||||
return loadedCues$.pipe(
|
||||
switchMap((segment) => {
|
||||
const cueSystem = segment.cue;
|
||||
if (cueSystem.prepared) {
|
||||
return seekWithCues(cueSystem, seekTime);
|
||||
}
|
||||
return seekWithoutCues(seekTime);
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const videoTrackDecoder = (
|
||||
track: VideoTrackContext,
|
||||
cluster$: Observable<ClusterType>
|
||||
) => {
|
||||
return createVideoDecodeStream(track.configuration).pipe(
|
||||
map(({ decoder, frame$ }) => {
|
||||
const clusterSystem = segment.cluster;
|
||||
const infoSystem = segment.info;
|
||||
const timestampScale = Number(infoSystem.info.TimestampScale) / 1000;
|
||||
|
||||
const decodeSubscription = cluster$.subscribe((cluster) => {
|
||||
for (const block of clusterSystem.enumerateBlocks(
|
||||
cluster,
|
||||
track.trackEntry
|
||||
)) {
|
||||
const blockTime = (Number(cluster.Timestamp) + block.relTime) * timestampScale;
|
||||
const blockDuration =
block.frames.length > 1 ? track.predictBlockDuration(blockTime) * timestampScale : 0;
const perFrameDuration =
block.frames.length > 1 && blockDuration
? blockDuration / block.frames.length
: 0;

let i = 0;
for (const frame of block.frames) {
const chunk = new EncodedVideoChunk({
type: block.keyframe ? 'key' : 'delta',
data: frame,
timestamp: blockTime + perFrameDuration * i,
});
i++;

decoder.decode(chunk);
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
track,
|
||||
decoder,
|
||||
frame$: frame$
|
||||
.pipe(
|
||||
finalize(() => {
|
||||
decodeSubscription.unsubscribe();
|
||||
})
|
||||
)
|
||||
}
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const audioTrackDecoder = (
|
||||
track: AudioTrackContext,
|
||||
cluster$: Observable<ClusterType>
|
||||
) => {
|
||||
return createAudioDecodeStream(track.configuration).pipe(
|
||||
map(({ decoder, frame$ }) => {
|
||||
const clusterSystem = segment.cluster;
|
||||
const infoSystem = segment.info;
|
||||
const timestampScale = Number(infoSystem.info.TimestampScale) / 1000;
|
||||
|
||||
const decodeSubscription = cluster$.subscribe((cluster) => {
|
||||
for (const block of clusterSystem.enumerateBlocks(
|
||||
cluster,
|
||||
track.trackEntry
|
||||
)) {
|
||||
const blockTime = (Number(cluster.Timestamp) + block.relTime) * timestampScale;
|
||||
const blockDuration =
block.frames.length > 1 ? track.predictBlockDuration(blockTime) : 0;
const perFrameDuration =
block.frames.length > 1 && blockDuration
? blockDuration / block.frames.length
: 0;
|
||||
|
||||
let i = 0;
|
||||
for (const frame of block.frames) {
|
||||
const chunk = new EncodedAudioChunk({
|
||||
type: block.keyframe ? 'key' : 'delta',
|
||||
data: frame,
|
||||
timestamp: blockTime + perFrameDuration * i,
|
||||
});
|
||||
i++;
|
||||
|
||||
decoder.decode(chunk);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
track,
|
||||
decoder,
|
||||
frame$: frame$.pipe(finalize(() => decodeSubscription.unsubscribe())),
|
||||
};
|
||||
}));
|
||||
};
|
||||
|
||||
const defaultVideoTrack$ = loadedMetadata$.pipe(
|
||||
map((segment) =>
|
||||
segment.track.getTrackContext<VideoTrackContext>({
|
||||
predicate: (track) =>
|
||||
track.TrackType === TrackTypeRestrictionEnum.VIDEO &&
|
||||
standardTrackPredicate(track),
|
||||
priority: standardTrackPriority,
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
const defaultAudioTrack$ = loadedMetadata$.pipe(
|
||||
map((segment) =>
|
||||
segment.track.getTrackContext<AudioTrackContext>({
|
||||
predicate: (track) =>
|
||||
track.TrackType === TrackTypeRestrictionEnum.AUDIO &&
|
||||
standardTrackPredicate(track),
|
||||
priority: standardTrackPriority,
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
return {
|
||||
startTag,
|
||||
segment,
|
||||
loadedMetadata$,
|
||||
loadedTags$,
|
||||
loadedCues$,
|
||||
seek,
|
||||
videoTrackDecoder,
|
||||
audioTrackDecoder,
|
||||
defaultVideoTrack$,
|
||||
defaultAudioTrack$
|
||||
};
|
||||
}
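
To show how the model composes, a sketch of decoding the default video track (the wiring is an assumption; switchMap and EMPTY as imported above):

function decodeDefaultVideoTrack(model: MatroskaSegmentModel, seekTime: number) {
  const cluster$ = model.seek(seekTime);
  return model.defaultVideoTrack$.pipe(
    switchMap((track) =>
      track ? model.videoTrackDecoder(track, cluster$) : EMPTY
    ),
    // flatten to the raw VideoFrame stream
    switchMap(({ frame$ }) => frame$)
  );
}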
|
@ -1,25 +1,28 @@
|
||||
import { type, match } from 'arktype';
|
||||
import { EbmlTagIdEnum, EbmlSimpleBlockTag ,EbmlBlockTag } from 'konoebml';
|
||||
import { EbmlTagIdEnum, EbmlSimpleBlockTag, EbmlBlockTag } from 'konoebml';
|
||||
|
||||
export const BinarySchema = type.instanceOf(Uint8Array);
|
||||
export type BinaryType = typeof BinarySchema.infer;
|
||||
export const SimpleBlockSchema = type.instanceOf(EbmlSimpleBlockTag);
|
||||
export const BlockSchema = type.instanceOf(EbmlBlockTag);
|
||||
export type SimpleBlockType = typeof SimpleBlockSchema.infer;
|
||||
export type BlockType = typeof BlockSchema.infer;
|
||||
|
||||
export const DocTypeExtensionSchema = type({
|
||||
DocTypeExtensionName: type.string,
|
||||
DocTypeExtensionVersion: type.number,
|
||||
DocTypeExtensionVersion: type.number.or(type.bigint),
|
||||
});
|
||||
|
||||
export type DocTypeExtensionType = typeof DocTypeExtensionSchema.infer;
|
||||
|
||||
export const EBMLSchema = type({
|
||||
EBMLVersion: type.number.default(1),
|
||||
EBMLReadVersion: type.number.default(1),
|
||||
EBMLMaxIDLength: type.number.default(4),
|
||||
EBMLMaxSizeLength: type.number.default(8),
|
||||
EBMLVersion: type.number.or(type.bigint).default(1),
|
||||
EBMLReadVersion: type.number.or(type.bigint).default(1),
|
||||
EBMLMaxIDLength: type.number.or(type.bigint).default(4),
|
||||
EBMLMaxSizeLength: type.number.or(type.bigint).default(8),
|
||||
DocType: type.string,
|
||||
DocTypeVersion: type.number.default(1),
|
||||
DocTypeReadVersion: type.number.default(1),
|
||||
DocTypeVersion: type.number.or(type.bigint).default(1),
|
||||
DocTypeReadVersion: type.number.or(type.bigint).default(1),
|
||||
DocTypeExtension: DocTypeExtensionSchema.array().optional(),
|
||||
});
|
||||
|
||||
@ -27,7 +30,7 @@ export type EBMLType = typeof EBMLSchema.infer;
|
||||
|
||||
export const SeekSchema = type({
|
||||
SeekID: BinarySchema,
|
||||
SeekPosition: type.number,
|
||||
SeekPosition: type.number.or(type.bigint),
|
||||
});
|
||||
|
||||
export type SeekType = typeof SeekSchema.infer;
|
||||
@ -40,8 +43,8 @@ export type SeekHeadType = typeof SeekHeadSchema.infer;
|
||||
|
||||
export const ChapterTranslateSchema = type({
|
||||
ChapterTranslateID: BinarySchema,
|
||||
ChapterTranslateCodec: type.number,
|
||||
ChapterTranslateEditionUID: type.number.array().optional(),
|
||||
ChapterTranslateCodec: type.number.or(type.bigint),
|
||||
ChapterTranslateEditionUID: type.number.or(type.bigint).array().optional(),
|
||||
});
|
||||
|
||||
export type ChapterTranslateType = typeof ChapterTranslateSchema.infer;
|
||||
@ -55,7 +58,7 @@ export const InfoSchema = type({
|
||||
NextFilename: type.string.optional(),
|
||||
SegmentFamily: BinarySchema.array().optional(),
|
||||
ChapterTranslate: ChapterTranslateSchema.array().optional(),
|
||||
TimestampScale: type.number.default(1000000),
|
||||
TimestampScale: type.number.or(type.bigint).default(1000000),
|
||||
Duration: type.number.optional(),
|
||||
DateUTC: BinarySchema.optional(),
|
||||
Title: type.string.optional(),
|
||||
@ -66,14 +69,14 @@ export const InfoSchema = type({
|
||||
export type InfoType = typeof InfoSchema.infer;
|
||||
|
||||
export const SilentTracksSchema = type({
|
||||
SilentTrackNumber: type.number.array().optional(),
|
||||
SilentTrackNumber: type.number.or(type.bigint).array().optional(),
|
||||
});
|
||||
|
||||
export type SilentTracksType = typeof SilentTracksSchema.infer;
|
||||
|
||||
export const BlockMoreSchema = type({
|
||||
BlockAdditional: BinarySchema,
|
||||
BlockAddID: type.number.default(1),
|
||||
BlockAddID: type.number.or(type.bigint).default(1),
|
||||
});
|
||||
|
||||
export type BlockMoreType = typeof BlockMoreSchema.infer;
|
||||
@ -85,11 +88,11 @@ export const BlockAdditionsSchema = type({
|
||||
export type BlockAdditionsType = typeof BlockAdditionsSchema.infer;
|
||||
|
||||
export const TimeSliceSchema = type({
|
||||
LaceNumber: type.number.optional(),
|
||||
FrameNumber: type.number.default(0),
|
||||
BlockAdditionID: type.number.default(0),
|
||||
Delay: type.number.default(0),
|
||||
SliceDuration: type.number.default(0),
|
||||
LaceNumber: type.number.or(type.bigint).optional(),
|
||||
FrameNumber: type.number.or(type.bigint).default(0),
|
||||
BlockAdditionID: type.number.or(type.bigint).default(0),
|
||||
Delay: type.number.or(type.bigint).default(0),
|
||||
SliceDuration: type.number.or(type.bigint).default(0),
|
||||
});
|
||||
|
||||
export type TimeSliceType = typeof TimeSliceSchema.infer;
|
||||
@ -101,8 +104,8 @@ export const SlicesSchema = type({
|
||||
export type SlicesType = typeof SlicesSchema.infer;
|
||||
|
||||
export const ReferenceFrameSchema = type({
|
||||
ReferenceOffset: type.number,
|
||||
ReferenceTimestamp: type.number,
|
||||
ReferenceOffset: type.number.or(type.bigint),
|
||||
ReferenceTimestamp: type.number.or(type.bigint),
|
||||
});
|
||||
|
||||
export type ReferenceFrameType = typeof ReferenceFrameSchema.infer;
|
||||
@ -111,12 +114,12 @@ export const BlockGroupSchema = type({
|
||||
Block: BlockSchema,
|
||||
BlockVirtual: BinarySchema.optional(),
|
||||
BlockAdditions: BlockAdditionsSchema.optional(),
|
||||
BlockDuration: type.number.optional(),
|
||||
ReferencePriority: type.number.default(0),
|
||||
ReferenceBlock: type.number.array().optional(),
|
||||
ReferenceVirtual: type.number.optional(),
|
||||
BlockDuration: type.number.or(type.bigint).optional(),
|
||||
ReferencePriority: type.number.or(type.bigint).default(0),
|
||||
ReferenceBlock: type.number.or(type.bigint).array().optional(),
|
||||
ReferenceVirtual: type.number.or(type.bigint).optional(),
|
||||
CodecState: BinarySchema.optional(),
|
||||
DiscardPadding: type.number.optional(),
|
||||
DiscardPadding: type.number.or(type.bigint).optional(),
|
||||
Slices: SlicesSchema.optional(),
|
||||
ReferenceFrame: ReferenceFrameSchema.optional(),
|
||||
});
|
||||
@ -124,10 +127,10 @@ export const BlockGroupSchema = type({
|
||||
export type BlockGroupType = typeof BlockGroupSchema.infer;
|
||||
|
||||
export const ClusterSchema = type({
|
||||
Timestamp: type.number,
|
||||
Timestamp: type.number.or(type.bigint),
|
||||
SilentTracks: SilentTracksSchema.optional(),
|
||||
Position: type.number.optional(),
|
||||
PrevSize: type.number.optional(),
|
||||
Position: type.number.or(type.bigint).optional(),
|
||||
PrevSize: type.number.or(type.bigint).optional(),
|
||||
SimpleBlock: SimpleBlockSchema.array().optional(),
|
||||
BlockGroup: BlockGroupSchema.array().optional(),
|
||||
EncryptedBlock: BinarySchema.array().optional(),
|
||||
@ -136,9 +139,9 @@ export const ClusterSchema = type({
|
||||
export type ClusterType = typeof ClusterSchema.infer;
|
||||
|
||||
export const BlockAdditionMappingSchema = type({
|
||||
BlockAddIDValue: type.number.optional(),
|
||||
BlockAddIDValue: type.number.or(type.bigint).optional(),
|
||||
BlockAddIDName: type.string.optional(),
|
||||
BlockAddIDType: type.number.default(0),
|
||||
BlockAddIDType: type.number.or(type.bigint).default(0),
|
||||
BlockAddIDExtraData: BinarySchema.optional(),
|
||||
});
|
||||
|
||||
@ -146,8 +149,8 @@ export type BlockAdditionMappingType = typeof BlockAdditionMappingSchema.infer;
|
||||
|
||||
export const TrackTranslateSchema = type({
|
||||
TrackTranslateTrackID: BinarySchema,
|
||||
TrackTranslateCodec: type.number,
|
||||
TrackTranslateEditionUID: type.number.array().optional(),
|
||||
TrackTranslateCodec: type.number.or(type.bigint),
|
||||
TrackTranslateEditionUID: type.number.or(type.bigint).array().optional(),
|
||||
});
|
||||
|
||||
export type TrackTranslateType = typeof TrackTranslateSchema.infer;
|
||||
@ -198,9 +201,12 @@ export enum MatrixCoefficientsRestrictionEnum {
|
||||
CHROMA_DERIVED_CONSTANT_LUMINANCE = 13,
|
||||
// ITU-R BT.2100-0
|
||||
ITU_R_BT_2100_0 = 14,
|
||||
};
|
||||
export const MatrixCoefficientsRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14');
|
||||
export type MatrixCoefficientsRestrictionType = typeof MatrixCoefficientsRestriction.infer;
|
||||
}
|
||||
export const MatrixCoefficientsRestriction = type(
|
||||
'0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14'
|
||||
);
|
||||
export type MatrixCoefficientsRestrictionType =
|
||||
typeof MatrixCoefficientsRestriction.infer;
|
||||
|
||||
export enum ChromaSitingHorzRestrictionEnum {
|
||||
// unspecified
|
||||
@ -209,9 +215,10 @@ export enum ChromaSitingHorzRestrictionEnum {
|
||||
LEFT_COLLOCATED = 1,
|
||||
// half
|
||||
HALF = 2,
|
||||
};
|
||||
}
|
||||
export const ChromaSitingHorzRestriction = type('0 | 1 | 2');
|
||||
export type ChromaSitingHorzRestrictionType = typeof ChromaSitingHorzRestriction.infer;
|
||||
export type ChromaSitingHorzRestrictionType =
|
||||
typeof ChromaSitingHorzRestriction.infer;
|
||||
|
||||
export enum ChromaSitingVertRestrictionEnum {
|
||||
// unspecified
|
||||
@ -220,9 +227,10 @@ export enum ChromaSitingVertRestrictionEnum {
|
||||
TOP_COLLOCATED = 1,
|
||||
// half
|
||||
HALF = 2,
|
||||
};
|
||||
}
|
||||
export const ChromaSitingVertRestriction = type('0 | 1 | 2');
|
||||
export type ChromaSitingVertRestrictionType = typeof ChromaSitingVertRestriction.infer;
|
||||
export type ChromaSitingVertRestrictionType =
|
||||
typeof ChromaSitingVertRestriction.infer;
|
||||
|
||||
export enum RangeRestrictionEnum {
|
||||
// unspecified
|
||||
@ -233,7 +241,7 @@ export enum RangeRestrictionEnum {
|
||||
FULL_RANGE_NO_CLIPPING = 2,
|
||||
// defined by MatrixCoefficients / TransferCharacteristics
|
||||
DEFINED_BY_MATRIX_COEFFICIENTS_TRANSFER_CHARACTERISTICS = 3,
|
||||
};
|
||||
}
|
||||
export const RangeRestriction = type('0 | 1 | 2 | 3');
|
||||
export type RangeRestrictionType = typeof RangeRestriction.infer;
|
||||
|
||||
@ -276,9 +284,12 @@ export enum TransferCharacteristicsRestrictionEnum {
|
||||
SMPTE_ST_428_1 = 17,
|
||||
// ARIB STD-B67 (HLG)
|
||||
ARIB_STD_B67_HLG = 18,
|
||||
};
|
||||
export const TransferCharacteristicsRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18');
|
||||
export type TransferCharacteristicsRestrictionType = typeof TransferCharacteristicsRestriction.infer;
|
||||
}
|
||||
export const TransferCharacteristicsRestriction = type(
|
||||
'0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18'
|
||||
);
|
||||
export type TransferCharacteristicsRestrictionType =
|
||||
typeof TransferCharacteristicsRestriction.infer;
|
||||
|
||||
export enum PrimariesRestrictionEnum {
|
||||
// reserved
|
||||
@ -309,24 +320,26 @@ export enum PrimariesRestrictionEnum {
|
||||
SMPTE_EG_432_2 = 12,
|
||||
// EBU Tech. 3213-E - JEDEC P22 phosphors
|
||||
EBU_TECH_3213_E_JEDEC_P22_PHOSPHORS = 22,
|
||||
};
|
||||
export const PrimariesRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 22');
|
||||
}
|
||||
export const PrimariesRestriction = type(
|
||||
'0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 22'
|
||||
);
|
||||
export type PrimariesRestrictionType = typeof PrimariesRestriction.infer;
|
||||
|
||||
export const ColourSchema = type({
|
||||
MatrixCoefficients: MatrixCoefficientsRestriction.default(2),
|
||||
BitsPerChannel: type.number.default(0),
|
||||
ChromaSubsamplingHorz: type.number.optional(),
|
||||
ChromaSubsamplingVert: type.number.optional(),
|
||||
CbSubsamplingHorz: type.number.optional(),
|
||||
CbSubsamplingVert: type.number.optional(),
|
||||
BitsPerChannel: type.number.or(type.bigint).default(0),
|
||||
ChromaSubsamplingHorz: type.number.or(type.bigint).optional(),
|
||||
ChromaSubsamplingVert: type.number.or(type.bigint).optional(),
|
||||
CbSubsamplingHorz: type.number.or(type.bigint).optional(),
|
||||
CbSubsamplingVert: type.number.or(type.bigint).optional(),
|
||||
ChromaSitingHorz: ChromaSitingHorzRestriction.default(0),
|
||||
ChromaSitingVert: ChromaSitingVertRestriction.default(0),
|
||||
Range: RangeRestriction.default(0),
|
||||
TransferCharacteristics: TransferCharacteristicsRestriction.default(2),
|
||||
Primaries: PrimariesRestriction.default(2),
|
||||
MaxCLL: type.number.optional(),
|
||||
MaxFALL: type.number.optional(),
|
||||
MaxCLL: type.number.or(type.bigint).optional(),
|
||||
MaxFALL: type.number.or(type.bigint).optional(),
|
||||
MasteringMetadata: MasteringMetadataSchema.optional(),
|
||||
});
|
||||
|
||||
@ -341,9 +354,10 @@ export enum ProjectionTypeRestrictionEnum {
|
||||
CUBEMAP = 2,
|
||||
// mesh
|
||||
MESH = 3,
|
||||
};
|
||||
}
|
||||
export const ProjectionTypeRestriction = type('0 | 1 | 2 | 3');
|
||||
export type ProjectionTypeRestrictionType = typeof ProjectionTypeRestriction.infer;
|
||||
export type ProjectionTypeRestrictionType =
|
||||
typeof ProjectionTypeRestriction.infer;
|
||||
|
||||
export const ProjectionSchema = type({
|
||||
ProjectionType: ProjectionTypeRestriction.default(0),
|
||||
@ -362,9 +376,10 @@ export enum FlagInterlacedRestrictionEnum {
|
||||
INTERLACED = 1,
|
||||
// progressive
|
||||
PROGRESSIVE = 2,
|
||||
};
|
||||
}
|
||||
export const FlagInterlacedRestriction = type('0 | 1 | 2');
|
||||
export type FlagInterlacedRestrictionType = typeof FlagInterlacedRestriction.infer;
|
||||
export type FlagInterlacedRestrictionType =
|
||||
typeof FlagInterlacedRestriction.infer;
|
||||
|
||||
export enum FieldOrderRestrictionEnum {
|
||||
// progressive
|
||||
@ -379,7 +394,7 @@ export enum FieldOrderRestrictionEnum {
|
||||
TFF_INTERLEAVED = 9,
|
||||
// bff (interleaved)
|
||||
BFF_INTERLEAVED = 14,
|
||||
};
|
||||
}
|
||||
export const FieldOrderRestriction = type('0 | 1 | 2 | 6 | 9 | 14');
|
||||
export type FieldOrderRestrictionType = typeof FieldOrderRestriction.infer;
|
||||
|
||||
@ -414,8 +429,10 @@ export enum StereoModeRestrictionEnum {
|
||||
BOTH_EYES_LACED_IN_ONE_BLOCK_LEFT_EYE_IS_FIRST = 13,
|
||||
// both eyes laced in one Block (right eye is first)
|
||||
BOTH_EYES_LACED_IN_ONE_BLOCK_RIGHT_EYE_IS_FIRST = 14,
|
||||
};
|
||||
export const StereoModeRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14');
|
||||
}
|
||||
export const StereoModeRestriction = type(
|
||||
'0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14'
|
||||
);
|
||||
export type StereoModeRestrictionType = typeof StereoModeRestriction.infer;
|
||||
|
||||
export enum AlphaModeRestrictionEnum {
|
||||
@ -423,7 +440,7 @@ export enum AlphaModeRestrictionEnum {
|
||||
NONE = 0,
|
||||
// present
|
||||
PRESENT = 1,
|
||||
};
|
||||
}
|
||||
export const AlphaModeRestriction = type('0 | 1');
|
||||
export type AlphaModeRestrictionType = typeof AlphaModeRestriction.infer;
|
||||
|
||||
@ -436,9 +453,10 @@ export enum OldStereoModeRestrictionEnum {
|
||||
LEFT_EYE = 2,
|
||||
// both eyes
|
||||
BOTH_EYES = 3,
|
||||
};
|
||||
}
|
||||
export const OldStereoModeRestriction = type('0 | 1 | 2 | 3');
|
||||
export type OldStereoModeRestrictionType = typeof OldStereoModeRestriction.infer;
|
||||
export type OldStereoModeRestrictionType =
|
||||
typeof OldStereoModeRestriction.infer;
|
||||
|
||||
export enum DisplayUnitRestrictionEnum {
|
||||
// pixels
|
||||
@ -451,7 +469,7 @@ export enum DisplayUnitRestrictionEnum {
|
||||
DISPLAY_ASPECT_RATIO = 3,
|
||||
// unknown
|
||||
UNKNOWN = 4,
|
||||
};
|
||||
}
|
||||
export const DisplayUnitRestriction = type('0 | 1 | 2 | 3 | 4');
|
||||
export type DisplayUnitRestrictionType = typeof DisplayUnitRestriction.infer;
|
||||
|
||||
@ -462,9 +480,10 @@ export enum AspectRatioTypeRestrictionEnum {
|
||||
KEEP_ASPECT_RATIO = 1,
|
||||
// fixed
|
||||
FIXED = 2,
|
||||
};
|
||||
}
|
||||
export const AspectRatioTypeRestriction = type('0 | 1 | 2');
|
||||
export type AspectRatioTypeRestrictionType = typeof AspectRatioTypeRestriction.infer;
|
||||
export type AspectRatioTypeRestrictionType =
|
||||
typeof AspectRatioTypeRestriction.infer;
|
||||
|
||||
export const VideoSchema = type({
|
||||
FlagInterlaced: FlagInterlacedRestriction.default(0),
|
||||
@ -472,14 +491,14 @@ export const VideoSchema = type({
|
||||
StereoMode: StereoModeRestriction.default(0),
|
||||
AlphaMode: AlphaModeRestriction.default(0),
|
||||
OldStereoMode: OldStereoModeRestriction.optional(),
|
||||
PixelWidth: type.number,
|
||||
PixelHeight: type.number,
|
||||
PixelCropBottom: type.number.default(0),
|
||||
PixelCropTop: type.number.default(0),
|
||||
PixelCropLeft: type.number.default(0),
|
||||
PixelCropRight: type.number.default(0),
|
||||
DisplayWidth: type.number.optional(),
|
||||
DisplayHeight: type.number.optional(),
|
||||
PixelWidth: type.number.or(type.bigint),
|
||||
PixelHeight: type.number.or(type.bigint),
|
||||
PixelCropBottom: type.number.or(type.bigint).default(0),
|
||||
PixelCropTop: type.number.or(type.bigint).default(0),
|
||||
PixelCropLeft: type.number.or(type.bigint).default(0),
|
||||
PixelCropRight: type.number.or(type.bigint).default(0),
|
||||
DisplayWidth: type.number.or(type.bigint).optional(),
|
||||
DisplayHeight: type.number.or(type.bigint).optional(),
|
||||
DisplayUnit: DisplayUnitRestriction.default(0),
|
||||
AspectRatioType: AspectRatioTypeRestriction.default(0),
|
||||
UncompressedFourCC: BinarySchema.optional(),
|
||||
@ -518,16 +537,18 @@ export enum EmphasisRestrictionEnum {
|
||||
PHONO_LONDON = 15,
|
||||
// Phono NARTB
|
||||
PHONO_NARTB = 16,
|
||||
};
|
||||
export const EmphasisRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 10 | 11 | 12 | 13 | 14 | 15 | 16');
|
||||
}
|
||||
export const EmphasisRestriction = type(
|
||||
'0 | 1 | 2 | 3 | 4 | 5 | 10 | 11 | 12 | 13 | 14 | 15 | 16'
|
||||
);
|
||||
export type EmphasisRestrictionType = typeof EmphasisRestriction.infer;
|
||||
|
||||
export const AudioSchema = type({
|
||||
SamplingFrequency: type.number.default(0),
|
||||
OutputSamplingFrequency: type.number.optional(),
|
||||
Channels: type.number.default(1),
|
||||
Channels: type.number.or(type.bigint).default(1),
|
||||
ChannelPositions: BinarySchema.optional(),
|
||||
BitDepth: type.number.optional(),
|
||||
BitDepth: type.number.or(type.bigint).optional(),
|
||||
Emphasis: EmphasisRestriction.default(0),
|
||||
});
|
||||
|
||||
@ -540,12 +561,13 @@ export enum TrackPlaneTypeRestrictionEnum {
|
||||
RIGHT_EYE = 1,
|
||||
// background
|
||||
BACKGROUND = 2,
|
||||
};
|
||||
}
|
||||
export const TrackPlaneTypeRestriction = type('0 | 1 | 2');
|
||||
export type TrackPlaneTypeRestrictionType = typeof TrackPlaneTypeRestriction.infer;
|
||||
export type TrackPlaneTypeRestrictionType =
|
||||
typeof TrackPlaneTypeRestriction.infer;
|
||||
|
||||
export const TrackPlaneSchema = type({
|
||||
TrackPlaneUID: type.number,
|
||||
TrackPlaneUID: type.number.or(type.bigint),
|
||||
TrackPlaneType: TrackPlaneTypeRestriction,
|
||||
});
|
||||
|
||||
@ -558,7 +580,7 @@ export const TrackCombinePlanesSchema = type({
|
||||
export type TrackCombinePlanesType = typeof TrackCombinePlanesSchema.infer;
|
||||
|
||||
export const TrackJoinBlocksSchema = type({
|
||||
TrackJoinUID: type.number.array().atLeastLength(1),
|
||||
TrackJoinUID: type.number.or(type.bigint).array().atLeastLength(1),
|
||||
});
|
||||
|
||||
export type TrackJoinBlocksType = typeof TrackJoinBlocksSchema.infer;
|
||||
@ -579,9 +601,10 @@ export enum ContentCompAlgoRestrictionEnum {
|
||||
LZO1X = 2,
|
||||
// Header Stripping
|
||||
HEADER_STRIPPING = 3,
|
||||
};
|
||||
}
|
||||
export const ContentCompAlgoRestriction = type('0 | 1 | 2 | 3');
|
||||
export type ContentCompAlgoRestrictionType = typeof ContentCompAlgoRestriction.infer;
|
||||
export type ContentCompAlgoRestrictionType =
|
||||
typeof ContentCompAlgoRestriction.infer;
|
||||
|
||||
export const ContentCompressionSchema = type({
|
||||
ContentCompAlgo: ContentCompAlgoRestriction.default(0),
|
||||
@ -595,15 +618,17 @@ export enum AESSettingsCipherModeRestrictionEnum {
|
||||
AES_CTR = 1,
|
||||
// AES-CBC
|
||||
AES_CBC = 2,
|
||||
};
|
||||
}
|
||||
export const AESSettingsCipherModeRestriction = type('1 | 2');
|
||||
export type AESSettingsCipherModeRestrictionType = typeof AESSettingsCipherModeRestriction.infer;
|
||||
export type AESSettingsCipherModeRestrictionType =
|
||||
typeof AESSettingsCipherModeRestriction.infer;
|
||||
|
||||
export const ContentEncAESSettingsSchema = type({
|
||||
AESSettingsCipherMode: AESSettingsCipherModeRestriction,
|
||||
});
|
||||
|
||||
export type ContentEncAESSettingsType = typeof ContentEncAESSettingsSchema.infer;
|
||||
export type ContentEncAESSettingsType =
|
||||
typeof ContentEncAESSettingsSchema.infer;
|
||||
|
||||
export enum ContentEncAlgoRestrictionEnum {
|
||||
// Not encrypted
|
||||
@ -618,18 +643,20 @@ export enum ContentEncAlgoRestrictionEnum {
|
||||
BLOWFISH = 4,
|
||||
// AES
|
||||
AES = 5,
|
||||
};
|
||||
}
|
||||
export const ContentEncAlgoRestriction = type('0 | 1 | 2 | 3 | 4 | 5');
|
||||
export type ContentEncAlgoRestrictionType = typeof ContentEncAlgoRestriction.infer;
|
||||
export type ContentEncAlgoRestrictionType =
|
||||
typeof ContentEncAlgoRestriction.infer;
|
||||
|
||||
export enum ContentSigAlgoRestrictionEnum {
|
||||
// Not signed
|
||||
NOT_SIGNED = 0,
|
||||
// RSA
|
||||
RSA = 1,
|
||||
};
|
||||
}
|
||||
export const ContentSigAlgoRestriction = type('0 | 1');
|
||||
export type ContentSigAlgoRestrictionType = typeof ContentSigAlgoRestriction.infer;
|
||||
export type ContentSigAlgoRestrictionType =
|
||||
typeof ContentSigAlgoRestriction.infer;
|
||||
|
||||
export enum ContentSigHashAlgoRestrictionEnum {
|
||||
// Not signed
|
||||
@ -638,9 +665,10 @@ export enum ContentSigHashAlgoRestrictionEnum {
|
||||
SHA1_160 = 1,
|
||||
// MD5
|
||||
MD5 = 2,
|
||||
};
|
||||
}
|
||||
export const ContentSigHashAlgoRestriction = type('0 | 1 | 2');
|
||||
export type ContentSigHashAlgoRestrictionType = typeof ContentSigHashAlgoRestriction.infer;
|
||||
export type ContentSigHashAlgoRestrictionType =
|
||||
typeof ContentSigHashAlgoRestriction.infer;
|
||||
|
||||
export const ContentEncryptionSchema = type({
|
||||
ContentEncAlgo: ContentEncAlgoRestriction.default(0),
|
||||
@ -661,21 +689,23 @@ export enum ContentEncodingScopeRestrictionEnum {
|
||||
PRIVATE = 2,
|
||||
// Next
|
||||
NEXT = 4,
|
||||
};
|
||||
}
|
||||
export const ContentEncodingScopeRestriction = type('1 | 2 | 4');
|
||||
export type ContentEncodingScopeRestrictionType = typeof ContentEncodingScopeRestriction.infer;
|
||||
export type ContentEncodingScopeRestrictionType =
|
||||
typeof ContentEncodingScopeRestriction.infer;
|
||||
|
||||
export enum ContentEncodingTypeRestrictionEnum {
|
||||
// Compression
|
||||
COMPRESSION = 0,
|
||||
// Encryption
|
||||
ENCRYPTION = 1,
|
||||
};
|
||||
}
|
||||
export const ContentEncodingTypeRestriction = type('0 | 1');
|
||||
export type ContentEncodingTypeRestrictionType = typeof ContentEncodingTypeRestriction.infer;
|
||||
export type ContentEncodingTypeRestrictionType =
|
||||
typeof ContentEncodingTypeRestriction.infer;
|
||||
|
||||
export const ContentEncodingSchema = type({
|
||||
ContentEncodingOrder: type.number.default(0),
|
||||
ContentEncodingOrder: type.number.or(type.bigint).default(0),
|
||||
ContentEncodingScope: ContentEncodingScopeRestriction.default(1),
|
||||
ContentEncodingType: ContentEncodingTypeRestriction.default(0),
|
||||
ContentCompression: ContentCompressionSchema.optional(),
|
||||
@ -707,53 +737,53 @@ export enum TrackTypeRestrictionEnum {
|
||||
CONTROL = 32,
|
||||
// metadata
|
||||
METADATA = 33,
|
||||
};
|
||||
}
|
||||
export const TrackTypeRestriction = type('1 | 2 | 3 | 16 | 17 | 18 | 32 | 33');
|
||||
export type TrackTypeRestrictionType = typeof TrackTypeRestriction.infer;
|
||||
|
||||
export const TrackEntrySchema = type({
|
||||
TrackNumber: type.number,
|
||||
TrackUID: type.number,
|
||||
TrackNumber: type.number.or(type.bigint),
|
||||
TrackUID: type.number.or(type.bigint),
|
||||
TrackType: TrackTypeRestriction,
|
||||
FlagEnabled: type.number.default(1),
|
||||
FlagDefault: type.number.default(1),
|
||||
FlagForced: type.number.default(0),
|
||||
FlagHearingImpaired: type.number.optional(),
|
||||
FlagVisualImpaired: type.number.optional(),
|
||||
FlagTextDescriptions: type.number.optional(),
|
||||
FlagOriginal: type.number.optional(),
|
||||
FlagCommentary: type.number.optional(),
|
||||
FlagLacing: type.number.default(1),
|
||||
MinCache: type.number.default(0),
|
||||
MaxCache: type.number.optional(),
|
||||
DefaultDuration: type.number.optional(),
|
||||
DefaultDecodedFieldDuration: type.number.optional(),
|
||||
FlagEnabled: type.number.or(type.bigint).default(1),
|
||||
FlagDefault: type.number.or(type.bigint).default(1),
|
||||
FlagForced: type.number.or(type.bigint).default(0),
|
||||
FlagHearingImpaired: type.number.or(type.bigint).optional(),
|
||||
FlagVisualImpaired: type.number.or(type.bigint).optional(),
|
||||
FlagTextDescriptions: type.number.or(type.bigint).optional(),
|
||||
FlagOriginal: type.number.or(type.bigint).optional(),
|
||||
FlagCommentary: type.number.or(type.bigint).optional(),
|
||||
FlagLacing: type.number.or(type.bigint).default(1),
|
||||
MinCache: type.number.or(type.bigint).default(0),
|
||||
MaxCache: type.number.or(type.bigint).optional(),
|
||||
DefaultDuration: type.number.or(type.bigint).optional(),
|
||||
DefaultDecodedFieldDuration: type.number.or(type.bigint).optional(),
|
||||
TrackTimestampScale: type.number.default(0),
|
||||
TrackOffset: type.number.default(0),
|
||||
MaxBlockAdditionID: type.number.default(0),
|
||||
TrackOffset: type.number.or(type.bigint).default(0),
|
||||
MaxBlockAdditionID: type.number.or(type.bigint).default(0),
|
||||
BlockAdditionMapping: BlockAdditionMappingSchema.array().optional(),
|
||||
Name: type.string.optional(),
|
||||
Language: type.string.default("eng"),
|
||||
Language: type.string.default('eng'),
|
||||
LanguageBCP47: type.string.optional(),
|
||||
CodecID: type.string,
|
||||
CodecPrivate: BinarySchema.optional(),
|
||||
CodecName: type.string.optional(),
|
||||
AttachmentLink: type.number.optional(),
|
||||
AttachmentLink: type.number.or(type.bigint).optional(),
|
||||
CodecSettings: type.string.optional(),
|
||||
CodecInfoURL: type.string.array().optional(),
|
||||
CodecDownloadURL: type.string.array().optional(),
|
||||
CodecDecodeAll: type.number.default(1),
|
||||
TrackOverlay: type.number.array().optional(),
|
||||
CodecDelay: type.number.default(0),
|
||||
SeekPreRoll: type.number.default(0),
|
||||
CodecDecodeAll: type.number.or(type.bigint).default(1),
|
||||
TrackOverlay: type.number.or(type.bigint).array().optional(),
|
||||
CodecDelay: type.number.or(type.bigint).default(0),
|
||||
SeekPreRoll: type.number.or(type.bigint).default(0),
|
||||
TrackTranslate: TrackTranslateSchema.array().optional(),
|
||||
Video: VideoSchema.optional(),
|
||||
Audio: AudioSchema.optional(),
|
||||
TrackOperation: TrackOperationSchema.optional(),
|
||||
TrickTrackUID: type.number.optional(),
|
||||
TrickTrackUID: type.number.or(type.bigint).optional(),
|
||||
TrickTrackSegmentUID: BinarySchema.optional(),
|
||||
TrickTrackFlag: type.number.default(0),
|
||||
TrickMasterTrackUID: type.number.optional(),
|
||||
TrickTrackFlag: type.number.or(type.bigint).default(0),
|
||||
TrickMasterTrackUID: type.number.or(type.bigint).optional(),
|
||||
TrickMasterTrackSegmentUID: BinarySchema.optional(),
|
||||
ContentEncodings: ContentEncodingsSchema.optional(),
|
||||
});
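The recurring change in this schema is type.number becoming type.number.or(type.bigint): EBML unsigned integers can be up to 8 bytes wide, so values beyond Number.MAX_SAFE_INTEGER are only representable as bigint. A hedged validation sketch (editor's illustration; it assumes defaulted keys such as FlagEnabled may be omitted on input, and the sample values are made up):

import { ArkErrors } from 'arktype';
import { TrackEntrySchema } from '../schema';

const entry = TrackEntrySchema({
  TrackNumber: 1,
  TrackUID: 9007199254740993n, // 2^53 + 1, representable only as bigint
  TrackType: 2,                // audio, one of '1 | 2 | 3 | 16 | 17 | 18 | 32 | 33'
  CodecID: 'A_OPUS',
});
if (entry instanceof ArkErrors) {
  console.error(entry.flatProblemsByPath);
}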
|
||||
@ -767,28 +797,28 @@ export const TracksSchema = type({
|
||||
export type TracksType = typeof TracksSchema.infer;
|
||||
|
||||
export const CueReferenceSchema = type({
|
||||
CueRefTime: type.number,
|
||||
CueRefCluster: type.number,
|
||||
CueRefNumber: type.number.default(1),
|
||||
CueRefCodecState: type.number.default(0),
|
||||
CueRefTime: type.number.or(type.bigint),
|
||||
CueRefCluster: type.number.or(type.bigint),
|
||||
CueRefNumber: type.number.or(type.bigint).default(1),
|
||||
CueRefCodecState: type.number.or(type.bigint).default(0),
|
||||
});
|
||||
|
||||
export type CueReferenceType = typeof CueReferenceSchema.infer;
|
||||
|
||||
export const CueTrackPositionsSchema = type({
|
||||
CueTrack: type.number,
|
||||
CueClusterPosition: type.number,
|
||||
CueRelativePosition: type.number.optional(),
|
||||
CueDuration: type.number.optional(),
|
||||
CueBlockNumber: type.number.optional(),
|
||||
CueCodecState: type.number.default(0),
|
||||
CueTrack: type.number.or(type.bigint),
|
||||
CueClusterPosition: type.number.or(type.bigint),
|
||||
CueRelativePosition: type.number.or(type.bigint).optional(),
|
||||
CueDuration: type.number.or(type.bigint).optional(),
|
||||
CueBlockNumber: type.number.or(type.bigint).optional(),
|
||||
CueCodecState: type.number.or(type.bigint).default(0),
|
||||
CueReference: CueReferenceSchema.array().optional(),
|
||||
});
|
||||
|
||||
export type CueTrackPositionsType = typeof CueTrackPositionsSchema.infer;
|
||||
|
||||
export const CuePointSchema = type({
|
||||
CueTime: type.number,
|
||||
CueTime: type.number.or(type.bigint),
|
||||
CueTrackPositions: CueTrackPositionsSchema.array().atLeastLength(1),
|
||||
});
|
||||
|
||||
@ -805,10 +835,10 @@ export const AttachedFileSchema = type({
|
||||
FileName: type.string,
|
||||
FileMediaType: type.string,
|
||||
FileData: BinarySchema,
|
||||
FileUID: type.number,
|
||||
FileUID: type.number.or(type.bigint),
|
||||
FileReferral: BinarySchema.optional(),
|
||||
FileUsedStartTime: type.number.optional(),
|
||||
FileUsedEndTime: type.number.optional(),
|
||||
FileUsedStartTime: type.number.or(type.bigint).optional(),
|
||||
FileUsedEndTime: type.number.or(type.bigint).optional(),
|
||||
});
|
||||
|
||||
export type AttachedFileType = typeof AttachedFileSchema.infer;
|
||||
@ -826,13 +856,110 @@ export const EditionDisplaySchema = type({
|
||||
|
||||
export type EditionDisplayType = typeof EditionDisplaySchema.infer;
|
||||
|
||||
export const EditionEntrySchema = type({
|
||||
EditionUID: type.number.optional(),
|
||||
EditionFlagHidden: type.number.default(0),
|
||||
EditionFlagDefault: type.number.default(0),
|
||||
EditionFlagOrdered: type.number.default(0),
|
||||
EditionDisplay: EditionDisplaySchema.array().optional(),
|
||||
export const ChapterTrackSchema = type({
|
||||
ChapterTrackUID: type.number.or(type.bigint).array().atLeastLength(1),
|
||||
});
|
||||
|
||||
export type ChapterTrackType = typeof ChapterTrackSchema.infer;
|
||||
|
||||
export const ChapLanguageSchema = match({
|
||||
'string[]': (v) => (v.length > 0 ? v : ['eng']),
|
||||
default: () => ['eng'],
|
||||
}).optional();
|
||||
|
||||
export const ChapterDisplaySchema = type({
|
||||
ChapString: type.string,
|
||||
ChapLanguage: ChapLanguageSchema,
|
||||
ChapLanguageBCP47: type.string.array().optional(),
|
||||
ChapCountry: type.string.array().optional(),
|
||||
});
|
||||
|
||||
export type ChapterDisplayType = typeof ChapterDisplaySchema.infer;
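ChapLanguageSchema is a match() morph rather than a plain type: a non-empty string[] passes through unchanged, anything else collapses to the Matroska default ['eng']. A small sketch of the expected effect (editor's illustration; the normalization is inferred from the definition above, not verified output):

const display = ChapterDisplaySchema({
  ChapString: 'Opening',
  ChapLanguage: [], // expected to be normalized to ['eng'] by the match() morph
});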
|
||||
|
||||
export enum ChapProcessTimeRestrictionEnum {
|
||||
// during the whole chapter
|
||||
DURING_THE_WHOLE_CHAPTER = 0,
|
||||
// before starting playback
|
||||
BEFORE_STARTING_PLAYBACK = 1,
|
||||
// after playback of the chapter
|
||||
AFTER_PLAYBACK_OF_THE_CHAPTER = 2,
|
||||
}
|
||||
export const ChapProcessTimeRestriction = type('0 | 1 | 2');
|
||||
export type ChapProcessTimeRestrictionType =
|
||||
typeof ChapProcessTimeRestriction.infer;
|
||||
|
||||
export const ChapProcessCommandSchema = type({
|
||||
ChapProcessTime: ChapProcessTimeRestriction,
|
||||
ChapProcessData: BinarySchema,
|
||||
});
|
||||
|
||||
export type ChapProcessCommandType = typeof ChapProcessCommandSchema.infer;
|
||||
|
||||
export enum ChapProcessCodecIDRestrictionEnum {
|
||||
// Matroska Script
|
||||
MATROSKA_SCRIPT = 0,
|
||||
// DVD-menu
|
||||
DVD_MENU = 1,
|
||||
}
|
||||
export const ChapProcessCodecIDRestriction = type('0 | 1');
|
||||
export type ChapProcessCodecIDRestrictionType =
|
||||
typeof ChapProcessCodecIDRestriction.infer;
|
||||
|
||||
export const ChapProcessSchema = type({
|
||||
ChapProcessCodecID: ChapProcessCodecIDRestriction.default(0),
|
||||
ChapProcessPrivate: BinarySchema.optional(),
|
||||
ChapProcessCommand: ChapProcessCommandSchema.array().optional(),
|
||||
});
|
||||
|
||||
export type ChapProcessType = typeof ChapProcessSchema.infer;
|
||||
|
||||
export enum ChapterSkipTypeRestrictionEnum {
|
||||
// No Skipping
|
||||
NO_SKIPPING = 0,
|
||||
// Opening Credits
|
||||
OPENING_CREDITS = 1,
|
||||
// End Credits
|
||||
END_CREDITS = 2,
|
||||
// Recap
|
||||
RECAP = 3,
|
||||
// Next Preview
|
||||
NEXT_PREVIEW = 4,
|
||||
// Preview
|
||||
PREVIEW = 5,
|
||||
// Advertisement
|
||||
ADVERTISEMENT = 6,
|
||||
// Intermission
|
||||
INTERMISSION = 7,
|
||||
}
|
||||
export const ChapterSkipTypeRestriction = type('0 | 1 | 2 | 3 | 4 | 5 | 6 | 7');
|
||||
export type ChapterSkipTypeRestrictionType =
|
||||
typeof ChapterSkipTypeRestriction.infer;
|
||||
|
||||
export const ChapterAtomSchema = type({
|
||||
ChapterUID: type.number.or(type.bigint),
|
||||
ChapterStringUID: type.string.optional(),
|
||||
ChapterTimeStart: type.number.or(type.bigint),
|
||||
ChapterTimeEnd: type.number.or(type.bigint).optional(),
|
||||
ChapterFlagHidden: type.number.or(type.bigint).default(0),
|
||||
ChapterFlagEnabled: type.number.or(type.bigint).default(1),
|
||||
ChapterSegmentUUID: BinarySchema.optional(),
|
||||
ChapterSkipType: ChapterSkipTypeRestriction.optional(),
|
||||
ChapterSegmentEditionUID: type.number.or(type.bigint).optional(),
|
||||
ChapterPhysicalEquiv: type.number.or(type.bigint).optional(),
|
||||
ChapterTrack: ChapterTrackSchema.optional(),
|
||||
ChapterDisplay: ChapterDisplaySchema.array().optional(),
|
||||
ChapProcess: ChapProcessSchema.array().optional(),
|
||||
});
|
||||
|
||||
export type ChapterAtomType = typeof ChapterAtomSchema.infer;
|
||||
|
||||
export const EditionEntrySchema = type({
|
||||
EditionUID: type.number.or(type.bigint).optional(),
|
||||
EditionFlagHidden: type.number.or(type.bigint).default(0),
|
||||
EditionFlagDefault: type.number.or(type.bigint).default(0),
|
||||
EditionFlagOrdered: type.number.or(type.bigint).default(0),
|
||||
EditionDisplay: EditionDisplaySchema.array().optional(),
|
||||
ChapterAtom: ChapterAtomSchema.array().atLeastLength(1),
|
||||
});
|
||||
|
||||
export type EditionEntryType = typeof EditionEntrySchema.infer;
|
||||
@ -844,28 +971,24 @@ export const ChaptersSchema = type({
|
||||
export type ChaptersType = typeof ChaptersSchema.infer;
|
||||
|
||||
export const TagTrackUIDSchema = match({
|
||||
"number[]": v => v.length > 0 ? v : [0],
|
||||
"undefined": () => [0],
|
||||
default: "assert"
|
||||
});
|
||||
'(number | bigint)[]': (v) => (v.length > 0 ? v : [0]),
|
||||
default: () => [0],
|
||||
}).optional();
|
||||
|
||||
export const TagEditionUIDSchema = match({
|
||||
"number[]": v => v.length > 0 ? v : [0],
|
||||
"undefined": () => [0],
|
||||
default: "assert"
|
||||
});
|
||||
'(number | bigint)[]': (v) => (v.length > 0 ? v : [0]),
|
||||
default: () => [0],
|
||||
}).optional();
|
||||
|
||||
export const TagChapterUIDSchema = match({
|
||||
"number[]": v => v.length > 0 ? v : [0],
|
||||
"undefined": () => [0],
|
||||
default: "assert"
|
||||
});
|
||||
'(number | bigint)[]': (v) => (v.length > 0 ? v : [0]),
|
||||
default: () => [0],
|
||||
}).optional();
|
||||
|
||||
export const TagAttachmentUIDSchema = match({
|
||||
"number[]": v => v.length > 0 ? v : [0],
|
||||
"undefined": () => [0],
|
||||
default: "assert"
|
||||
});
|
||||
'(number | bigint)[]': (v) => (v.length > 0 ? v : [0]),
|
||||
default: () => [0],
|
||||
}).optional();
|
||||
|
||||
export enum TargetTypeValueRestrictionEnum {
|
||||
// SHOT
|
||||
@ -882,55 +1005,60 @@ export enum TargetTypeValueRestrictionEnum {
|
||||
EDITION_ISSUE_VOLUME_OPUS_SEASON_SEQUEL = 60,
|
||||
// COLLECTION
|
||||
COLLECTION = 70,
|
||||
};
|
||||
export const TargetTypeValueRestriction = type('10 | 20 | 30 | 40 | 50 | 60 | 70');
|
||||
export type TargetTypeValueRestrictionType = typeof TargetTypeValueRestriction.infer;
|
||||
}
|
||||
export const TargetTypeValueRestriction = type(
|
||||
'10 | 20 | 30 | 40 | 50 | 60 | 70'
|
||||
);
|
||||
export type TargetTypeValueRestrictionType =
|
||||
typeof TargetTypeValueRestriction.infer;
|
||||
|
||||
export enum TargetTypeRestrictionEnum {
|
||||
// TargetTypeValue 70
|
||||
COLLECTION = "COLLECTION",
|
||||
COLLECTION = 'COLLECTION',
|
||||
// TargetTypeValue 60
|
||||
EDITION = "EDITION",
|
||||
EDITION = 'EDITION',
|
||||
// TargetTypeValue 60
|
||||
ISSUE = "ISSUE",
|
||||
ISSUE = 'ISSUE',
|
||||
// TargetTypeValue 60
|
||||
VOLUME = "VOLUME",
|
||||
VOLUME = 'VOLUME',
|
||||
// TargetTypeValue 60
|
||||
OPUS = "OPUS",
|
||||
OPUS = 'OPUS',
|
||||
// TargetTypeValue 60
|
||||
SEASON = "SEASON",
|
||||
SEASON = 'SEASON',
|
||||
// TargetTypeValue 60
|
||||
SEQUEL = "SEQUEL",
|
||||
SEQUEL = 'SEQUEL',
|
||||
// TargetTypeValue 50
|
||||
ALBUM = "ALBUM",
|
||||
ALBUM = 'ALBUM',
|
||||
// TargetTypeValue 50
|
||||
OPERA = "OPERA",
|
||||
OPERA = 'OPERA',
|
||||
// TargetTypeValue 50
|
||||
CONCERT = "CONCERT",
|
||||
CONCERT = 'CONCERT',
|
||||
// TargetTypeValue 50
|
||||
MOVIE = "MOVIE",
|
||||
MOVIE = 'MOVIE',
|
||||
// TargetTypeValue 50
|
||||
EPISODE = "EPISODE",
|
||||
EPISODE = 'EPISODE',
|
||||
// TargetTypeValue 40
|
||||
PART = "PART",
|
||||
PART = 'PART',
|
||||
// TargetTypeValue 40
|
||||
SESSION = "SESSION",
|
||||
SESSION = 'SESSION',
|
||||
// TargetTypeValue 30
|
||||
TRACK = "TRACK",
|
||||
TRACK = 'TRACK',
|
||||
// TargetTypeValue 30
|
||||
SONG = "SONG",
|
||||
SONG = 'SONG',
|
||||
// TargetTypeValue 30
|
||||
CHAPTER = "CHAPTER",
|
||||
CHAPTER = 'CHAPTER',
|
||||
// TargetTypeValue 20
|
||||
SUBTRACK = "SUBTRACK",
|
||||
SUBTRACK = 'SUBTRACK',
|
||||
// TargetTypeValue 20
|
||||
MOVEMENT = "MOVEMENT",
|
||||
MOVEMENT = 'MOVEMENT',
|
||||
// TargetTypeValue 20
|
||||
SCENE = "SCENE",
|
||||
SCENE = 'SCENE',
|
||||
// TargetTypeValue 10
|
||||
SHOT = "SHOT",
|
||||
};
|
||||
export const TargetTypeRestriction = type('"COLLECTION" | "EDITION" | "ISSUE" | "VOLUME" | "OPUS" | "SEASON" | "SEQUEL" | "ALBUM" | "OPERA" | "CONCERT" | "MOVIE" | "EPISODE" | "PART" | "SESSION" | "TRACK" | "SONG" | "CHAPTER" | "SUBTRACK" | "MOVEMENT" | "SCENE" | "SHOT"');
|
||||
SHOT = 'SHOT',
|
||||
}
|
||||
export const TargetTypeRestriction = type(
|
||||
'"COLLECTION" | "EDITION" | "ISSUE" | "VOLUME" | "OPUS" | "SEASON" | "SEQUEL" | "ALBUM" | "OPERA" | "CONCERT" | "MOVIE" | "EPISODE" | "PART" | "SESSION" | "TRACK" | "SONG" | "CHAPTER" | "SUBTRACK" | "MOVEMENT" | "SCENE" | "SHOT"'
|
||||
);
|
||||
export type TargetTypeRestrictionType = typeof TargetTypeRestriction.infer;
|
||||
|
||||
export const TargetsSchema = type({
|
||||
@ -944,9 +1072,21 @@ export const TargetsSchema = type({
|
||||
|
||||
export type TargetsType = typeof TargetsSchema.infer;
|
||||
|
||||
export const SimpleTagSchema = type({
|
||||
TagName: type.string,
|
||||
TagLanguage: type.string.default('und'),
|
||||
TagLanguageBCP47: type.string.optional(),
|
||||
TagDefault: type.number.or(type.bigint).default(1),
|
||||
TagDefaultBogus: type.number.or(type.bigint).default(1),
|
||||
TagString: type.string.optional(),
|
||||
TagBinary: BinarySchema.optional(),
|
||||
});
|
||||
|
||||
export type SimpleTagType = typeof SimpleTagSchema.infer;
|
||||
|
||||
export const TagSchema = type({
|
||||
Targets: TargetsSchema,
|
||||
|
||||
SimpleTag: SimpleTagSchema.array().atLeastLength(1),
|
||||
});
|
||||
|
||||
export type TagType = typeof TagSchema.infer;
|
||||
@ -998,14 +1138,23 @@ export const IdMultiSet = new Set([
|
||||
EbmlTagIdEnum.CuePoint,
|
||||
EbmlTagIdEnum.AttachedFile,
|
||||
EbmlTagIdEnum.EditionLanguageIETF,
|
||||
EbmlTagIdEnum.ChapterTrackUID,
|
||||
EbmlTagIdEnum.ChapLanguage,
|
||||
EbmlTagIdEnum.ChapLanguageBCP47,
|
||||
EbmlTagIdEnum.ChapCountry,
|
||||
EbmlTagIdEnum.ChapProcessCommand,
|
||||
EbmlTagIdEnum.ChapterDisplay,
|
||||
EbmlTagIdEnum.ChapProcess,
|
||||
EbmlTagIdEnum.EditionDisplay,
|
||||
EbmlTagIdEnum.ChapterAtom,
|
||||
EbmlTagIdEnum.EditionEntry,
|
||||
EbmlTagIdEnum.TagTrackUID,
|
||||
EbmlTagIdEnum.TagEditionUID,
|
||||
EbmlTagIdEnum.TagChapterUID,
|
||||
EbmlTagIdEnum.TagAttachmentUID,
|
||||
EbmlTagIdEnum.SimpleTag,
|
||||
EbmlTagIdEnum.Tag,
|
||||
EbmlTagIdEnum.SeekHead,
|
||||
EbmlTagIdEnum.Cluster,
|
||||
EbmlTagIdEnum.Tags
|
||||
])
|
||||
EbmlTagIdEnum.Tags,
|
||||
]);
|
100
packages/matroska/src/systems/cluster.ts
Normal file
@ -0,0 +1,100 @@
|
||||
import type { EbmlClusterTagType } from 'konoebml';
|
||||
import {
|
||||
ClusterSchema,
|
||||
type SimpleBlockType,
|
||||
type ClusterType,
|
||||
type BlockGroupType,
|
||||
type TrackEntryType,
|
||||
} from '../schema';
|
||||
import { type SegmentComponent } from './segment';
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
|
||||
export abstract class BlockViewTrait {
|
||||
abstract get keyframe(): boolean;
|
||||
|
||||
abstract get frames(): Uint8Array[];
|
||||
|
||||
abstract get trackNum(): number | bigint;
|
||||
|
||||
abstract get relTime(): number;
|
||||
}
|
||||
|
||||
export class SimpleBlockView extends BlockViewTrait {
|
||||
constructor(public readonly block: SimpleBlockType) {
|
||||
super();
|
||||
}
|
||||
|
||||
get keyframe() {
|
||||
return !!this.block.keyframe;
|
||||
}
|
||||
|
||||
get frames(): Uint8Array<ArrayBufferLike>[] {
|
||||
return this.block.frames;
|
||||
}
|
||||
|
||||
get trackNum() {
|
||||
return this.block.track;
|
||||
}
|
||||
|
||||
get relTime() {
|
||||
return this.block.value;
|
||||
}
|
||||
}
|
||||
|
||||
export class BlockGroupView extends BlockViewTrait {
|
||||
constructor(public readonly block: BlockGroupType) {
|
||||
super();
|
||||
}
|
||||
|
||||
get keyframe() {
|
||||
return !this.block.ReferenceBlock;
|
||||
}
|
||||
|
||||
get frames(): Uint8Array<ArrayBufferLike>[] {
|
||||
return this.block.Block.frames;
|
||||
}
|
||||
get trackNum() {
|
||||
return this.block.Block.track;
|
||||
}
|
||||
|
||||
get relTime() {
|
||||
return this.block.Block.value;
|
||||
}
|
||||
}
|
||||
|
||||
export class ClusterSystem extends SegmentComponentSystemTrait<
|
||||
EbmlClusterTagType,
|
||||
typeof ClusterSchema
|
||||
> {
|
||||
override get schema() {
|
||||
return ClusterSchema;
|
||||
}
|
||||
|
||||
clustersBuffer: SegmentComponent<ClusterType>[] = [];
|
||||
|
||||
addClusterWithTag(tag: EbmlClusterTagType) {
|
||||
const cluster = this.componentFromTag(tag);
|
||||
// this.clustersBuffer.push(cluster);
|
||||
return cluster;
|
||||
}
|
||||
|
||||
*enumerateBlocks(
|
||||
cluster: ClusterType,
|
||||
track: TrackEntryType
|
||||
): Generator<BlockViewTrait> {
|
||||
if (cluster.SimpleBlock) {
|
||||
for (const block of cluster.SimpleBlock) {
|
||||
if (block.track === track.TrackNumber) {
|
||||
yield new SimpleBlockView(block);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (cluster.BlockGroup) {
|
||||
for (const block of cluster.BlockGroup) {
|
||||
if (block.Block.track === track.TrackNumber) {
|
||||
yield new BlockGroupView(block);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
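ClusterSystem wraps a Cluster tag as a segment component and filters its SimpleBlock/BlockGroup children by track when iterating. A hedged usage sketch (editor's illustration; segment, clusterTag and videoTrack are assumed to come from the surrounding demuxer code, which is not part of this diff):

const cluster = segment.cluster.addClusterWithTag(clusterTag);
for (const block of segment.cluster.enumerateBlocks(cluster, videoTrack)) {
  for (const frame of block.frames) {
    // feed each frame to the decoder; block.keyframe and block.relTime supply chunk metadata
  }
}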
|
85
packages/matroska/src/systems/cue.ts
Normal file
@ -0,0 +1,85 @@
|
||||
import {type EbmlCuePointTagType, type EbmlCuesTagType, EbmlTagIdEnum} from "konoebml";
|
||||
import {CuePointSchema, type CuePointType, type CueTrackPositionsType} from "../schema";
|
||||
import {maxBy} from "lodash-es";
|
||||
import type {SegmentComponent} from "./segment";
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
|
||||
export class CueSystem extends SegmentComponentSystemTrait<
|
||||
EbmlCuePointTagType,
|
||||
typeof CuePointSchema
|
||||
> {
|
||||
override get schema() {
|
||||
return CuePointSchema;
|
||||
}
|
||||
|
||||
cues: SegmentComponent<CuePointType>[] = [];
|
||||
|
||||
prepareCuesWithTag(tag: EbmlCuesTagType) {
|
||||
this.cues = tag.children
|
||||
.filter((c) => c.id === EbmlTagIdEnum.CuePoint)
|
||||
.map(this.componentFromTag.bind(this));
|
||||
return this;
|
||||
}
|
||||
|
||||
findClosestCue(seekTime: number): CuePointType | undefined {
|
||||
const cues = this.cues;
|
||||
if (!cues || cues.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let left = 0;
|
||||
let right = cues.length - 1;
|
||||
|
||||
if (seekTime <= cues[0].CueTime) {
|
||||
return cues[0];
|
||||
}
|
||||
|
||||
if (seekTime >= cues[right].CueTime) {
|
||||
return cues[right];
|
||||
}
|
||||
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
|
||||
if (cues[mid].CueTime === seekTime) {
|
||||
return cues[mid];
|
||||
}
|
||||
|
||||
if (cues[mid].CueTime < seekTime) {
|
||||
left = mid + 1;
|
||||
} else {
|
||||
right = mid - 1;
|
||||
}
|
||||
}
|
||||
|
||||
const before = cues[right];
|
||||
const after = cues[left];
|
||||
return Math.abs((before.CueTime as number) - seekTime) <
|
||||
Math.abs((after.CueTime as number) - seekTime)
|
||||
? before
|
||||
: after;
|
||||
}
|
||||
|
||||
getCueTrackPositions(
|
||||
cuePoint: CuePointType,
|
||||
track?: number
|
||||
): CueTrackPositionsType {
|
||||
let cueTrackPositions: CueTrackPositionsType | undefined;
|
||||
if (track! >= 0) {
|
||||
cueTrackPositions = cuePoint.CueTrackPositions.find(
|
||||
(c) => c.CueTrack === track
|
||||
);
|
||||
}
|
||||
if (!cueTrackPositions) {
|
||||
cueTrackPositions = maxBy(
|
||||
cuePoint.CueTrackPositions,
|
||||
(c) => c.CueClusterPosition
|
||||
)!;
|
||||
}
|
||||
return cueTrackPositions;
|
||||
}
|
||||
|
||||
get prepared(): boolean {
|
||||
return this.cues.length > 0;
|
||||
}
|
||||
}
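findClosestCue binary-searches the CueTime-ordered cue list and falls back to whichever neighbour is nearer, so a seek lands on the closest indexed point. A hedged usage sketch (editor's illustration; segment is a prepared SegmentSystem, seekTime and trackNumber come from the player):

const cue = segment.cue.findClosestCue(seekTime);
if (cue) {
  const positions = segment.cue.getCueTrackPositions(cue, trackNumber);
  const offset = segment.seek.offsetFromSeekPosition(Number(positions.CueClusterPosition));
  // resume reading clusters at `offset`
}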
|
8
packages/matroska/src/systems/index.ts
Normal file
@ -0,0 +1,8 @@
|
||||
export { TrackContext, AudioTrackContext, VideoTrackContext, DefaultTrackContext, type GetTrackEntryOptions, TrackSystem } from './track';
|
||||
export { CueSystem } from './cue';
|
||||
export { TagSystem } from './tag';
|
||||
export { ClusterSystem } from './cluster';
|
||||
export { InfoSystem } from './info';
|
||||
export { type SegmentComponent, SegmentSystem, withSegment } from './segment';
|
||||
export { SeekSystem, SEEK_ID_KAX_CUES, SEEK_ID_KAX_INFO, SEEK_ID_KAX_TAGS, SEEK_ID_KAX_TRACKS } from './seek';
|
||||
export {SegmentComponentSystemTrait} from "./segment-component";
|
20
packages/matroska/src/systems/info.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import type {EbmlInfoTagType} from "konoebml";
|
||||
import {InfoSchema, type InfoType} from "../schema";
|
||||
import type {SegmentComponent} from "./segment";
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
|
||||
export class InfoSystem extends SegmentComponentSystemTrait<
|
||||
EbmlInfoTagType,
|
||||
typeof InfoSchema
|
||||
> {
|
||||
override get schema() {
|
||||
return InfoSchema;
|
||||
}
|
||||
|
||||
info!: SegmentComponent<InfoType>;
|
||||
|
||||
prepareWithInfoTag(tag: EbmlInfoTagType) {
|
||||
this.info = this.componentFromTag(tag);
|
||||
return this;
|
||||
}
|
||||
}
|
64
packages/matroska/src/systems/seek.ts
Normal file
@ -0,0 +1,64 @@
|
||||
import type {EbmlSeekHeadTagType, EbmlTagType} from "konoebml";
|
||||
import {SeekHeadSchema, type SeekHeadType} from "../schema";
|
||||
import {isEqual} from "lodash-es";
|
||||
import {UnreachableOrLogicError} from "@konoplayer/core/errors";
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
|
||||
export const SEEK_ID_KAX_INFO = new Uint8Array([0x15, 0x49, 0xa9, 0x66]);
|
||||
export const SEEK_ID_KAX_TRACKS = new Uint8Array([0x16, 0x54, 0xae, 0x6b]);
|
||||
export const SEEK_ID_KAX_CUES = new Uint8Array([0x1c, 0x53, 0xbb, 0x6b]);
|
||||
export const SEEK_ID_KAX_TAGS = new Uint8Array([0x12, 0x54, 0xc3, 0x67]);
|
||||
|
||||
export class SeekSystem extends SegmentComponentSystemTrait<
|
||||
EbmlSeekHeadTagType,
|
||||
typeof SeekHeadSchema
|
||||
> {
|
||||
override get schema() {
|
||||
return SeekHeadSchema;
|
||||
}
|
||||
|
||||
seekHeads: SeekHeadType[] = [];
|
||||
private offsetToTagMemo: Map<number, EbmlTagType> = new Map();
|
||||
|
||||
memoOffset(tag: EbmlTagType) {
|
||||
this.offsetToTagMemo.set(tag.startOffset, tag);
|
||||
}
|
||||
|
||||
addSeekHeadTag(tag: EbmlSeekHeadTagType) {
|
||||
const seekHead = this.componentFromTag(tag);
|
||||
this.seekHeads.push(seekHead);
|
||||
return seekHead;
|
||||
}
|
||||
|
||||
offsetFromSeekPosition(position: number): number {
|
||||
return position + this.segment.contentStartOffset;
|
||||
}
|
||||
|
||||
seekTagByStartOffset(
|
||||
startOffset: number | undefined
|
||||
): EbmlTagType | undefined {
|
||||
return startOffset! >= 0
|
||||
? this.offsetToTagMemo.get(startOffset!)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
seekOffsetBySeekId(seekId: Uint8Array): number | undefined {
|
||||
const seekPosition = this.seekHeads[0]?.Seek?.find((c) =>
|
||||
isEqual(c.SeekID, seekId)
|
||||
)?.SeekPosition;
|
||||
return seekPosition! >= 0
|
||||
? this.offsetFromSeekPosition(seekPosition! as number)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
seekTagBySeekId(seekId: Uint8Array): EbmlTagType | undefined {
|
||||
return this.seekTagByStartOffset(this.seekOffsetBySeekId(seekId));
|
||||
}
|
||||
|
||||
get firstClusterOffset() {
|
||||
if (!this.segment.firstCluster) {
|
||||
throw new UnreachableOrLogicError('first cluster not found');
|
||||
}
|
||||
return this.segment.firstCluster.startOffset;
|
||||
}
|
||||
}
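SeekPosition values inside a SeekHead are relative to the start of the Segment body, which is why offsetFromSeekPosition adds segment.contentStartOffset. A hedged sketch of resolving the Cues element with the constants defined above (editor's illustration):

const cuesTag = segment.seek.seekTagBySeekId(SEEK_ID_KAX_CUES);
if (cuesTag?.id === EbmlTagIdEnum.Cues) {
  segment.cue.prepareCuesWithTag(cuesTag);
} else {
  const cuesOffset = segment.seek.seekOffsetBySeekId(SEEK_ID_KAX_CUES);
  // cuesOffset (if defined) is the absolute byte offset the reader would still need to visit
}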
|
37
packages/matroska/src/systems/segment-component.ts
Normal file
@ -0,0 +1,37 @@
|
||||
import type {EbmlMasterTagType} from "konoebml";
|
||||
import {ArkErrors, type Type} from "arktype";
|
||||
import {convertEbmlTagToComponent, type InferType} from "../util";
|
||||
import type {SegmentComponent, SegmentSystem} from "./segment";
|
||||
|
||||
export class SegmentComponentSystemTrait<
|
||||
E extends EbmlMasterTagType,
|
||||
S extends Type<any>,
|
||||
> {
|
||||
segment: SegmentSystem;
|
||||
|
||||
get schema(): S {
|
||||
throw new Error('unimplemented!');
|
||||
}
|
||||
|
||||
constructor(segment: SegmentSystem) {
|
||||
this.segment = segment;
|
||||
}
|
||||
|
||||
componentFromTag(tag: E): SegmentComponent<InferType<S>> {
|
||||
const extracted = convertEbmlTagToComponent(tag);
|
||||
const result = this.schema(extracted) as
|
||||
| (InferType<S> & { segment: SegmentSystem })
|
||||
| ArkErrors;
|
||||
if (result instanceof ArkErrors) {
|
||||
const errors = result;
|
||||
console.error(
|
||||
'Parse component from tag error:',
|
||||
tag.toDebugRecord(),
|
||||
errors.flatProblemsByPath
|
||||
);
|
||||
throw errors;
|
||||
}
|
||||
result.segment = this.segment;
|
||||
return result;
|
||||
}
|
||||
}
|
124
packages/matroska/src/systems/segment.ts
Normal file
@ -0,0 +1,124 @@
|
||||
import {
|
||||
type EbmlClusterTagType,
|
||||
type EbmlSegmentTagType,
|
||||
EbmlTagIdEnum,
|
||||
EbmlTagPosition,
|
||||
type EbmlTagType
|
||||
} from "konoebml";
|
||||
import {convertEbmlTagToComponent} from "../util";
|
||||
import {CueSystem} from "./cue";
|
||||
import {ClusterSystem} from "./cluster";
|
||||
import {SEEK_ID_KAX_CUES, SEEK_ID_KAX_INFO, SEEK_ID_KAX_TAGS, SEEK_ID_KAX_TRACKS, SeekSystem} from "./seek";
|
||||
import {InfoSystem} from "./info";
|
||||
import {TrackSystem} from "./track";
|
||||
import {TagSystem} from "./tag";
|
||||
import type {BlockGroupType} from "../schema";
|
||||
|
||||
export class SegmentSystem {
|
||||
startTag: EbmlSegmentTagType;
|
||||
metaTags: EbmlTagType[] = [];
|
||||
firstCluster: EbmlClusterTagType | undefined;
|
||||
|
||||
cue: CueSystem;
|
||||
cluster: ClusterSystem;
|
||||
seek: SeekSystem;
|
||||
info: InfoSystem;
|
||||
track: TrackSystem;
|
||||
tag: TagSystem;
|
||||
|
||||
constructor(startNode: EbmlSegmentTagType) {
|
||||
this.startTag = startNode;
|
||||
this.cue = new CueSystem(this);
|
||||
this.cluster = new ClusterSystem(this);
|
||||
this.seek = new SeekSystem(this);
|
||||
this.info = new InfoSystem(this);
|
||||
this.track = new TrackSystem(this);
|
||||
this.tag = new TagSystem(this);
|
||||
}
|
||||
|
||||
get contentStartOffset() {
|
||||
return this.startTag.startOffset + this.startTag.headerLength;
|
||||
}
|
||||
|
||||
private seekLocal() {
|
||||
const infoTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_INFO);
|
||||
const tracksTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_TRACKS);
|
||||
const cuesTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_CUES);
|
||||
const tagsTag = this.seek.seekTagBySeekId(SEEK_ID_KAX_TAGS);
|
||||
|
||||
if (cuesTag?.id === EbmlTagIdEnum.Cues) {
|
||||
this.cue.prepareCuesWithTag(cuesTag);
|
||||
}
|
||||
if (infoTag?.id === EbmlTagIdEnum.Info) {
|
||||
this.info.prepareWithInfoTag(infoTag);
|
||||
}
|
||||
if (tracksTag?.id === EbmlTagIdEnum.Tracks) {
|
||||
this.track.prepareTracksWithTag(tracksTag);
|
||||
}
|
||||
if (tagsTag?.id === EbmlTagIdEnum.Tags) {
|
||||
this.tag.prepareTagsWithTag(tagsTag);
|
||||
}
|
||||
}
|
||||
|
||||
scanMeta(tag: EbmlTagType) {
|
||||
if (
|
||||
tag.id === EbmlTagIdEnum.SeekHead &&
|
||||
tag.position === EbmlTagPosition.End
|
||||
) {
|
||||
this.seek.addSeekHeadTag(tag);
|
||||
}
|
||||
this.metaTags.push(tag);
|
||||
if (tag.position !== EbmlTagPosition.Start) {
|
||||
this.seek.memoOffset(tag);
|
||||
}
|
||||
if (tag.id === EbmlTagIdEnum.Cluster && !this.firstCluster) {
|
||||
this.firstCluster = tag;
|
||||
this.seekLocal();
|
||||
}
|
||||
if (this.firstCluster) {
|
||||
if (tag.id === EbmlTagIdEnum.SimpleBlock && tag.keyframe) {
|
||||
this.track.tryPeekKeyframe(tag);
|
||||
} else if (tag.id === EbmlTagIdEnum.BlockGroup) {
|
||||
const blockGroup = convertEbmlTagToComponent(tag) as BlockGroupType;
|
||||
// keyframe: a BlockGroup with no ReferenceBlock is intra-coded, so stash it for codec probing
|
||||
if (blockGroup && !blockGroup.ReferenceBlock && blockGroup.Block) {
|
||||
this.track.tryPeekKeyframe(blockGroup.Block);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
canCompleteMeta() {
|
||||
const lastTag = this.metaTags.at(-1);
|
||||
if (!lastTag) {
|
||||
return false;
|
||||
}
|
||||
if (lastTag.id === EbmlTagIdEnum.Segment && lastTag.position === EbmlTagPosition.End) {
|
||||
return true;
|
||||
}
|
||||
return (!!this.firstCluster && this.track.preparedToConfigureTracks());
|
||||
}
|
||||
|
||||
async completeMeta() {
|
||||
this.seekLocal();
|
||||
|
||||
await this.track.buildTracksConfiguration();
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export type SegmentComponent<T> = T & {
|
||||
get segment(): SegmentSystem;
|
||||
};
|
||||
|
||||
export function withSegment<T extends object>(
|
||||
component: T,
|
||||
segment: SegmentSystem
|
||||
): SegmentComponent<T> {
|
||||
const component_ = component as T & { segment: SegmentSystem };
|
||||
component_.segment = segment;
|
||||
return component_;
|
||||
}
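SegmentSystem ties the subsystems together: scanMeta is fed every parsed tag, canCompleteMeta signals when enough metadata (or the first Cluster plus peeked keyframes) has arrived, and completeMeta resolves the seek heads and builds decoder configurations. A hedged sketch of the driving loop (editor's illustration; tags is assumed to be an async iterable of EbmlTagType values and segmentStartTag a parsed EbmlSegmentTagType, neither defined in this diff):

const segment = new SegmentSystem(segmentStartTag);
for await (const tag of tags) {
  segment.scanMeta(tag);
  if (segment.canCompleteMeta()) {
    await segment.completeMeta();
    break;
  }
}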
|
||||
|
27
packages/matroska/src/systems/tag.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import {EbmlTagIdEnum, type EbmlTagsTagType, type EbmlTagTagType} from "konoebml";
|
||||
import {TagSchema, type TagType} from "../schema";
|
||||
|
||||
import type {SegmentComponent} from "./segment";
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
|
||||
export class TagSystem extends SegmentComponentSystemTrait<
|
||||
EbmlTagTagType,
|
||||
typeof TagSchema
|
||||
> {
|
||||
override get schema() {
|
||||
return TagSchema;
|
||||
}
|
||||
|
||||
tags: SegmentComponent<TagType>[] = [];
|
||||
|
||||
prepareTagsWithTag(tag: EbmlTagsTagType) {
|
||||
this.tags = tag.children
|
||||
.filter((c) => c.id === EbmlTagIdEnum.Tag)
|
||||
.map((c) => this.componentFromTag(c));
|
||||
return this;
|
||||
}
|
||||
|
||||
get prepared(): boolean {
|
||||
return this.tags.length > 0;
|
||||
}
|
||||
}
|
229
packages/matroska/src/systems/track.ts
Normal file
@ -0,0 +1,229 @@
|
||||
import {
|
||||
ParseCodecErrors,
|
||||
UnsupportedCodecError,
|
||||
} from '@konoplayer/core/errors';
|
||||
import {
|
||||
EbmlTagIdEnum,
|
||||
type EbmlTrackEntryTagType,
|
||||
type EbmlTracksTagType,
|
||||
} from 'konoebml';
|
||||
import {
|
||||
audioCodecIdToWebCodecs,
|
||||
videoCodecIdRequirePeekingKeyframe,
|
||||
videoCodecIdToWebCodecs,
|
||||
type AudioDecoderConfigExt,
|
||||
type VideoDecoderConfigExt,
|
||||
} from '../codecs';
|
||||
import {
|
||||
TrackEntrySchema,
|
||||
type TrackEntryType,
|
||||
TrackTypeRestrictionEnum,
|
||||
} from '../schema';
|
||||
import type { SegmentComponent } from './segment';
|
||||
import {SegmentComponentSystemTrait} from "./segment-component";
|
||||
import {pick} from "lodash-es";
|
||||
|
||||
export interface GetTrackEntryOptions {
|
||||
priority?: (v: SegmentComponent<TrackEntryType>) => number;
|
||||
predicate: (v: SegmentComponent<TrackEntryType>) => boolean;
|
||||
}
|
||||
|
||||
export abstract class TrackContext {
|
||||
peekingKeyframe?: Uint8Array;
|
||||
trackEntry: TrackEntryType;
|
||||
timestampScale: number;
|
||||
lastBlockTimestamp = Number.NaN;
|
||||
averageBlockDuration = Number.NaN;
|
||||
|
||||
constructor(trackEntry: TrackEntryType, timestampScale: number) {
|
||||
this.trackEntry = trackEntry;
|
||||
this.timestampScale = Number(timestampScale);
|
||||
}
|
||||
|
||||
peekKeyframe(payload: Uint8Array) {
|
||||
this.peekingKeyframe = payload;
|
||||
}
|
||||
|
||||
preparedToConfigure() {
|
||||
if (this.requirePeekKeyframe()) {
|
||||
return !!this.peekingKeyframe;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
abstract requirePeekKeyframe(): boolean;
|
||||
|
||||
abstract buildConfiguration(): Promise<void>;
|
||||
|
||||
predictBlockDuration(blockTimestamp: number): number {
|
||||
if (this.trackEntry.DefaultDuration) {
|
||||
return Number(this.trackEntry.DefaultDuration);
|
||||
}
|
||||
const delta = blockTimestamp - this.lastBlockTimestamp;
|
||||
this.lastBlockTimestamp = blockTimestamp;
|
||||
this.averageBlockDuration = this.averageBlockDuration
|
||||
? this.averageBlockDuration * 0.5 + delta * 0.5
|
||||
: delta;
|
||||
return this.averageBlockDuration;
|
||||
}
|
||||
}
|
||||
|
||||
export class DefaultTrackContext extends TrackContext {
|
||||
override requirePeekKeyframe(): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
// biome-ignore lint/suspicious/noEmptyBlockStatements: <explanation>
|
||||
override async buildConfiguration(): Promise<void> {}
|
||||
}
|
||||
|
||||
export class VideoTrackContext extends TrackContext {
|
||||
configuration!: VideoDecoderConfigExt;
|
||||
|
||||
override requirePeekKeyframe(): boolean {
|
||||
return videoCodecIdRequirePeekingKeyframe(this.trackEntry.CodecID);
|
||||
}
|
||||
|
||||
async buildConfiguration() {
|
||||
const configuration = videoCodecIdToWebCodecs(
|
||||
this.trackEntry,
|
||||
this.peekingKeyframe
|
||||
);
|
||||
const checkResult = await VideoDecoder?.isConfigSupported?.(configuration);
|
||||
if (!checkResult?.supported) {
|
||||
throw new UnsupportedCodecError(configuration.codec, 'video decoder');
|
||||
}
|
||||
this.configuration = configuration;
|
||||
}
|
||||
}
|
||||
|
||||
export class AudioTrackContext extends TrackContext {
|
||||
configuration!: AudioDecoderConfigExt;
|
||||
|
||||
override requirePeekKeyframe(): boolean {
|
||||
return videoCodecIdRequirePeekingKeyframe(this.trackEntry.CodecID);
|
||||
}
|
||||
|
||||
async buildConfiguration() {
|
||||
const configuration = audioCodecIdToWebCodecs(
|
||||
this.trackEntry,
|
||||
this.peekingKeyframe
|
||||
);
|
||||
const checkResult = await AudioDecoder?.isConfigSupported?.(configuration);
|
||||
if (!checkResult?.supported) {
|
||||
throw new UnsupportedCodecError(configuration.codec, 'audio decoder');
|
||||
}
|
||||
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
override predictBlockDuration(blockTimestamp: number): number {
|
||||
if (this.trackEntry.DefaultDuration) {
|
||||
return Number(this.trackEntry.DefaultDuration);
|
||||
}
|
||||
if (this.configuration.samplesPerFrame) {
|
||||
return (
|
||||
Number(
|
||||
this.configuration.samplesPerFrame / this.configuration.sampleRate
|
||||
) * this.timestampScale
|
||||
);
|
||||
}
|
||||
const delta = blockTimestamp - this.lastBlockTimestamp;
|
||||
this.lastBlockTimestamp = blockTimestamp;
|
||||
this.averageBlockDuration = this.averageBlockDuration
|
||||
? this.averageBlockDuration * 0.5 + delta * 0.5
|
||||
: delta;
|
||||
return this.averageBlockDuration;
|
||||
}
|
||||
}
|
||||
|
||||
export function standardTrackPredicate(track: TrackEntryType) {
|
||||
return track.FlagEnabled !== 0;
|
||||
}
|
||||
|
||||
export function standardTrackPriority(track: TrackEntryType) {
|
||||
return (Number(!!track.FlagForced) << 8) + (Number(!!track.FlagDefault) << 4);
|
||||
}
|
||||
|
||||
export class TrackSystem extends SegmentComponentSystemTrait<
|
||||
EbmlTrackEntryTagType,
|
||||
typeof TrackEntrySchema
|
||||
> {
|
||||
override get schema() {
|
||||
return TrackEntrySchema;
|
||||
}
|
||||
|
||||
tracks: SegmentComponent<TrackEntryType>[] = [];
|
||||
trackContexts: Map<number | bigint, TrackContext> = new Map();
|
||||
|
||||
getTrackEntry({
|
||||
priority = standardTrackPriority,
|
||||
predicate,
|
||||
}: GetTrackEntryOptions) {
|
||||
return this.tracks
|
||||
.filter(predicate)
|
||||
.toSorted((a, b) => priority(b) - priority(a))
|
||||
.at(0);
|
||||
}
|
||||
|
||||
getTrackContext<T extends TrackContext>(
|
||||
options: GetTrackEntryOptions
|
||||
): T | undefined {
|
||||
const trackEntry = this.getTrackEntry(options);
|
||||
const trackNum = trackEntry?.TrackNumber!;
|
||||
return this.trackContexts.get(trackNum) as T | undefined;
|
||||
}
|
||||
|
||||
prepareTracksWithTag(tag: EbmlTracksTagType) {
|
||||
const infoSystem = this.segment.info;
|
||||
this.tracks = tag.children
|
||||
.filter((c) => c.id === EbmlTagIdEnum.TrackEntry)
|
||||
.map((c) => this.componentFromTag(c));
|
||||
for (const track of this.tracks) {
|
||||
if (track.TrackType === TrackTypeRestrictionEnum.VIDEO) {
|
||||
this.trackContexts.set(
|
||||
track.TrackNumber,
|
||||
new VideoTrackContext(track, Number(infoSystem.info.TimestampScale))
|
||||
);
|
||||
} else if (track.TrackType === TrackTypeRestrictionEnum.AUDIO) {
|
||||
this.trackContexts.set(
|
||||
track.TrackNumber,
|
||||
new AudioTrackContext(track, Number(infoSystem.info.TimestampScale))
|
||||
);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
async buildTracksConfiguration() {
|
||||
const parseErrors = new ParseCodecErrors();
|
||||
|
||||
for (const context of this.trackContexts.values()) {
|
||||
try {
|
||||
await context.buildConfiguration();
|
||||
} catch (e) {
|
||||
parseErrors.cause.push(e as Error);
|
||||
}
|
||||
}
|
||||
if (parseErrors.cause.length > 0) {
|
||||
console.error(parseErrors, parseErrors.cause);
|
||||
}
|
||||
}
|
||||
|
||||
tryPeekKeyframe(tag: { track: number | bigint; frames: Uint8Array[] }) {
|
||||
for (const c of this.trackContexts.values()) {
|
||||
if (c.trackEntry.TrackNumber === tag.track) {
|
||||
c.peekKeyframe(tag.frames?.[0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
preparedToConfigureTracks(): boolean {
|
||||
for (const c of this.trackContexts.values()) {
|
||||
if (!c.preparedToConfigure()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
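Once tracks are prepared, getTrackContext picks the best matching track by predicate and priority, and its configuration is what gets handed to WebCodecs. A hedged sketch (editor's illustration; handleFrame is a placeholder output callback and segment a completed SegmentSystem):

const videoContext = segment.track.getTrackContext<VideoTrackContext>({
  predicate: (t) => standardTrackPredicate(t) && t.TrackType === TrackTypeRestrictionEnum.VIDEO,
  priority: standardTrackPriority,
});
if (videoContext) {
  const decoder = new VideoDecoder({ output: handleFrame, error: console.error });
  decoder.configure(videoContext.configuration);
}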
|
@ -27,7 +27,7 @@ export function isTagPos<
|
||||
pos === '*' || pos === tag.position;
|
||||
}
|
||||
|
||||
export function convertEbmlTagToComponent (tag: EbmlTagType) {
|
||||
export function convertEbmlTagToComponent(tag: EbmlTagType) {
|
||||
if (tag.type === EbmlElementType.Master) {
|
||||
const obj: Record<string, any> = {};
|
||||
const children = tag.children;
|
||||
@ -51,3 +51,12 @@ export function convertEbmlTagToComponent (tag: EbmlTagType) {
|
||||
}
|
||||
return tag.data;
|
||||
}
|
||||
|
||||
export function waitTick() {
|
||||
return new Promise<void>((resolve) => {
|
||||
const timeout = setTimeout(() => {
|
||||
resolve();
|
||||
timeout && clearTimeout(timeout);
|
||||
}, 0);
|
||||
});
|
||||
}
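waitTick resolves on a zero-delay timeout, i.e. it defers the caller to the next macrotask. A hedged usage sketch (editor's illustration; batching the scan loop this way is an assumption, not something this diff does):

for (const tag of batchOfTags) {
  segment.scanMeta(tag);
}
await waitTick(); // yield to the event loop between parse batches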
|
20
packages/matroska/tsconfig.json
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"outDir": "./dist",
|
||||
"paths": {
|
||||
"@konoplayer/core/*": [
|
||||
"../core/src/*"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
],
|
||||
"references": [
|
||||
{
|
||||
"path": "../core"
|
||||
}
|
||||
]
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "demuxing"
|
||||
name = "konoplayer-symphonia"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
symphonia-format-mkv = "0.5.4"
|
||||
symphonia = "0.5.4"
|
1090
pnpm-lock.yaml
generated
File diff suppressed because it is too large
@ -43,15 +43,15 @@ const EbmlTypeMetas = {
|
||||
},
|
||||
uinteger: {
|
||||
code: 'Uint',
|
||||
primitive: () => 'type.number',
|
||||
primitive: () => 'type.number.or(type.bigint)',
|
||||
default: (d: string): string => d,
|
||||
primitiveStr: () => 'number',
|
||||
primitiveStr: () => '(number | bigint)',
|
||||
},
|
||||
integer: {
|
||||
code: 'Int',
|
||||
primitive: () => 'type.number',
|
||||
primitive: () => 'type.number.or(type.bigint)',
|
||||
default: (d: string) => d,
|
||||
primitiveStr: () => 'number',
|
||||
primitiveStr: () => '(number | bigint)',
|
||||
},
|
||||
float: {
|
||||
code: 'Float',
|
||||
@ -160,7 +160,7 @@ function extractElement(element: Element) {
|
||||
);
|
||||
|
||||
assert(typeof path_ === 'string', `path of ${name} is not string ${element}`);
|
||||
const path = path_.split('\\').filter(Boolean);
|
||||
const path = path_.replace(/\\\+/g, '\\').split('\\').filter(Boolean);
|
||||
const parentPath = path.at(-2);
|
||||
const prefix = path.slice(0, -1);
|
||||
const level = path.length - 1;
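In the upstream ebml_matroska.xml, recursive elements carry a '+' marker in their path segment (for example \Segment\Chapters\EditionEntry\+ChapterAtom), so stripping it before splitting keeps such elements parented correctly instead of adding a bogus level. A small sketch of the transform (editor's illustration; the regex is the one added above):

const raw = '\\Segment\\Chapters\\EditionEntry\\+ChapterAtom';
raw.replace(/\\\+/g, '\\').split('\\').filter(Boolean);
// -> ['Segment', 'Chapters', 'EditionEntry', 'ChapterAtom']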
|
||||
@ -342,10 +342,15 @@ function generateMkvSchemaHierarchy(elements_: EbmlElementType[]) {
|
||||
const idMulti = new Set<string>();
|
||||
const preDefs = [
|
||||
'export const BinarySchema = type.instanceOf(Uint8Array);',
|
||||
'export type BinaryType = typeof BinarySchema.infer;',
|
||||
...Object.entries(AdHocType).map(
|
||||
([name, meta]) =>
|
||||
`export const ${meta.primitive()} = type.instanceOf(Ebml${name}Tag);`
|
||||
),
|
||||
...Object.entries(AdHocType).map(
|
||||
([name, meta]) =>
|
||||
`export type ${name}Type = typeof ${meta.primitive()}.infer;`
|
||||
),
|
||||
];
|
||||
|
||||
const generateAssociated = (el: EbmlElementType): string | undefined => {
|
||||
@ -363,7 +368,6 @@ function generateMkvSchemaHierarchy(elements_: EbmlElementType[]) {
|
||||
|
||||
const selfSchema = [
|
||||
`export const ${el.name}Schema = type({`,
|
||||
// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: <explanation>
|
||||
...associated.map((v) => {
|
||||
let meta: any;
|
||||
const restriction = generateRestriction(v);
|
||||
@ -391,7 +395,7 @@ function generateMkvSchemaHierarchy(elements_: EbmlElementType[]) {
|
||||
if (v.maxOccurs !== 1) {
|
||||
expr = `${expr}.array()`;
|
||||
if (v.maxOccurs !== 1 && v.minOccurs === 1 && !v.default) {
|
||||
expr = `${expr}.atLeastLength(1)`
|
||||
expr = `${expr}.atLeastLength(1)`;
|
||||
}
|
||||
idMulti.add(v.name);
|
||||
}
|
||||
@ -401,9 +405,8 @@ function generateMkvSchemaHierarchy(elements_: EbmlElementType[]) {
|
||||
} else {
|
||||
childrenSchema.push(`export const ${v.name}Schema = match({
|
||||
"${meta.primitiveStr(v.name)}[]": v => v.length > 0 ? v : [${meta.default(v.default)}],
|
||||
"undefined": () => [${meta.default(v.default)}],
|
||||
default: "assert"
|
||||
});`);
|
||||
default: () => [${meta.default(v.default)}],
|
||||
}).optional();`);
|
||||
expr = `${v.name}Schema`;
|
||||
}
|
||||
} else if (!v.minOccurs) {
|
||||
@ -434,7 +437,7 @@ function main() {
|
||||
const elementSchemas = extractElementAll();
|
||||
|
||||
const files = {
|
||||
'schema.ts': [
|
||||
'schema': [
|
||||
generateMkvSchemaImports(elementSchemas),
|
||||
generateMkvSchemaHierarchy(elementSchemas),
|
||||
],
|
||||
|
54
scripts/download-samples.ts
Normal file
@ -0,0 +1,54 @@
|
||||
import { exec } from 'node:child_process';
|
||||
import { promisify } from 'node:util';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import fsp from 'node:fs/promises';
|
||||
|
||||
|
||||
async function downloadAndExtract() {
|
||||
try {
|
||||
// Target directory
|
||||
const targetDir = path.join(import.meta.dirname, '..', 'apps', 'mock', 'public', 'video', 'huge');
|
||||
const url = 'https://sourceforge.net/projects/matroska/files/test_files/matroska_test_w1_1.zip/download';
|
||||
const zipFile = 'matroska_test_w1_1.zip';
|
||||
const platform = os.platform();
|
||||
|
||||
const execPromise = (cmd: string) => promisify(exec)(cmd, {
|
||||
cwd: targetDir,
|
||||
shell: platform === 'win32' ? 'powershell' : undefined
|
||||
});
|
||||
|
||||
await fsp.mkdir(targetDir, { recursive: true })
|
||||
|
||||
console.log(`Working directory switched to: ${targetDir}`);
|
||||
|
||||
if (platform === 'win32') {
|
||||
// Windows: use PowerShell's Invoke-WebRequest and Expand-Archive
|
||||
console.log('Downloading on Windows...');
|
||||
await execPromise(`Invoke-WebRequest -Uri '${url}' -OutFile '${zipFile}' -UserAgent "wget"`);
|
||||
console.log('Extracting on Windows...');
|
||||
await execPromise(`Expand-Archive -Path '${zipFile}' -DestinationPath '.' -Force`);
|
||||
console.log('Cleaning up...');
|
||||
await execPromise(`rm '${zipFile}'`);
|
||||
} else {
|
||||
// *nix: use curl and unzip
|
||||
console.log('Downloading on *nix...');
|
||||
await execPromise(`curl -L "${url}" -o "${zipFile}"`);
|
||||
console.log('Extracting on *nix...');
|
||||
await execPromise(`unzip -o "${zipFile}"`);
|
||||
console.log('Cleaning up...');
|
||||
await execPromise(`rm "${zipFile}"`);
|
||||
}
|
||||
|
||||
console.log('Download and extraction completed successfully!');
|
||||
} catch (error) {
|
||||
console.error('An error occurred:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Run the download
|
||||
downloadAndExtract().catch((err) => {
|
||||
console.error(err)
|
||||
process.exit(1);
|
||||
});
|
@ -14,19 +14,22 @@
|
||||
"DOM.AsyncIterable",
|
||||
"DOM.Iterable"
|
||||
],
|
||||
"types": [
|
||||
"@webgpu/types",
|
||||
"@types/node"
|
||||
],
|
||||
"module": "ESNext",
|
||||
"moduleDetection": "force",
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"allowImportingTsExtensions": true,
|
||||
"allowImportingTsExtensions": false,
|
||||
"emitDeclarationOnly": true,
|
||||
"skipLibCheck": true,
|
||||
"target": "ES2021",
|
||||
"strictNullChecks": true,
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"useDefineForClassFields": true,
|
||||
"useDefineForClassFields": false,
|
||||
"exactOptionalPropertyTypes": false,
|
||||
"experimentalDecorators": true
|
||||
}
|
||||
}
|
@ -9,6 +9,15 @@
|
||||
},
|
||||
{
|
||||
"path": "./tsconfig.scripts.json"
|
||||
},
|
||||
{
|
||||
"path": "./packages/matroska"
|
||||
},
|
||||
{
|
||||
"path": "./packages/core"
|
||||
},
|
||||
{
|
||||
"path": "./apps/test"
|
||||
}
|
||||
]
|
||||
}
|