feat: add auto volume support

commit ee4eee473d (parent 9896c4caec), contained in branch master
Date: 2025-03-27 01:39:11 +08:00
2 changed files with 70 additions and 29 deletions

Changed file 1 of 2 (demo HTML page):

@@ -3,7 +3,8 @@
 <head></head>
 <body>
 <!-- <my-element />-->
-<!-- <video-pipeline-demo src="/api/static/video/test.webm"></video-pipeline-demo>-->
-<video-pipeline-demo src="/api/static/video/huge/[LoliHouse] Amagami-san Chi no Enmusubi - 23 [WebRip 1080p HEVC-10bit AAC SRTx2].mkv" width="800" height="450" />
+<!-- <video-pipeline-demo src="/api/static/video/test-hevc.mkv" width="800" height="450"></video-pipeline-demo> -->
+<video-pipeline-demo src="/api/static/video/huge/test8.mkv" width="800" height="450"></video-pipeline-demo>
+<!-- <video-pipeline-demo src="/api/static/video/huge/[LoliHouse] Amagami-san Chi no Enmusubi - 23 [WebRip 1080p HEVC-10bit AAC SRTx2].mkv" width="800" height="450" /> -->
 </body>
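
Note on the markup change: HTML has no self-closing syntax for custom elements, so the old <video-pipeline-demo ... /> was parsed as an unclosed start tag; the replacement closes the element explicitly with </video-pipeline-demo>.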

Changed file 2 of 2 (video-pipeline-demo element, TypeScript):

@@ -13,7 +13,10 @@ import {
   fromEvent,
   share,
   takeUntil,
-  firstValueFrom, tap, throwIfEmpty, ReplaySubject, finalize, of, interval,
+  firstValueFrom,
+  tap,
+  throwIfEmpty,
+  ReplaySubject,
 } from 'rxjs';
 import { createMatroska } from '@konoplayer/matroska/model';
 import { createRef, ref, type Ref } from 'lit/directives/ref.js';
@@ -78,35 +81,47 @@ export class VideoPipelineDemo extends LitElement {
         videoTrackDecoder,
         audioTrackDecoder,
       },
-      totalSize
+      totalSize,
     } = await firstValueFrom(
       createMatroska({
         url: src,
-      }).pipe(
-        throwIfEmpty(() => new Error("failed to extract matroska"))
-      )
-    )
+      }).pipe(throwIfEmpty(() => new Error('failed to extract matroska')))
+    );

-    console.debug(`[MATROSKA]: loaded metadata, total size ${totalSize} bytes`)
+    console.debug(`[MATROSKA]: loaded metadata, total size ${totalSize} bytes`);

     const currentCluster$ = this.seeked$.pipe(
       switchMap((seekTime) => seek(seekTime)),
-      share({ resetOnRefCountZero: false, resetOnError: false, resetOnComplete: false }),
+      share({
+        resetOnRefCountZero: false,
+        resetOnError: false,
+        resetOnComplete: false,
+      })
     );

     defaultVideoTrack$
-      .pipe(take(1), takeUntil(destroyRef$), tap((track) => console.debug('[MATROSKA]: video track loaded,', track)))
+      .pipe(
+        take(1),
+        takeUntil(destroyRef$),
+        tap((track) => console.debug('[MATROSKA]: video track loaded,', track))
+      )
       .subscribe(this.videoTrack$.next.bind(this.videoTrack$));

     defaultAudioTrack$
-      .pipe(take(1), takeUntil(destroyRef$), tap((track) => console.debug('[MATROSKA]: audio track loaded,', track)))
+      .pipe(
+        take(1),
+        takeUntil(destroyRef$),
+        tap((track) => console.debug('[MATROSKA]: audio track loaded,', track))
+      )
       .subscribe(this.audioTrack$.next.bind(this.audioTrack$));

     this.videoTrack$
       .pipe(
         takeUntil(this.destroyRef$),
         switchMap((track) =>
-          track?.configuration ? videoTrackDecoder(track, currentCluster$) : EMPTY
+          track?.configuration
+            ? videoTrackDecoder(track, currentCluster$)
+            : EMPTY
         ),
         switchMap(({ frame$ }) => frame$)
       )
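
A note on the share() options above: currentCluster$ feeds both the video and the audio decoder, and a non-resetting share() keeps a single seek/demux pipeline alive for both subscribers rather than re-seeking once per subscription. A minimal sketch of the pattern, with a hypothetical Cluster type and seek() stand-in:

import { Subject, of, share, switchMap } from 'rxjs';

// Hypothetical stand-ins for the demo's seek/cluster machinery.
interface Cluster {
  timestamp: number;
}
const seeked$ = new Subject<number>();
const seek = (time: number) => of<Cluster>({ timestamp: time });

// One shared seek pipeline; with every reset flag false, the underlying
// subscription survives errors, completion, and refcount dropping to zero.
const currentCluster$ = seeked$.pipe(
  switchMap((seekTime) => seek(seekTime)),
  share({
    resetOnRefCountZero: false,
    resetOnError: false,
    resetOnComplete: false,
  })
);

currentCluster$.subscribe((c) => console.log('video decoder sees', c));
currentCluster$.subscribe((c) => console.log('audio decoder sees', c));
seeked$.next(0); // both subscribers receive the same cluster emission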
@@ -120,7 +135,9 @@ export class VideoPipelineDemo extends LitElement {
       .pipe(
         takeUntil(this.destroyRef$),
         switchMap((track) =>
-          track?.configuration ? audioTrackDecoder(track, currentCluster$) : EMPTY
+          track?.configuration
+            ? audioTrackDecoder(track, currentCluster$)
+            : EMPTY
         ),
         switchMap(({ frame$ }) => frame$)
       )
@@ -144,22 +161,25 @@
        ),
    }).pipe(
      takeUntil(this.destroyRef$),
-      map(({ ended, paused, videoBuffered, audioBuffered }) => !paused && !ended && !!(videoBuffered || audioBuffered)),
+      map(
+        ({ ended, paused, videoBuffered, audioBuffered }) =>
+          !paused && !ended && !!(videoBuffered || audioBuffered)
+      ),
      tap((enabled) => {
        if (enabled) {
-          playableStartTime = performance.now()
+          playableStartTime = performance.now();
        }
      }),
      share()
-    )
+    );

     let nextAudioStartTime = 0;
     playable
       .pipe(
         tap(() => {
-          nextAudioStartTime = 0
+          nextAudioStartTime = 0;
         }),
-        switchMap((enabled) => (enabled ? animationFrames() : EMPTY)),
+        switchMap((enabled) => (enabled ? animationFrames() : EMPTY))
       )
       .subscribe(() => {
         const audioFrameBuffer = this.audioFrameBuffer$.getValue();
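
The playable stream in this hunk is the scheduling core: player state collapses into one boolean, and switchMap tears the animationFrames() loop down whenever playback cannot make progress. A minimal reconstruction of the gate under assumed inputs (the four BehaviorSubjects are hypothetical stand-ins for the demo's real state):

import {
  BehaviorSubject,
  EMPTY,
  animationFrames,
  combineLatest,
  map,
  switchMap,
} from 'rxjs';

// Hypothetical state sources; the demo derives these from its pipeline.
const paused$ = new BehaviorSubject(true);
const ended$ = new BehaviorSubject(false);
const videoBuffered$ = new BehaviorSubject(false);
const audioBuffered$ = new BehaviorSubject(false);

// True only while playback can make progress.
const playable$ = combineLatest({
  paused: paused$,
  ended: ended$,
  videoBuffered: videoBuffered$,
  audioBuffered: audioBuffered$,
}).pipe(
  map(
    ({ ended, paused, videoBuffered, audioBuffered }) =>
      !paused && !ended && !!(videoBuffered || audioBuffered)
  )
);

// The render loop only runs while playable; EMPTY cancels the rAF loop.
playable$
  .pipe(switchMap((enabled) => (enabled ? animationFrames() : EMPTY)))
  .subscribe(({ timestamp }) => {
    // drain the frame buffers up to `timestamp` here
  });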
@@ -169,7 +189,7 @@
         let audioChanged = false;
         while (audioFrameBuffer.size > 0) {
           const firstAudio = audioFrameBuffer.peek();
-          if (firstAudio && (firstAudio.timestamp / 1000) <= accTime) {
+          if (firstAudio && firstAudio.timestamp / 1000 <= accTime) {
             const audioFrame = audioFrameBuffer.dequeue()!;
             audioChanged = true;
             if (audioContext) {
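
The division by 1000 here matters: WebCodecs frame timestamps (AudioData.timestamp, VideoFrame.timestamp) are in microseconds, while accTime comes from performance.now() arithmetic and is in milliseconds. A generic version of the drain loop, assuming a timestamp-ordered queue with the same peek/dequeue/size shape as the demo's frame buffer:

// Minimal queue interface matching how the frame buffers are used.
interface FrameQueue<T extends { timestamp: number }> {
  size: number;
  peek(): T | undefined;
  dequeue(): T | undefined;
}

// Release every frame whose presentation time has already passed.
// WebCodecs timestamps are microseconds; accTimeMs is milliseconds.
function drainDue<T extends { timestamp: number }>(
  queue: FrameQueue<T>,
  accTimeMs: number,
  render: (frame: T) => void
): void {
  while (queue.size > 0) {
    const first = queue.peek();
    if (first && first.timestamp / 1000 <= accTimeMs) {
      render(queue.dequeue()!);
    } else {
      break; // queue is timestamp-ordered, so nothing later is due either
    }
  }
}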
@@ -187,10 +207,14 @@
               const fadeLength = Math.min(50, audioFrame.numberOfFrames);
               for (let channel = 0; channel < numberOfChannels; channel++) {
                 const channelData = new Float32Array(numberOfFrames);
-                audioFrame.copyTo(channelData, { planeIndex: channel, frameCount: numberOfFrames });
+                audioFrame.copyTo(channelData, {
+                  planeIndex: channel,
+                  frameCount: numberOfFrames,
+                });
                 for (let i = 0; i < fadeLength; i++) {
                   channelData[i] *= i / fadeLength; // fade-in
-                  channelData[audioFrame.numberOfFrames - 1 - i] *= i / fadeLength; // fade-out
+                  channelData[audioFrame.numberOfFrames - 1 - i] *=
+                    i / fadeLength; // fade-out
                 }
                 audioBuffer.copyToChannel(channelData, channel);
               }
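
Because every decoded AudioData is scheduled as its own AudioBuffer, any waveform discontinuity at a buffer seam is audible as a click; the linear ramp over at most 50 samples per edge masks those seams at negligible cost. The same windowing step, extracted into a standalone helper for clarity (the helper name is hypothetical, the logic mirrors the hunk):

// Copy one channel of a WebCodecs AudioData into an AudioBuffer,
// applying short linear fade-in/fade-out ramps to hide seam clicks.
function copyChannelWithFades(
  audioFrame: AudioData,
  audioBuffer: AudioBuffer,
  channel: number
): void {
  const numberOfFrames = audioFrame.numberOfFrames;
  const fadeLength = Math.min(50, numberOfFrames);
  const channelData = new Float32Array(numberOfFrames);
  audioFrame.copyTo(channelData, {
    planeIndex: channel,
    frameCount: numberOfFrames,
  });
  for (let i = 0; i < fadeLength; i++) {
    channelData[i] *= i / fadeLength; // fade-in: gain 0 -> 1
    channelData[numberOfFrames - 1 - i] *= i / fadeLength; // fade-out: gain 1 -> 0
  }
  audioBuffer.copyToChannel(channelData, channel);
}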
@@ -222,9 +246,7 @@
       });

     playable
-      .pipe(
-        switchMap((enabled) => (enabled ? animationFrames() : EMPTY)),
-      )
+      .pipe(switchMap((enabled) => (enabled ? animationFrames() : EMPTY)))
       .subscribe(async () => {
         const renderingContext = this.renderingContext;
         const videoFrameBuffer = this.videoFrameBuffer$.getValue();
@@ -233,7 +255,7 @@
         const accTime = nowTime - playableStartTime;
         while (videoFrameBuffer.size > 0) {
           const firstVideo = videoFrameBuffer.peek();
-          if (firstVideo && (firstVideo.timestamp / 1000) <= accTime) {
+          if (firstVideo && firstVideo.timestamp / 1000 <= accTime) {
             const videoFrame = videoFrameBuffer.dequeue()!;
             videoChanged = true;
             if (renderingContext) {
@@ -252,12 +274,30 @@

     fromEvent(document.body, 'click')
       .pipe(takeUntil(this.destroyRef$))
-      .subscribe(() => {
+      .subscribe(async () => {
+        const permissionStatus = await navigator.permissions.query({
+          name: 'microphone',
+        });
+        if (permissionStatus.state === 'prompt') {
+          await navigator.mediaDevices.getUserMedia({
+            audio: true,
+          });
+        }
         this.audioContext.resume();
         this.audioFrameBuffer$.next(this.audioFrameBuffer$.getValue());
       });

-    this.seeked$.next(0)
+    const permissionStatus = await navigator.permissions.query({
+      name: 'microphone',
+    });
+    if (permissionStatus.state === 'granted') {
+      await navigator.mediaDevices.getUserMedia({
+        audio: true,
+      });
+      this.audioContext.resume();
+    }
+
+    this.seeked$.next(0);
   }

   async connectedCallback() {
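
On the autoplay handling added here, which is presumably what the commit title's "auto volume" refers to: browsers create an AudioContext in the 'suspended' state until the page has user activation, so resume() is called inside the click handler. The microphone-permission probe covers the automatic case: if permission is already granted, getUserMedia() succeeds without a prompt and audio can start with no click at all. How far a granted capture permission relaxes autoplay is browser-specific, and 'microphone' is not in every TypeScript lib's PermissionName union, so a cast may be needed. A hedged sketch of the same unlock logic as a standalone helper (unlockAudio is a hypothetical name):

// Resume a suspended AudioContext as soon as the browser allows it.
// Assumption: an already-granted microphone permission lets audio start
// without a fresh user gesture (behavior varies per browser).
async function unlockAudio(audioContext: AudioContext): Promise<void> {
  const status = await navigator.permissions.query({
    name: 'microphone' as PermissionName, // cast for older TS lib typings
  });
  if (status.state === 'granted') {
    await navigator.mediaDevices.getUserMedia({ audio: true });
    await audioContext.resume();
    return;
  }
  // Otherwise wait for the first user gesture on the page.
  document.body.addEventListener(
    'click',
    () => void audioContext.resume(),
    { once: true }
  );
}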