diff --git a/frigate/api/media.py b/frigate/api/media.py
index 61c1e2b96..783b42e97 100644
--- a/frigate/api/media.py
+++ b/frigate/api/media.py
@@ -837,7 +837,19 @@ async def recording_clip(
dependencies=[Depends(require_camera_access)],
description="Returns an HLS playlist for the specified timestamp-range on the specified camera. Append /master.m3u8 or /index.m3u8 for HLS playback.",
)
-async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
+async def vod_ts(
+ camera_name: str,
+ start_ts: float,
+ end_ts: float,
+ force_discontinuity: bool = False,
+):
+ logger.debug(
+ "VOD: Generating VOD for %s from %s to %s with force_discontinuity=%s",
+ camera_name,
+ start_ts,
+ end_ts,
+ force_discontinuity,
+ )
recordings = (
Recordings.select(
Recordings.path,
@@ -862,6 +874,14 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
recording: Recordings
for recording in recordings:
+ logger.debug(
+ "VOD: processing recording: %s start=%s end=%s duration=%s",
+ recording.path,
+ recording.start_time,
+ recording.end_time,
+ recording.duration,
+ )
+
clip = {"type": "source", "path": recording.path}
duration = int(recording.duration * 1000)
@@ -870,6 +890,11 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
inpoint = int((start_ts - recording.start_time) * 1000)
clip["clipFrom"] = inpoint
duration -= inpoint
+ logger.debug(
+ "VOD: applied clipFrom %sms to %s",
+ inpoint,
+ recording.path,
+ )
# adjust end if recording.end_time is after end_ts
if recording.end_time > end_ts:
@@ -877,12 +902,23 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
if duration < min_duration_ms:
# skip if the clip has no valid duration (too short to contain frames)
+ logger.debug(
+ "VOD: skipping recording %s - resulting duration %sms too short",
+ recording.path,
+ duration,
+ )
continue
if min_duration_ms <= duration < max_duration_ms:
clip["keyFrameDurations"] = [duration]
clips.append(clip)
durations.append(duration)
+ logger.debug(
+ "VOD: added clip %s duration_ms=%s clipFrom=%s",
+ recording.path,
+ duration,
+ clip.get("clipFrom"),
+ )
else:
logger.warning(f"Recording clip is missing or empty: {recording.path}")
@@ -902,7 +938,7 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
return JSONResponse(
content={
"cache": hour_ago.timestamp() > start_ts,
- "discontinuity": False,
+ "discontinuity": force_discontinuity,
"consistentSequenceMediaInfo": True,
"durations": durations,
"segment_duration": max(durations),
@@ -986,6 +1022,19 @@ async def vod_event(
return vod_response
+@router.get(
+ "/vod/clip/{camera_name}/start/{start_ts}/end/{end_ts}",
+ dependencies=[Depends(require_camera_access)],
+ description="Returns an HLS playlist for a timestamp range with HLS discontinuity enabled. Append /master.m3u8 or /index.m3u8 for HLS playback.",
+)
+async def vod_clip(
+ camera_name: str,
+ start_ts: float,
+ end_ts: float,
+):
+ return await vod_ts(camera_name, start_ts, end_ts, force_discontinuity=True)
+
+
@router.get(
"/events/{event_id}/snapshot.jpg",
description="Returns a snapshot image for the specified object id. NOTE: The query params only take affect while the event is in-progress. Once the event has ended the snapshot configuration is used.",
diff --git a/web/src/components/overlay/detail/TrackingDetails.tsx b/web/src/components/overlay/detail/TrackingDetails.tsx
index 2cdd330ac..28a462487 100644
--- a/web/src/components/overlay/detail/TrackingDetails.tsx
+++ b/web/src/components/overlay/detail/TrackingDetails.tsx
@@ -446,7 +446,7 @@ export function TrackingDetails({
(event.end_time ?? Date.now() / 1000) + annotationOffset / 1000;
const startTime = eventStartRecord - REVIEW_PADDING;
const endTime = eventEndRecord + REVIEW_PADDING;
- const playlist = `${baseUrl}vod/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;
+ const playlist = `${baseUrl}vod/clip/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;
return {
playlist,
@@ -559,7 +559,6 @@ export function TrackingDetails({
isDetailMode={true}
camera={event.camera}
currentTimeOverride={currentTime}
- enableGapControllerRecovery={true}
/>
{isVideoLoading && (
diff --git a/web/src/components/player/HlsVideoPlayer.tsx b/web/src/components/player/HlsVideoPlayer.tsx
index aabd93ac8..b254b7f8b 100644
--- a/web/src/components/player/HlsVideoPlayer.tsx
+++ b/web/src/components/player/HlsVideoPlayer.tsx
@@ -57,7 +57,6 @@ type HlsVideoPlayerProps = {
isDetailMode?: boolean;
camera?: string;
currentTimeOverride?: number;
- enableGapControllerRecovery?: boolean;
};
export default function HlsVideoPlayer({
@@ -82,7 +81,6 @@ export default function HlsVideoPlayer({
isDetailMode = false,
camera,
currentTimeOverride,
- enableGapControllerRecovery = false,
}: HlsVideoPlayerProps) {
const { t } = useTranslation("components/player");
const { data: config } = useSWR("config");
@@ -173,21 +171,12 @@ export default function HlsVideoPlayer({
}
// Base HLS configuration
- const baseConfig: Partial<HlsConfig> = {
+ const hlsConfig: Partial<HlsConfig> = {
maxBufferLength: 10,
maxBufferSize: 20 * 1000 * 1000,
startPosition: currentSource.startPosition,
};
- const hlsConfig = { ...baseConfig };
-
- if (enableGapControllerRecovery) {
- hlsConfig.highBufferWatchdogPeriod = 1; // Check for stalls every 1 second (default: 3)
- hlsConfig.nudgeOffset = 0.2; // Nudge playhead forward 0.2s when stalled (default: 0.1)
- hlsConfig.nudgeMaxRetry = 5; // Try up to 5 nudges before giving up (default: 3)
- hlsConfig.maxBufferHole = 0.5; // Tolerate up to 0.5s gaps between fragments (default: 0.1)
- }
-
hlsRef.current = new Hls(hlsConfig);
hlsRef.current.attachMedia(videoRef.current);
hlsRef.current.loadSource(currentSource.playlist);
@@ -201,13 +190,7 @@ export default function HlsVideoPlayer({
hlsRef.current.destroy();
}
};
- }, [
- videoRef,
- hlsRef,
- useHlsCompat,
- currentSource,
- enableGapControllerRecovery,
- ]);
+ }, [videoRef, hlsRef, useHlsCompat, currentSource]);
// state handling
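
After this change every consumer of HlsVideoPlayer gets the same base configuration. A minimal sketch of the resulting setup, assuming hls.js with its exported HlsConfig type and using placeholder names for the component's refs and currentSource fields: the buffer values are the ones retained in the diff, and the stall-recovery tuning (highBufferWatchdogPeriod, nudgeOffset, nudgeMaxRetry, maxBufferHole) is dropped, presumably because the discontinuity-tagged clip playlists let hls.js handle timestamp jumps between recording files without nudge-based recovery.

    import Hls, { type HlsConfig } from "hls.js";

    // Sketch of the post-change setup; video, playlist, and startPosition stand in
    // for videoRef.current, currentSource.playlist, and currentSource.startPosition.
    function attachPlayer(video: HTMLVideoElement, playlist: string, startPosition: number): Hls {
      const hlsConfig: Partial<HlsConfig> = {
        maxBufferLength: 10,
        maxBufferSize: 20 * 1000 * 1000,
        startPosition,
      };

      const hls = new Hls(hlsConfig);
      hls.attachMedia(video);
      hls.loadSource(playlist);
      return hls; // the caller destroys it on cleanup, as the effect's teardown does
    }
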