diff --git a/apps/desktop/src/routes/(window-chrome)/(main).tsx b/apps/desktop/src/routes/(window-chrome)/(main).tsx
index 3f1be75165..79062b2702 100644
--- a/apps/desktop/src/routes/(window-chrome)/(main).tsx
+++ b/apps/desktop/src/routes/(window-chrome)/(main).tsx
@@ -15,7 +15,6 @@ import {
 	Suspense,
 } from "solid-js";
 import { createStore, reconcile } from "solid-js/store";
-
 import Mode from "~/components/Mode";
 import Tooltip from "~/components/Tooltip";
 import { identifyUser, trackEvent } from "~/utils/analytics";
@@ -37,6 +36,9 @@ import {
 	type RecordingMode,
 	type ScreenCaptureTarget,
 } from "~/utils/tauri";
+import IconCapLogoFull from "~icons/cap/logo-full";
+import IconCapLogoFullDark from "~icons/cap/logo-full-dark";
+import IconLucideBug from "~icons/lucide/bug";
 
 function getWindowSize() {
 	return {
diff --git a/apps/desktop/src/routes/(window-chrome)/settings/feedback.tsx b/apps/desktop/src/routes/(window-chrome)/settings/feedback.tsx
index 806b7ad120..ae2b62d755 100644
--- a/apps/desktop/src/routes/(window-chrome)/settings/feedback.tsx
+++ b/apps/desktop/src/routes/(window-chrome)/settings/feedback.tsx
@@ -130,53 +130,70 @@ export default function FeedbackTab() {

} > - {(diag) => ( -
- - {(ver) => ( -
-

Operating System

-

- {ver().displayName} -

-
- )} -
- -
-

Capture Support

-
- - Screen Capture:{" "} - {diag().screenCaptureSupported - ? "Supported" - : "Not Supported"} - -
-
+			{(diag) => {
+				const d = diag();
+				const osVersion =
+					"macosVersion" in d
+						? d.macosVersion
+						: "windowsVersion" in d
+							? d.windowsVersion
+							: null;
+				const captureSupported =
+					"screenCaptureSupported" in d
+						? d.screenCaptureSupported
+						: "graphicsCaptureSupported" in d
+							? d.graphicsCaptureSupported
+							: false;
+				return (
+
+ + {(ver) => ( +
+

+ Operating System +

+

+ {(ver() as { displayName: string }).displayName} +

+
+ )} +
- 0}>
-

Available Encoders

-
- - {(encoder) => ( - - {encoder} - - )} - +

Capture Support

+
+ + Screen Capture:{" "} + {captureSupported ? "Supported" : "Not Supported"} +
- -
- )} + + 0}> +
+

+ Available Encoders +

+
+ + {(encoder) => ( + + {encoder} + + )} + +
+
+
+
+				);
+			}}
diff --git a/apps/desktop/src/routes/(window-chrome)/settings/integrations/index.tsx b/apps/desktop/src/routes/(window-chrome)/settings/integrations/index.tsx index 3581b30e07..491e2752bd 100644 --- a/apps/desktop/src/routes/(window-chrome)/settings/integrations/index.tsx +++ b/apps/desktop/src/routes/(window-chrome)/settings/integrations/index.tsx @@ -1,6 +1,7 @@ import { Button } from "@cap/ui-solid"; import { useNavigate } from "@solidjs/router"; import { For, onMount } from "solid-js"; +import IconLucideDatabase from "~icons/lucide/database"; import "@total-typescript/ts-reset/filter-boolean"; import { authStore } from "~/store"; diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index 18ba0fbe66..b41da5c469 100644 --- a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -364,6 +364,7 @@ uploadProgressEvent: "upload-progress-event" /** user-defined types **/ +export type AllGpusInfo = { gpus: GpuInfoDiag[]; primaryGpuIndex: number | null; isMultiGpuSystem: boolean; hasDiscreteGpu: boolean } export type Annotation = { id: string; type: AnnotationType; x: number; y: number; width: number; height: number; strokeColor: string; strokeWidth: number; fillColor: string; opacity: number; rotation: number; text: string | null; maskType?: MaskType | null; maskLevel?: number | null } export type AnnotationType = "arrow" | "circle" | "rectangle" | "text" | "mask" export type AppTheme = "system" | "light" | "dark" @@ -431,6 +432,7 @@ quality: number | null; * Whether to prioritize speed over quality (default: false) */ fast: boolean | null } +export type GpuInfoDiag = { vendor: string; description: string; dedicatedVideoMemoryMb: number; adapterIndex: number; isSoftwareAdapter: boolean; isBasicRenderDriver: boolean; supportsHardwareEncoding: boolean } export type HapticPattern = "alignment" | "levelChange" | "generic" export type HapticPerformanceTime = "default" | "now" | "drawCompleted" export type Hotkey = { code: string; meta: boolean; ctrl: boolean; alt: boolean; shift: boolean } @@ -443,7 +445,6 @@ export type JsonValue = [T] export type LogicalBounds = { position: LogicalPosition; size: LogicalSize } export type LogicalPosition = { x: number; y: number } export type LogicalSize = { width: number; height: number } -export type MacOSVersionInfo = { displayName: string } export type MainWindowRecordingStartBehaviour = "close" | "minimise" export type MaskKeyframes = { position?: MaskVectorKeyframe[]; size?: MaskVectorKeyframe[]; intensity?: MaskScalarKeyframe[] } export type MaskKind = "sensitive" | "highlight" @@ -486,6 +487,7 @@ export type RecordingStatus = "pending" | "recording" export type RecordingStopped = null export type RecordingTargetMode = "display" | "window" | "area" export type RenderFrameEvent = { frame_number: number; fps: number; resolution_base: XY } +export type RenderingStatus = { isUsingSoftwareRendering: boolean; isUsingBasicRenderDriver: boolean; hardwareEncodingAvailable: boolean; warningMessage: string | null } export type RequestOpenRecordingPicker = { target_mode: RecordingTargetMode | null } export type RequestOpenSettings = { page: string } export type RequestScreenCapturePrewarm = { force?: boolean } @@ -506,7 +508,7 @@ export type StartRecordingInputs = { capture_target: ScreenCaptureTarget; captur export type StereoMode = "stereo" | "monoL" | "monoR" export type StudioRecordingMeta = { segment: SingleSegment } | { inner: MultipleSegments } export type StudioRecordingStatus = { status: "InProgress" } | { status: 
"NeedsRemux" } | { status: "Failed"; error: string } | { status: "Complete" } -export type SystemDiagnostics = { macosVersion: MacOSVersionInfo | null; availableEncoders: string[]; screenCaptureSupported: boolean } +export type SystemDiagnostics = { windowsVersion: WindowsVersionInfo | null; gpuInfo: GpuInfoDiag | null; allGpus: AllGpusInfo | null; renderingStatus: RenderingStatus; availableEncoders: string[]; graphicsCaptureSupported: boolean; d3D11VideoProcessorAvailable: boolean } export type TargetUnderCursor = { display_id: DisplayId | null; window: WindowUnderCursor | null } export type TextSegment = { start: number; end: number; enabled?: boolean; content?: string; center?: XY; size?: XY; fontFamily?: string; fontSize?: number; fontWeight?: number; italic?: boolean; color?: string; fadeDuration?: number } export type TimelineConfiguration = { segments: TimelineSegment[]; zoomSegments: ZoomSegment[]; sceneSegments?: SceneSegment[]; maskSegments?: MaskSegment[]; textSegments?: TextSegment[] } @@ -523,6 +525,7 @@ export type VideoUploadInfo = { id: string; link: string; config: S3UploadMeta } export type WindowExclusion = { bundleIdentifier?: string | null; ownerName?: string | null; windowTitle?: string | null } export type WindowId = string export type WindowUnderCursor = { id: WindowId; app_name: string; bounds: LogicalBounds } +export type WindowsVersionInfo = { major: number; minor: number; build: number; displayName: string; meetsRequirements: boolean; isWindows11: boolean } export type XY = { x: T; y: T } export type ZoomMode = "auto" | { manual: { x: number; y: number } } export type ZoomSegment = { start: number; end: number; amount: number; mode: ZoomMode } diff --git a/crates/enc-ffmpeg/src/mux/segmented_stream.rs b/crates/enc-ffmpeg/src/mux/segmented_stream.rs index 91df680cc2..0ab253b0aa 100644 --- a/crates/enc-ffmpeg/src/mux/segmented_stream.rs +++ b/crates/enc-ffmpeg/src/mux/segmented_stream.rs @@ -148,6 +148,19 @@ impl SegmentedVideoEncoder { let mut output = format::output_as(&manifest_path, "dash")?; + let init_seg_path = base_path.join(INIT_SEGMENT_NAME); + let media_seg_pattern = base_path.join("segment_$Number%03d$.m4s"); + + #[cfg(windows)] + let init_seg_str = init_seg_path.to_string_lossy().replace('\\', "/"); + #[cfg(windows)] + let media_seg_str = media_seg_pattern.to_string_lossy().replace('\\', "/"); + + #[cfg(not(windows))] + let init_seg_str = init_seg_path.to_string_lossy().to_string(); + #[cfg(not(windows))] + let media_seg_str = media_seg_pattern.to_string_lossy().to_string(); + unsafe { let opts = output.as_mut_ptr(); @@ -157,8 +170,8 @@ impl SegmentedVideoEncoder { ffmpeg::ffi::av_opt_set((*opts).priv_data, k.as_ptr(), v.as_ptr(), 0); }; - set_opt("init_seg_name", INIT_SEGMENT_NAME); - set_opt("media_seg_name", "segment_$Number%03d$.m4s"); + set_opt("init_seg_name", &init_seg_str); + set_opt("media_seg_name", &media_seg_str); set_opt( "seg_duration", &config.segment_duration.as_secs_f64().to_string(), diff --git a/crates/recording/src/capture_pipeline.rs b/crates/recording/src/capture_pipeline.rs index 7f745d3470..60f97be63e 100644 --- a/crates/recording/src/capture_pipeline.rs +++ b/crates/recording/src/capture_pipeline.rs @@ -8,7 +8,11 @@ use crate::{ #[cfg(target_os = "macos")] use crate::output_pipeline::{MacOSFragmentedM4SMuxer, MacOSFragmentedM4SMuxerConfig}; +#[cfg(windows)] +use crate::output_pipeline::{WindowsFragmentedM4SMuxer, WindowsFragmentedM4SMuxerConfig}; use anyhow::anyhow; +#[cfg(windows)] +use cap_enc_ffmpeg::h264::H264Preset; use 
cap_timestamp::Timestamps; use std::{path::PathBuf, sync::Arc}; @@ -136,11 +140,9 @@ impl MakeCapturePipeline for screen_capture::Direct3DCapture { output_path: PathBuf, start_time: Timestamps, fragmented: bool, - _shared_pause_state: Option, + shared_pause_state: Option, encoder_preferences: EncoderPreferences, ) -> anyhow::Result { - let d3d_device = screen_capture.d3d_device.clone(); - if fragmented { let fragments_dir = output_path .parent() @@ -150,17 +152,15 @@ impl MakeCapturePipeline for screen_capture::Direct3DCapture { OutputPipeline::builder(fragments_dir) .with_video::(screen_capture) .with_timestamps(start_time) - .build::(WindowsSegmentedMuxerConfig { - pixel_format: screen_capture::Direct3DCapture::PIXEL_FORMAT.as_dxgi(), - d3d_device, - bitrate_multiplier: 0.15f32, - frame_rate: 30u32, - output_size: None, - encoder_preferences, + .build::(WindowsFragmentedM4SMuxerConfig { segment_duration: std::time::Duration::from_secs(3), + preset: H264Preset::Ultrafast, + output_size: None, + shared_pause_state, }) .await } else { + let d3d_device = screen_capture.d3d_device.clone(); OutputPipeline::builder(output_path.clone()) .with_video::(screen_capture) .with_timestamps(start_time) diff --git a/crates/recording/src/output_pipeline/mod.rs b/crates/recording/src/output_pipeline/mod.rs index 2f2bf6d2f1..43d2ed5c24 100644 --- a/crates/recording/src/output_pipeline/mod.rs +++ b/crates/recording/src/output_pipeline/mod.rs @@ -29,3 +29,8 @@ pub use win_segmented::*; mod win_segmented_camera; #[cfg(windows)] pub use win_segmented_camera::*; + +#[cfg(windows)] +mod win_fragmented_m4s; +#[cfg(windows)] +pub use win_fragmented_m4s::*; diff --git a/crates/recording/src/output_pipeline/win_fragmented_m4s.rs b/crates/recording/src/output_pipeline/win_fragmented_m4s.rs new file mode 100644 index 0000000000..9479b08096 --- /dev/null +++ b/crates/recording/src/output_pipeline/win_fragmented_m4s.rs @@ -0,0 +1,688 @@ +use crate::{ + AudioFrame, AudioMuxer, Muxer, SharedPauseState, TaskPool, VideoMuxer, + output_pipeline::{NativeCameraFrame, camera_frame_to_ffmpeg}, + screen_capture, +}; +use anyhow::{Context, anyhow}; +use cap_enc_ffmpeg::h264::{H264EncoderBuilder, H264Preset}; +use cap_enc_ffmpeg::segmented_stream::{SegmentedVideoEncoder, SegmentedVideoEncoderConfig}; +use cap_media_info::{AudioInfo, VideoInfo}; +use scap_ffmpeg::AsFFmpeg; +use std::{ + path::PathBuf, + sync::{ + Arc, Mutex, + atomic::AtomicBool, + mpsc::{SyncSender, sync_channel}, + }, + thread::JoinHandle, + time::Duration, +}; +use tracing::*; + +fn get_muxer_buffer_size() -> usize { + std::env::var("CAP_MUXER_BUFFER_SIZE") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(3) +} + +struct FrameDropTracker { + drops_in_window: u32, + frames_in_window: u32, + total_drops: u64, + total_frames: u64, + last_check: std::time::Instant, +} + +impl FrameDropTracker { + fn new() -> Self { + Self { + drops_in_window: 0, + frames_in_window: 0, + total_drops: 0, + total_frames: 0, + last_check: std::time::Instant::now(), + } + } + + fn record_frame(&mut self) { + self.frames_in_window += 1; + self.total_frames += 1; + self.check_drop_rate(); + } + + fn record_drop(&mut self) { + self.drops_in_window += 1; + self.total_drops += 1; + self.check_drop_rate(); + } + + fn check_drop_rate(&mut self) { + if self.last_check.elapsed() >= Duration::from_secs(5) { + let total_in_window = self.frames_in_window + self.drops_in_window; + if total_in_window > 0 { + let drop_rate = 100.0 * self.drops_in_window as f64 / total_in_window as f64; + if drop_rate 
> 5.0 { + warn!( + frames = self.frames_in_window, + drops = self.drops_in_window, + drop_rate_pct = format!("{:.1}%", drop_rate), + total_frames = self.total_frames, + total_drops = self.total_drops, + "Windows M4S muxer frame drop rate exceeds 5% threshold" + ); + } else if self.drops_in_window > 0 { + debug!( + frames = self.frames_in_window, + drops = self.drops_in_window, + drop_rate_pct = format!("{:.1}%", drop_rate), + "Windows M4S muxer frame stats" + ); + } + } + self.drops_in_window = 0; + self.frames_in_window = 0; + self.last_check = std::time::Instant::now(); + } + } +} + +struct EncoderState { + video_tx: SyncSender>, + encoder: Arc>, + encoder_handle: Option>>, +} + +pub struct WindowsFragmentedM4SMuxer { + base_path: PathBuf, + video_config: VideoInfo, + segment_duration: Duration, + preset: H264Preset, + output_size: Option<(u32, u32)>, + state: Option, + pause: SharedPauseState, + frame_drops: FrameDropTracker, + started: bool, +} + +pub struct WindowsFragmentedM4SMuxerConfig { + pub segment_duration: Duration, + pub preset: H264Preset, + pub output_size: Option<(u32, u32)>, + pub shared_pause_state: Option, +} + +impl Default for WindowsFragmentedM4SMuxerConfig { + fn default() -> Self { + Self { + segment_duration: Duration::from_secs(3), + preset: H264Preset::Ultrafast, + output_size: None, + shared_pause_state: None, + } + } +} + +impl Muxer for WindowsFragmentedM4SMuxer { + type Config = WindowsFragmentedM4SMuxerConfig; + + async fn setup( + config: Self::Config, + output_path: PathBuf, + video_config: Option, + _audio_config: Option, + pause_flag: Arc, + _tasks: &mut TaskPool, + ) -> anyhow::Result + where + Self: Sized, + { + let video_config = + video_config.ok_or_else(|| anyhow!("invariant: video config expected"))?; + + std::fs::create_dir_all(&output_path) + .with_context(|| format!("Failed to create segments directory: {output_path:?}"))?; + + let pause = config + .shared_pause_state + .unwrap_or_else(|| SharedPauseState::new(pause_flag)); + + Ok(Self { + base_path: output_path, + video_config, + segment_duration: config.segment_duration, + preset: config.preset, + output_size: config.output_size, + state: None, + pause, + frame_drops: FrameDropTracker::new(), + started: false, + }) + } + + fn stop(&mut self) { + if let Some(state) = &self.state + && let Err(e) = state.video_tx.send(None) + { + trace!("Windows M4S encoder channel already closed during stop: {e}"); + } + } + + fn finish(&mut self, timestamp: Duration) -> anyhow::Result> { + if let Some(mut state) = self.state.take() { + if let Err(e) = state.video_tx.send(None) { + trace!("Windows M4S encoder channel already closed during finish: {e}"); + } + + if let Some(handle) = state.encoder_handle.take() { + let timeout = Duration::from_secs(5); + let start = std::time::Instant::now(); + loop { + if handle.is_finished() { + match handle.join() { + Err(panic_payload) => { + warn!( + "Windows M4S encoder thread panicked during finish: {:?}", + panic_payload + ); + } + Ok(Err(e)) => { + warn!("Windows M4S encoder thread returned error: {e}"); + } + Ok(Ok(())) => {} + } + break; + } + if start.elapsed() > timeout { + warn!( + "Windows M4S encoder thread did not finish within {:?}, abandoning", + timeout + ); + break; + } + std::thread::sleep(Duration::from_millis(50)); + } + } + + if let Ok(mut encoder) = state.encoder.lock() + && let Err(e) = encoder.finish_with_timestamp(timestamp) + { + warn!("Failed to finish segmented encoder: {e}"); + } + } + + Ok(Ok(())) + } +} + +impl WindowsFragmentedM4SMuxer { + fn 
start_encoder(&mut self) -> anyhow::Result<()> { + let buffer_size = get_muxer_buffer_size(); + debug!( + buffer_size = buffer_size, + "Windows M4S muxer encoder channel buffer size" + ); + + let (video_tx, video_rx) = + sync_channel::>(buffer_size); + let (ready_tx, ready_rx) = sync_channel::>(1); + + let encoder_config = SegmentedVideoEncoderConfig { + segment_duration: self.segment_duration, + preset: self.preset, + bpp: H264EncoderBuilder::QUALITY_BPP, + output_size: self.output_size, + }; + + let encoder = + SegmentedVideoEncoder::init(self.base_path.clone(), self.video_config, encoder_config)?; + let encoder = Arc::new(Mutex::new(encoder)); + let encoder_clone = encoder.clone(); + + let encoder_handle = std::thread::Builder::new() + .name("win-m4s-segment-encoder".to_string()) + .spawn(move || { + if ready_tx.send(Ok(())).is_err() { + return Err(anyhow!("Failed to send ready signal - receiver dropped")); + } + + let mut slow_convert_count = 0u32; + let mut slow_encode_count = 0u32; + let mut total_frames = 0u64; + const SLOW_THRESHOLD_MS: u128 = 5; + + while let Ok(Some((d3d_frame, timestamp))) = video_rx.recv() { + let convert_start = std::time::Instant::now(); + + let ffmpeg_frame_result = d3d_frame.as_ffmpeg(); + let convert_elapsed_ms = convert_start.elapsed().as_millis(); + + if convert_elapsed_ms > SLOW_THRESHOLD_MS { + slow_convert_count += 1; + if slow_convert_count <= 5 || slow_convert_count.is_multiple_of(100) { + debug!( + elapsed_ms = convert_elapsed_ms, + count = slow_convert_count, + "D3D11 frame conversion exceeded {}ms threshold", + SLOW_THRESHOLD_MS + ); + } + } + + match ffmpeg_frame_result { + Ok(ffmpeg_frame) => { + let encode_start = std::time::Instant::now(); + + if let Ok(mut encoder) = encoder_clone.lock() + && let Err(e) = encoder.queue_frame(ffmpeg_frame, timestamp) + { + warn!("Failed to encode frame: {e}"); + } + + let encode_elapsed_ms = encode_start.elapsed().as_millis(); + + if encode_elapsed_ms > SLOW_THRESHOLD_MS { + slow_encode_count += 1; + if slow_encode_count <= 5 || slow_encode_count.is_multiple_of(100) { + debug!( + elapsed_ms = encode_elapsed_ms, + count = slow_encode_count, + "encoder.queue_frame exceeded {}ms threshold", + SLOW_THRESHOLD_MS + ); + } + } + } + Err(e) => { + warn!("Failed to convert D3D11 frame to FFmpeg: {e:?}"); + } + } + + total_frames += 1; + } + + if total_frames > 0 { + debug!( + total_frames = total_frames, + slow_converts = slow_convert_count, + slow_encodes = slow_encode_count, + slow_convert_pct = format!( + "{:.1}%", + 100.0 * slow_convert_count as f64 / total_frames as f64 + ), + slow_encode_pct = format!( + "{:.1}%", + 100.0 * slow_encode_count as f64 / total_frames as f64 + ), + "Windows M4S encoder timing summary (using SegmentedVideoEncoder)" + ); + } + + Ok(()) + })?; + + ready_rx + .recv() + .map_err(|_| anyhow!("Windows M4S encoder thread ended unexpectedly"))??; + + self.state = Some(EncoderState { + video_tx, + encoder, + encoder_handle: Some(encoder_handle), + }); + + self.started = true; + + info!( + path = %self.base_path.display(), + "Started Windows M4S fragmented video encoder" + ); + + Ok(()) + } +} + +impl VideoMuxer for WindowsFragmentedM4SMuxer { + type VideoFrame = screen_capture::VideoFrame; + + fn send_video_frame( + &mut self, + frame: Self::VideoFrame, + timestamp: Duration, + ) -> anyhow::Result<()> { + let Some(adjusted_timestamp) = self.pause.adjust(timestamp)? 
else { + return Ok(()); + }; + + if !self.started { + self.start_encoder()?; + } + + if let Some(state) = &self.state { + match state + .video_tx + .try_send(Some((frame.frame, adjusted_timestamp))) + { + Ok(()) => { + self.frame_drops.record_frame(); + } + Err(e) => match e { + std::sync::mpsc::TrySendError::Full(_) => { + self.frame_drops.record_drop(); + } + std::sync::mpsc::TrySendError::Disconnected(_) => { + trace!("Windows M4S encoder channel disconnected"); + } + }, + } + } + + Ok(()) + } +} + +impl AudioMuxer for WindowsFragmentedM4SMuxer { + fn send_audio_frame(&mut self, _frame: AudioFrame, _timestamp: Duration) -> anyhow::Result<()> { + Ok(()) + } +} + +struct CameraEncoderState { + video_tx: SyncSender>, + encoder: Arc>, + encoder_handle: Option>>, +} + +pub struct WindowsFragmentedM4SCameraMuxer { + base_path: PathBuf, + video_config: VideoInfo, + segment_duration: Duration, + preset: H264Preset, + output_size: Option<(u32, u32)>, + state: Option, + pause: SharedPauseState, + frame_drops: FrameDropTracker, + started: bool, +} + +pub struct WindowsFragmentedM4SCameraMuxerConfig { + pub segment_duration: Duration, + pub preset: H264Preset, + pub output_size: Option<(u32, u32)>, + pub shared_pause_state: Option, +} + +impl Default for WindowsFragmentedM4SCameraMuxerConfig { + fn default() -> Self { + Self { + segment_duration: Duration::from_secs(3), + preset: H264Preset::Ultrafast, + output_size: None, + shared_pause_state: None, + } + } +} + +impl Muxer for WindowsFragmentedM4SCameraMuxer { + type Config = WindowsFragmentedM4SCameraMuxerConfig; + + async fn setup( + config: Self::Config, + output_path: PathBuf, + video_config: Option, + _audio_config: Option, + pause_flag: Arc, + _tasks: &mut TaskPool, + ) -> anyhow::Result + where + Self: Sized, + { + let video_config = + video_config.ok_or_else(|| anyhow!("invariant: video config expected for camera"))?; + + std::fs::create_dir_all(&output_path).with_context(|| { + format!("Failed to create camera segments directory: {output_path:?}") + })?; + + let pause = config + .shared_pause_state + .unwrap_or_else(|| SharedPauseState::new(pause_flag)); + + Ok(Self { + base_path: output_path, + video_config, + segment_duration: config.segment_duration, + preset: config.preset, + output_size: config.output_size, + state: None, + pause, + frame_drops: FrameDropTracker::new(), + started: false, + }) + } + + fn stop(&mut self) { + if let Some(state) = &self.state + && let Err(e) = state.video_tx.send(None) + { + trace!("Windows M4S camera encoder channel already closed during stop: {e}"); + } + } + + fn finish(&mut self, timestamp: Duration) -> anyhow::Result> { + if let Some(mut state) = self.state.take() { + if let Err(e) = state.video_tx.send(None) { + trace!("Windows M4S camera encoder channel already closed during finish: {e}"); + } + + if let Some(handle) = state.encoder_handle.take() { + let timeout = Duration::from_secs(5); + let start = std::time::Instant::now(); + loop { + if handle.is_finished() { + match handle.join() { + Err(panic_payload) => { + warn!( + "Windows M4S camera encoder thread panicked during finish: {:?}", + panic_payload + ); + } + Ok(Err(e)) => { + warn!("Windows M4S camera encoder thread returned error: {e}"); + } + Ok(Ok(())) => {} + } + break; + } + if start.elapsed() > timeout { + warn!( + "Windows M4S camera encoder thread did not finish within {:?}, abandoning", + timeout + ); + break; + } + std::thread::sleep(Duration::from_millis(50)); + } + } + + if let Ok(mut encoder) = state.encoder.lock() + && let 
Err(e) = encoder.finish_with_timestamp(timestamp) + { + warn!("Failed to finish camera segmented encoder: {e}"); + } + } + + Ok(Ok(())) + } +} + +impl WindowsFragmentedM4SCameraMuxer { + fn start_encoder(&mut self) -> anyhow::Result<()> { + let buffer_size = get_muxer_buffer_size(); + debug!( + buffer_size = buffer_size, + "Windows M4S camera muxer encoder channel buffer size" + ); + + let (video_tx, video_rx) = + sync_channel::>(buffer_size); + let (ready_tx, ready_rx) = sync_channel::>(1); + + let encoder_config = SegmentedVideoEncoderConfig { + segment_duration: self.segment_duration, + preset: self.preset, + bpp: H264EncoderBuilder::QUALITY_BPP, + output_size: self.output_size, + }; + + let encoder = + SegmentedVideoEncoder::init(self.base_path.clone(), self.video_config, encoder_config)?; + let encoder = Arc::new(Mutex::new(encoder)); + let encoder_clone = encoder.clone(); + + let encoder_handle = std::thread::Builder::new() + .name("win-m4s-camera-segment-encoder".to_string()) + .spawn(move || { + if ready_tx.send(Ok(())).is_err() { + return Err(anyhow!( + "Failed to send ready signal - camera receiver dropped" + )); + } + + let mut slow_convert_count = 0u32; + let mut slow_encode_count = 0u32; + let mut total_frames = 0u64; + const SLOW_THRESHOLD_MS: u128 = 5; + + while let Ok(Some((camera_frame, timestamp))) = video_rx.recv() { + let convert_start = std::time::Instant::now(); + let ffmpeg_frame_result = camera_frame_to_ffmpeg(&camera_frame); + let convert_elapsed_ms = convert_start.elapsed().as_millis(); + + if convert_elapsed_ms > SLOW_THRESHOLD_MS { + slow_convert_count += 1; + if slow_convert_count <= 5 || slow_convert_count.is_multiple_of(100) { + debug!( + elapsed_ms = convert_elapsed_ms, + count = slow_convert_count, + "Camera frame conversion exceeded {}ms threshold", + SLOW_THRESHOLD_MS + ); + } + } + + match ffmpeg_frame_result { + Ok(ffmpeg_frame) => { + let encode_start = std::time::Instant::now(); + + if let Ok(mut encoder) = encoder_clone.lock() + && let Err(e) = encoder.queue_frame(ffmpeg_frame, timestamp) + { + warn!("Failed to encode camera frame: {e}"); + } + + let encode_elapsed_ms = encode_start.elapsed().as_millis(); + + if encode_elapsed_ms > SLOW_THRESHOLD_MS { + slow_encode_count += 1; + if slow_encode_count <= 5 || slow_encode_count.is_multiple_of(100) { + debug!( + elapsed_ms = encode_elapsed_ms, + count = slow_encode_count, + "Camera encoder.queue_frame exceeded {}ms threshold", + SLOW_THRESHOLD_MS + ); + } + } + } + Err(e) => { + warn!("Failed to convert camera frame: {e:?}"); + } + } + + total_frames += 1; + } + + if total_frames > 0 { + debug!( + total_frames = total_frames, + slow_converts = slow_convert_count, + slow_encodes = slow_encode_count, + slow_convert_pct = format!( + "{:.1}%", + 100.0 * slow_convert_count as f64 / total_frames as f64 + ), + slow_encode_pct = format!( + "{:.1}%", + 100.0 * slow_encode_count as f64 / total_frames as f64 + ), + "Windows M4S camera encoder timing summary" + ); + } + + Ok(()) + })?; + + ready_rx + .recv() + .map_err(|_| anyhow!("Windows M4S camera encoder thread ended unexpectedly"))??; + + self.state = Some(CameraEncoderState { + video_tx, + encoder, + encoder_handle: Some(encoder_handle), + }); + + self.started = true; + + info!( + path = %self.base_path.display(), + "Started Windows M4S fragmented camera encoder" + ); + + Ok(()) + } +} + +impl VideoMuxer for WindowsFragmentedM4SCameraMuxer { + type VideoFrame = NativeCameraFrame; + + fn send_video_frame( + &mut self, + frame: Self::VideoFrame, + timestamp: 
Duration, + ) -> anyhow::Result<()> { + let Some(adjusted_timestamp) = self.pause.adjust(timestamp)? else { + return Ok(()); + }; + + if !self.started { + self.start_encoder()?; + } + + if let Some(state) = &self.state { + match state.video_tx.try_send(Some((frame, adjusted_timestamp))) { + Ok(()) => { + self.frame_drops.record_frame(); + } + Err(e) => match e { + std::sync::mpsc::TrySendError::Full(_) => { + self.frame_drops.record_drop(); + } + std::sync::mpsc::TrySendError::Disconnected(_) => { + trace!("Windows M4S camera encoder channel disconnected"); + } + }, + } + } + + Ok(()) + } +} + +impl AudioMuxer for WindowsFragmentedM4SCameraMuxer { + fn send_audio_frame(&mut self, _frame: AudioFrame, _timestamp: Duration) -> anyhow::Result<()> { + Ok(()) + } +} diff --git a/crates/recording/src/studio_recording.rs b/crates/recording/src/studio_recording.rs index ae2f50a6ec..bc437d3fbb 100644 --- a/crates/recording/src/studio_recording.rs +++ b/crates/recording/src/studio_recording.rs @@ -19,8 +19,8 @@ use crate::output_pipeline::{ #[cfg(windows)] use crate::output_pipeline::{ - WindowsCameraMuxer, WindowsCameraMuxerConfig, WindowsSegmentedCameraMuxer, - WindowsSegmentedCameraMuxerConfig, + WindowsCameraMuxer, WindowsCameraMuxerConfig, WindowsFragmentedM4SCameraMuxer, + WindowsFragmentedM4SCameraMuxerConfig, }; use anyhow::{Context as _, anyhow, bail}; use cap_media_info::VideoInfo; @@ -877,7 +877,13 @@ async fn create_segment_pipeline( }; #[cfg(windows)] - let shared_pause_state: Option = None; + let shared_pause_state = if fragmented { + Some(SharedPauseState::new(Arc::new( + std::sync::atomic::AtomicBool::new(false), + ))) + } else { + None + }; let screen = ScreenCaptureMethod::make_studio_mode_pipeline( capture_source, @@ -925,8 +931,8 @@ async fn create_segment_pipeline( OutputPipeline::builder(fragments_dir) .with_video::(camera_feed) .with_timestamps(start_time) - .build::(WindowsSegmentedCameraMuxerConfig { - encoder_preferences: encoder_preferences.clone(), + .build::(WindowsFragmentedM4SCameraMuxerConfig { + shared_pause_state: shared_pause_state.clone(), ..Default::default() }) .instrument(error_span!("camera-out")) diff --git a/packages/ui-solid/src/auto-imports.d.ts b/packages/ui-solid/src/auto-imports.d.ts index b44723b447..ad6da52537 100644 --- a/packages/ui-solid/src/auto-imports.d.ts +++ b/packages/ui-solid/src/auto-imports.d.ts @@ -6,10 +6,8 @@ // biome-ignore lint: disable export {} declare global { - const IconCapArrowLeft: typeof import('~icons/cap/arrow-left.jsx')['default'] const IconCapArrows: typeof import('~icons/cap/arrows.jsx')['default'] const IconCapAudioOn: typeof import('~icons/cap/audio-on.jsx')['default'] - const IconCapAuto: typeof import('~icons/cap/auto.jsx')['default'] const IconCapBgBlur: typeof import('~icons/cap/bg-blur.jsx')['default'] const IconCapCamera: typeof import('~icons/cap/camera.jsx')['default'] const IconCapCaptions: typeof import('~icons/cap/captions.jsx')['default'] @@ -27,20 +25,16 @@ declare global { const IconCapCursorWindows: typeof import('~icons/cap/cursor-windows.jsx')['default'] const IconCapEnlarge: typeof import('~icons/cap/enlarge.jsx')['default'] const IconCapFile: typeof import('~icons/cap/file.jsx')['default'] - const IconCapFilm: typeof import('~icons/cap/film.jsx')['default'] const IconCapFilmCut: typeof import('~icons/cap/film-cut.jsx')['default'] const IconCapGauge: typeof import('~icons/cap/gauge.jsx')['default'] const IconCapGear: typeof import('~icons/cap/gear.jsx')['default'] const IconCapHotkeys: typeof 
import('~icons/cap/hotkeys.jsx')['default'] const IconCapImage: typeof import('~icons/cap/image.jsx')['default'] - const IconCapImageFilled: typeof import('~icons/cap/image-filled.jsx')['default'] const IconCapInfo: typeof import('~icons/cap/info.jsx')['default'] const IconCapInstant: typeof import('~icons/cap/instant.jsx')['default'] const IconCapLayout: typeof import('~icons/cap/layout.jsx')['default'] const IconCapLink: typeof import('~icons/cap/link.jsx')['default'] const IconCapLogo: typeof import('~icons/cap/logo.jsx')['default'] - const IconCapLogoFull: typeof import('~icons/cap/logo-full.jsx')['default'] - const IconCapLogoFullDark: typeof import('~icons/cap/logo-full-dark.jsx')['default'] const IconCapMessageBubble: typeof import('~icons/cap/message-bubble.jsx')['default'] const IconCapMicrophone: typeof import('~icons/cap/microphone.jsx')['default'] const IconCapMonitor: typeof import('~icons/cap/monitor.jsx')['default'] @@ -56,7 +50,6 @@ declare global { const IconCapRedo: typeof import('~icons/cap/redo.jsx')['default'] const IconCapRestart: typeof import('~icons/cap/restart.jsx')['default'] const IconCapScissors: typeof import('~icons/cap/scissors.jsx')['default'] - const IconCapScreen: typeof import('~icons/cap/screen.jsx')['default'] const IconCapScreenshot: typeof import('~icons/cap/screenshot.jsx')['default'] const IconCapSettings: typeof import('~icons/cap/settings.jsx')['default'] const IconCapShadow: typeof import('~icons/cap/shadow.jsx')['default'] @@ -73,11 +66,9 @@ declare global { const IconLucideArrowLeft: typeof import('~icons/lucide/arrow-left.jsx')['default'] const IconLucideBell: typeof import('~icons/lucide/bell.jsx')['default'] const IconLucideBoxSelect: typeof import('~icons/lucide/box-select.jsx')['default'] - const IconLucideBug: typeof import('~icons/lucide/bug.jsx')['default'] const IconLucideCheck: typeof import('~icons/lucide/check.jsx')['default'] const IconLucideClapperboard: typeof import('~icons/lucide/clapperboard.jsx')['default'] const IconLucideClock: typeof import('~icons/lucide/clock.jsx')['default'] - const IconLucideDatabase: typeof import('~icons/lucide/database.jsx')['default'] const IconLucideEdit: typeof import('~icons/lucide/edit.jsx')['default'] const IconLucideEyeOff: typeof import('~icons/lucide/eye-off.jsx')['default'] const IconLucideFastForward: typeof import('~icons/lucide/fast-forward.jsx')['default'] @@ -88,7 +79,6 @@ declare global { const IconLucideImage: typeof import('~icons/lucide/image.jsx')['default'] const IconLucideInfo: typeof import('~icons/lucide/info.jsx')['default'] const IconLucideLayout: typeof import('~icons/lucide/layout.jsx')['default'] - const IconLucideLoader2: typeof import('~icons/lucide/loader2.jsx')['default'] const IconLucideLoaderCircle: typeof import('~icons/lucide/loader-circle.jsx')['default'] const IconLucideMaximize: typeof import('~icons/lucide/maximize.jsx')['default'] const IconLucideMaximize2: typeof import('~icons/lucide/maximize2.jsx')['default'] @@ -99,11 +89,9 @@ declare global { const IconLucideRatio: typeof import('~icons/lucide/ratio.jsx')['default'] const IconLucideRectangleHorizontal: typeof import('~icons/lucide/rectangle-horizontal.jsx')['default'] const IconLucideRotateCcw: typeof import('~icons/lucide/rotate-ccw.jsx')['default'] - const IconLucideSave: typeof import('~icons/lucide/save.jsx')['default'] const IconLucideSearch: typeof import('~icons/lucide/search.jsx')['default'] const IconLucideSparkles: typeof import('~icons/lucide/sparkles.jsx')['default'] const 
IconLucideSquarePlay: typeof import('~icons/lucide/square-play.jsx')['default'] - const IconLucideTimer: typeof import('~icons/lucide/timer.jsx')['default'] const IconLucideType: typeof import('~icons/lucide/type.jsx')['default'] const IconLucideUnplug: typeof import('~icons/lucide/unplug.jsx')['default'] const IconLucideVideo: typeof import('~icons/lucide/video.jsx')['default']