diff --git a/apps/cli/src/main.rs b/apps/cli/src/main.rs
index 2230225e..7d5156a2 100644
--- a/apps/cli/src/main.rs
+++ b/apps/cli/src/main.rs
@@ -5,8 +5,8 @@ use cap_media::sources::{get_target_fps, ScreenCaptureTarget};
 use cap_project::{RecordingMeta, XY};
 use cap_recording::RecordingOptions;
 use cap_rendering::RenderVideoConstants;
-use clap::{Args, Parser, Subcommand, ValueEnum};
-use tokio::io::{AsyncBufReadExt, BufReader};
+use clap::{Args, Parser, Subcommand};
+use tokio::io::AsyncBufReadExt;
 use uuid::Uuid;
 
 #[derive(Parser)]
diff --git a/apps/desktop/src-tauri/src/export.rs b/apps/desktop/src-tauri/src/export.rs
index 0fa65775..7235822d 100644
--- a/apps/desktop/src-tauri/src/export.rs
+++ b/apps/desktop/src-tauri/src/export.rs
@@ -45,7 +45,7 @@ pub async fn export_video(
     );
 
     let editor_instance = upsert_editor_instance(&app, video_id.clone()).await;
-    let total_frames = editor_instance.get_total_frames();
+    let total_frames = editor_instance.get_total_frames(fps);
 
     let output_path = editor_instance.meta().output_path();
diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs
index f749d16b..23544acd 100644
--- a/apps/desktop/src-tauri/src/lib.rs
+++ b/apps/desktop/src-tauri/src/lib.rs
@@ -2390,12 +2390,16 @@ impl EventExt for T {}
 
 #[tauri::command(async)]
 #[specta::specta]
-async fn get_editor_total_frames(app: AppHandle, video_id: String) -> Result<u32, String> {
+async fn get_editor_total_frames(
+    app: AppHandle,
+    video_id: String,
+    fps: u32,
+) -> Result<u32, String> {
     let editor_instances = app.state::();
     let instances = editor_instances.lock().await;
     let instance = instances
         .get(&video_id)
         .ok_or_else(|| "Editor instance not found".to_string())?;
-    Ok(instance.get_total_frames())
+    Ok(instance.get_total_frames(fps))
 }
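Taken together, the two hunks above turn `get_total_frames` from a value fixed at instance creation into a function of the caller-supplied `fps`. As a minimal standalone sketch of the arithmetic this enables (the `total_frames` helper is hypothetical; the real implementation, in the `editor_instance.rs` hunks further down, derives `duration` via `get_duration`):

```rust
/// Frames needed to cover `duration_secs` at `fps`. `ceil` ensures a
/// trailing partial frame is still rendered; the old code used `round`,
/// which could drop it.
fn total_frames(duration_secs: f64, fps: u32) -> u32 {
    (fps as f64 * duration_secs).ceil() as u32
}

fn main() {
    // 10.004 s at 60 fps is 600.24 raw frames: `ceil` keeps frame 601,
    // while `round` would have truncated the recording to 600 frames.
    assert_eq!(total_frames(10.004, 60), 601);
}
```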
diff --git a/apps/desktop/src/routes/editor/context.ts b/apps/desktop/src/routes/editor/context.ts
index 3f9a14cd..fd717d9e 100644
--- a/apps/desktop/src/routes/editor/context.ts
+++ b/apps/desktop/src/routes/editor/context.ts
@@ -26,7 +26,7 @@ export type CurrentDialog =
 export type DialogState = { open: false } | ({ open: boolean } & CurrentDialog);
 
-export const FPS = 30;
+export const FPS = 60;
 
 export const [EditorContextProvider, useEditorContext] = createContextProvider(
   (props: {
diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts
index de843bcd..ed4ba54d 100644
--- a/apps/desktop/src/utils/tauri.ts
+++ b/apps/desktop/src/utils/tauri.ts
@@ -161,8 +161,8 @@ async showWindow(window: ShowCapWindow) : Promise<null> {
 async writeClipboardString(text: string) : Promise<null> {
     return await TAURI_INVOKE("write_clipboard_string", { text });
 },
-async getEditorTotalFrames(videoId: string) : Promise<number> {
-    return await TAURI_INVOKE("get_editor_total_frames", { videoId });
+async getEditorTotalFrames(videoId: string, fps: number) : Promise<number> {
+    return await TAURI_INVOKE("get_editor_total_frames", { videoId, fps });
 }
 }
diff --git a/crates/editor/src/editor_instance.rs b/crates/editor/src/editor_instance.rs
index a3dd3cab..65501b7a 100644
--- a/crates/editor/src/editor_instance.rs
+++ b/crates/editor/src/editor_instance.rs
@@ -7,7 +7,7 @@ use cap_media::frame_ws::create_frame_ws;
 use cap_project::RecordingConfig;
 use cap_project::{CursorEvents, ProjectConfiguration, RecordingMeta, XY};
 use cap_rendering::{
-    ProjectRecordings, ProjectUniforms, RecordingSegmentDecoders, RenderOptions,
+    get_duration, ProjectRecordings, ProjectUniforms, RecordingSegmentDecoders, RenderOptions,
     RenderVideoConstants, SegmentVideoPaths,
 };
 use std::ops::Deref;
@@ -32,7 +32,7 @@ pub struct EditorInstance {
     ),
     ws_shutdown: Arc>>>,
     pub segments: Arc>,
-    pub total_frames: u32,
+    meta: RecordingMeta,
 }
@@ -61,13 +61,9 @@ impl EditorInstance {
         }
 
         let meta = cap_project::RecordingMeta::load_for_project(&project_path).unwrap();
+        let project = meta.project_config();
         let recordings = ProjectRecordings::new(&meta);
 
-        // Calculate total frames based on actual video duration and fps
-        let duration = recordings.duration();
-        let fps = recordings.segments[0].display.fps();
-        let total_frames = (duration * fps as f64).round() as u32;
-
         let render_options = RenderOptions {
             screen_size: XY::new(
                 recordings.segments[0].display.width,
@@ -113,10 +109,10 @@ impl EditorInstance {
             })),
             on_state_change: Box::new(on_state_change),
             preview_tx,
-            project_config: watch::channel(meta.project_config()),
+            project_config: watch::channel(project),
             ws_shutdown: Arc::new(StdMutex::new(Some(ws_shutdown))),
             segments: Arc::new(segments),
-            total_frames,
+            meta,
         });
 
         this.state.lock().await.preview_task =
@@ -268,8 +264,15 @@ impl EditorInstance {
         })
     }
 
-    pub fn get_total_frames(&self) -> u32 {
-        self.total_frames
+    pub fn get_total_frames(&self, fps: u32) -> u32 {
+        // Calculate total frames based on actual video duration and fps
+        let duration = get_duration(
+            &self.recordings,
+            &self.meta,
+            &self.project_config.1.borrow(),
+        );
+
+        (fps as f64 * duration).ceil() as u32
     }
 }
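`get_total_frames` now reads the current project configuration through `self.project_config.1.borrow()`, i.e. the receiver half of a `tokio::sync::watch` channel. For readers unfamiliar with that API, a self-contained sketch of the pattern (the string values are placeholders):

```rust
use tokio::sync::watch;

fn main() {
    // A watch channel holds exactly one value: the most recently sent.
    let (tx, rx) = watch::channel("initial project config");
    tx.send("updated project config").unwrap();
    // `borrow()` is a cheap, non-async read of the latest value, which is
    // why `get_total_frames` can stay a plain synchronous method.
    assert_eq!(*rx.borrow(), "updated project config");
}
```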
diff --git a/crates/export/src/lib.rs b/crates/export/src/lib.rs
index 924df088..ed295c90 100644
--- a/crates/export/src/lib.rs
+++ b/crates/export/src/lib.rs
@@ -201,15 +201,16 @@ where
             .next_frame_data(samples, project.timeline.as_ref().map(|t| t))
         {
             let mut frame = audio_info.wrap_frame(&frame_data, 0);
-            let pts = (frame_count as f64 * f64::from(audio_info.sample_rate) / f64::from(fps)) as i64;
+            let pts = (frame_count as f64 * f64::from(audio_info.sample_rate)
+                / f64::from(fps)) as i64;
             frame.set_pts(Some(pts));
-            println!(
-                "Export: Sending audio frame {} with PTS: {:?}, samples: {}, data size: {}",
-                frame_count,
-                frame.pts(),
-                samples,
-                frame_data.len()
-            );
+            // println!(
+            //     "Export: Sending audio frame {} with PTS: {:?}, samples: {}, data size: {}",
+            //     frame_count,
+            //     frame.pts(),
+            //     samples,
+            //     frame_data.len()
+            // );
             Some(frame)
         } else {
             None
@@ -218,10 +219,10 @@ where
             None
         };
 
-        println!(
-            "Export: Processing frame {} (size: {}x{}, padded_bytes_per_row: {})",
-            frame_count, frame.width, frame.height, frame.padded_bytes_per_row
-        );
+        // println!(
+        //     "Export: Processing frame {} (size: {}x{}, padded_bytes_per_row: {})",
+        //     frame_count, frame.width, frame.height, frame.padded_bytes_per_row
+        // );
 
         let mut video_frame = VideoInfo::from_raw(
             RawVideoFormat::Rgba,
@@ -236,11 +237,11 @@ where
         );
         video_frame.set_pts(Some(frame_count as i64));
 
-        println!(
-            "Export: Sending frame {} to encoder (PTS: {:?})",
-            frame_count,
-            video_frame.pts()
-        );
+        // println!(
+        //     "Export: Sending frame {} to encoder (PTS: {:?})",
+        //     frame_count,
+        //     video_frame.pts()
+        // );
 
         frame_tx
             .send(MP4Input {
diff --git a/crates/media/src/encoders/mp4.rs b/crates/media/src/encoders/mp4.rs
index 19460d3a..aac98f18 100644
--- a/crates/media/src/encoders/mp4.rs
+++ b/crates/media/src/encoders/mp4.rs
@@ -163,11 +163,11 @@ impl MP4Encoder {
     }
 
     pub fn queue_video_frame(&mut self, mut frame: FFVideo) {
-        println!(
-            "MP4Encoder: Processing frame {} (input PTS: {:?})",
-            self.video.frame_count,
-            frame.pts()
-        );
+        // println!(
+        //     "MP4Encoder: Processing frame {} (input PTS: {:?})",
+        //     self.video.frame_count,
+        //     frame.pts()
+        // );
         let mut scaler = ffmpeg::software::converter(
             (frame.width(), frame.height()),
             frame.format(),
@@ -181,10 +181,10 @@
         // Set PTS in microseconds (1/1_000_000 second units)
         let pts = frame.pts().unwrap_or_else(|| self.video.frame_count);
         output.set_pts(Some(pts));
-        println!(
-            "MP4Encoder: Setting frame {} PTS to {}",
-            self.video.frame_count, pts
-        );
+        // println!(
+        //     "MP4Encoder: Setting frame {} PTS to {}",
+        //     self.video.frame_count, pts
+        // );
 
         self.video.frame_count += 1;
         self.video.encoder.send_frame(&output).unwrap();
@@ -196,11 +196,11 @@
             return;
         };
 
-        println!(
-            "MP4Encoder: Queueing audio frame with PTS: {:?}, samples: {}",
-            frame.pts(),
-            frame.samples()
-        );
+        // println!(
+        //     "MP4Encoder: Queueing audio frame with PTS: {:?}, samples: {}",
+        //     frame.pts(),
+        //     frame.samples()
+        // );
 
         audio.buffer.consume(frame);
 
@@ -218,11 +218,11 @@
             output.set_pts(Some(pts));
         }
 
-        println!(
-            "MP4Encoder: Sending audio frame with PTS: {:?}, samples: {}",
-            output.pts(),
-            output.samples()
-        );
+        // println!(
+        //     "MP4Encoder: Sending audio frame with PTS: {:?}, samples: {}",
+        //     output.pts(),
+        //     output.samples()
+        // );
 
         // Send frame to encoder
         audio.encoder.send_frame(&output).unwrap();
 
@@ -230,11 +230,11 @@
         // Process any encoded packets
         let mut encoded_packet = FFPacket::empty();
         while audio.encoder.receive_packet(&mut encoded_packet).is_ok() {
-            println!(
-                "MP4Encoder: Writing audio packet with PTS: {:?}, size: {}",
-                encoded_packet.pts(),
-                encoded_packet.size()
-            );
+            // println!(
+            //     "MP4Encoder: Writing audio packet with PTS: {:?}, size: {}",
+            //     encoded_packet.pts(),
+            //     encoded_packet.size()
+            // );
 
             encoded_packet.set_stream(1);
             encoded_packet.rescale_ts(
@@ -257,21 +257,21 @@
             .receive_packet(&mut encoded_packet)
             .is_ok()
         {
-            println!(
-                "MP4Encoder: Got encoded packet with PTS: {:?}, DTS: {:?}",
-                encoded_packet.pts(),
-                encoded_packet.dts()
-            );
+            // println!(
+            //     "MP4Encoder: Got encoded packet with PTS: {:?}, DTS: {:?}",
+            //     encoded_packet.pts(),
+            //     encoded_packet.dts()
+            // );
 
             encoded_packet.set_stream(0); // Video is stream 0
             encoded_packet.rescale_ts(
                 self.video.encoder.time_base(),
                 self.output_ctx.stream(0).unwrap().time_base(),
             );
-            println!(
-                "MP4Encoder: Writing packet with rescaled PTS: {:?}, DTS: {:?}",
-                encoded_packet.pts(),
-                encoded_packet.dts()
-            );
+            // println!(
+            //     "MP4Encoder: Writing packet with rescaled PTS: {:?}, DTS: {:?}",
+            //     encoded_packet.pts(),
+            //     encoded_packet.dts()
+            // );
             encoded_packet
                 .write_interleaved(&mut self.output_ctx)
                 .unwrap();
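The reflowed audio-PTS expression in `crates/export/src/lib.rs` above is where `fps` enters the audio path: the PTS is measured in audio samples, so a video frame index is converted to seconds (divide by `fps`) and then to samples (multiply by the sample rate). As a standalone sketch, with a hypothetical `audio_pts` helper:

```rust
/// PTS, in audio samples, of the audio accompanying video frame
/// `frame_count`: frames -> seconds (/ fps) -> samples (* sample_rate).
fn audio_pts(frame_count: u64, sample_rate: u32, fps: u32) -> i64 {
    (frame_count as f64 * f64::from(sample_rate) / f64::from(fps)) as i64
}

fn main() {
    // Frame 120 at 60 fps is the 2-second mark: 96_000 samples at 48 kHz.
    assert_eq!(audio_pts(120, 48_000, 60), 96_000);
}
```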
diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs
index 73264d87..8028e997 100644
--- a/crates/rendering/src/lib.rs
+++ b/crates/rendering/src/lib.rs
@@ -131,19 +131,8 @@ impl RecordingSegmentDecoders {
         OptionFuture::from(self.camera.as_ref().map(|d| d.get_frame(frame_time)))
     );
 
-    // Create black frames with the correct dimensions
-    let black_screen = vec![0; (1920 * 804 * 4) as usize];
-    let black_camera = vec![0; (1920 * 1080 * 4) as usize];
-
-    // Return frames or black frames as needed
-    Some((
-        screen_frame.unwrap_or_else(|| Arc::new(black_screen)),
-        self.camera.as_ref().map(|_| {
-            camera_frame
-                .flatten()
-                .unwrap_or_else(|| Arc::new(black_camera))
-        }),
-    ))
+    Some((screen_frame?, camera_frame.flatten()))
     }
 }
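The hunk above swaps fabricated 1920x804 / 1920x1080 black buffers for early exit: `?` on an `Option` returns `None` from the enclosing function, so a missing screen frame now ends frame production instead of being silently padded, while a missing camera frame just leaves the camera slot empty. The same plumbing with stand-in types:

```rust
/// Stand-in for the decoder pairing above: `screen?` short-circuits the
/// whole function to `None`; `camera.flatten()` collapses the nested
/// Option<Option<_>> (decoder present? frame decoded?) into one level.
fn pair_frames(
    screen: Option<Vec<u8>>,
    camera: Option<Option<Vec<u8>>>,
) -> Option<(Vec<u8>, Option<Vec<u8>>)> {
    Some((screen?, camera.flatten()))
}

fn main() {
    // No screen frame: the pair is None, no black frame is invented.
    assert!(pair_frames(None, Some(Some(vec![0u8]))).is_none());
    // No camera at all: the screen frame still goes through.
    assert_eq!(pair_frames(Some(vec![1u8]), None), Some((vec![1u8], None)));
}
```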
@@ -182,43 +171,7 @@ pub async fn render_video_to_channel(
     let start_time = Instant::now();
 
     // Get the duration from the timeline if it exists, otherwise use the longest source duration
-    let duration = {
-        let mut max_duration = recordings.duration();
-        println!("Initial screen recording duration: {}", max_duration);
-
-        // Check camera duration if it exists
-        if let Some(camera_path) = meta.content.camera_path() {
-            if let Ok(camera_duration) = recordings.get_source_duration(&camera_path) {
-                println!("Camera recording duration: {}", camera_duration);
-                max_duration = max_duration.max(camera_duration);
-                println!("New max duration after camera check: {}", max_duration);
-            }
-        }
-
-        // If there's a timeline, ensure all segments extend to the max duration
-        if let Some(timeline) = &mut project.timeline {
-            println!("Found timeline with {} segments", timeline.segments.len());
-            for (i, segment) in timeline.segments.iter_mut().enumerate() {
-                println!(
-                    "Segment {} - current end: {}, max_duration: {}",
-                    i, segment.end, max_duration
-                );
-                if segment.end < max_duration {
-                    segment.end = max_duration;
-                    println!("Extended segment {} to new end: {}", i, segment.end);
-                }
-            }
-            let final_duration = timeline.duration();
-            println!(
-                "Final timeline duration after adjustments: {}",
-                final_duration
-            );
-            final_duration
-        } else {
-            println!("No timeline found, using max_duration: {}", max_duration);
-            max_duration
-        }
-    };
+    let duration = get_duration(&recordings, meta, &project);
 
     let total_frames = (fps as f64 * duration).ceil() as u32;
     println!(
@@ -235,12 +188,12 @@
         }
 
         let (time, segment_i) = if let Some(timeline) = &project.timeline {
-            match timeline.get_recording_time(frame_number as f64 / 30_f64) {
+            match timeline.get_recording_time(frame_number as f64 / fps as f64) {
                 Some(value) => (value.0, value.1),
-                None => (frame_number as f64 / 30_f64, Some(0u32)),
+                None => (frame_number as f64 / fps as f64, Some(0u32)),
             }
         } else {
-            (frame_number as f64 / 30_f64, Some(0u32))
+            (frame_number as f64 / fps as f64, Some(0u32))
         };
 
         let segment = &segments[segment_i.unwrap() as usize];
@@ -280,6 +233,48 @@
     Ok(())
 }
 
+pub fn get_duration(
+    recordings: &ProjectRecordings,
+    meta: &RecordingMeta,
+    project: &ProjectConfiguration,
+) -> f64 {
+    let mut max_duration = recordings.duration();
+    println!("Initial screen recording duration: {}", max_duration);
+
+    // Check camera duration if it exists
+    if let Some(camera_path) = meta.content.camera_path() {
+        if let Ok(camera_duration) = recordings.get_source_duration(&camera_path) {
+            println!("Camera recording duration: {}", camera_duration);
+            max_duration = max_duration.max(camera_duration);
+            println!("New max duration after camera check: {}", max_duration);
+        }
+    }
+
+    // If there's a timeline, use its duration (the segment-extension pass is disabled)
+    if let Some(timeline) = &project.timeline {
+        println!("Found timeline with {} segments", timeline.segments.len());
+        // for (i, segment) in timeline.segments.iter().enumerate() {
+        //     println!(
+        //         "Segment {} - current end: {}, max_duration: {}",
+        //         i, segment.end, max_duration
+        //     );
+        //     if segment.end < max_duration {
+        //         segment.end = max_duration;
+        //         println!("Extended segment {} to new end: {}", i, segment.end);
+        //     }
+        // }
+        let final_duration = timeline.duration();
+        println!(
+            "Final timeline duration after adjustments: {}",
+            final_duration
+        );
+        final_duration
+    } else {
+        println!("No timeline found, using max_duration: {}", max_duration);
+        max_duration
+    }
+}
+
 pub struct RenderVideoConstants {
     pub _instance: wgpu::Instance,
     pub _adapter: wgpu::Adapter,
@@ -969,6 +964,7 @@ pub async fn produce_frame(
     let texture_view = texture.create_view(&wgpu::TextureViewDescriptor::default());
 
+    // dbg!(constants.options.screen_size.x, screen_frame.len());
     constants.queue.write_texture(
         wgpu::ImageCopyTexture {
             texture: &texture,
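A final note on the `30_f64` to `fps as f64` replacements in `render_video_to_channel` above: the divisor maps an output frame index back to a point in recording time, so hardcoding 30 made any export at a different rate sample the timeline at the wrong speed. A standalone sketch with a hypothetical `frame_time` helper:

```rust
/// Recording time, in seconds, sampled by output frame `frame_number`.
fn frame_time(frame_number: u32, fps: u32) -> f64 {
    frame_number as f64 / fps as f64
}

fn main() {
    // At 60 fps, frame 90 is 1.5 s into the recording; the old hardcoded
    // `/ 30.0` mapped it to 3.0 s, i.e. double-speed playback at 60 fps.
    assert_eq!(frame_time(90, 60), 1.5);
    assert_eq!(frame_time(90, 30), 3.0);
}
```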