add wayland record and mouse toggle

Salem Yaslem 2023-10-19 06:58:46 +03:00
parent 8b7343c8f8
commit b959262fba
5 changed files with 175 additions and 76 deletions

View File

@@ -1,4 +1,6 @@
 extern crate subprocess;
+use crate::utils::{is_snap, is_wayland};
+use crate::wayland_record::{CursorModeTypes, RecordTypes, WaylandRecorder};
 use chrono::prelude::*;
 use gettextrs::gettext;
 use gtk::{prelude::*, ResponseType};
@@ -12,7 +14,6 @@ use std::sync::mpsc::Sender;
 use std::thread::sleep;
 use std::time::Duration;
 use subprocess::Exec;
-use crate::wayland_record::WaylandRecorder;
 
 #[derive(Clone)]
 pub struct ProgressWidget {
@@ -61,7 +62,8 @@ pub struct Ffmpeg {
     pub unbound: Option<Sender<bool>>,
     pub progress_widget: ProgressWidget,
     pub window: Window,
-    pub record_wayland: WaylandRecorder
+    pub record_wayland: WaylandRecorder,
+    pub main_context: gtk::glib::MainContext,
 }
 
 impl Ffmpeg {
@@ -99,7 +101,7 @@ impl Ffmpeg {
                 &gettext("File already exist. Do you want to overwrite it?"),
             );
 
-            let answer = glib::MainContext::default().block_on(message_dialog.run_future());
+            let answer = self.main_context.block_on(message_dialog.run_future());
            message_dialog.close();
 
            if answer != ResponseType::Yes {
@@ -107,23 +109,7 @@ impl Ffmpeg {
            }
        }
 
-        if self.record_audio.is_active() {
-            let mut ffmpeg_command = Command::new("ffmpeg");
-            ffmpeg_command.arg("-f");
-            ffmpeg_command.arg("pulse");
-            ffmpeg_command.arg("-i");
-            ffmpeg_command.arg(&self.audio_id.active_id().unwrap());
-            ffmpeg_command.arg("-f");
-            ffmpeg_command.arg("ogg");
-            ffmpeg_command.arg(format!(
-                "{}.temp.audio",
-                self.saved_filename.as_ref().unwrap()
-            ));
-            ffmpeg_command.arg("-y");
-            self.audio_process = Some(Rc::new(RefCell::new(ffmpeg_command.spawn().unwrap())));
-        }
-
-        if self.record_video.is_active() {
+        if self.record_video.is_active() && !is_wayland() {
            let mut ffmpeg_command: Command = Command::new("ffmpeg");
 
            // record video with specified width and hight
@@ -183,19 +169,57 @@ impl Ffmpeg {
            // start recording and return the process id
            self.video_process = Some(Rc::new(RefCell::new(ffmpeg_command.spawn().unwrap())));
+        } else if self.record_video.is_active() && is_wayland() {
+            sleep(Duration::from_secs(self.record_delay.value() as u64));
+            if !self
+                .main_context
+                .block_on(self.record_wayland.start(
+                    format!(
+                        "{}.temp.without.audio.webm",
+                        self.saved_filename.as_ref().unwrap()
+                    ),
+                    RecordTypes::Monitor,
+                    {
+                        if self.record_mouse.is_active() {
+                            CursorModeTypes::Show
+                        } else {
+                            CursorModeTypes::Hidden
+                        }
+                    },
+                ))
+            {
+                println!("failed to start recording");
+                return None;
+            }
+        }
+
+        if self.record_audio.is_active() {
+            let mut ffmpeg_command = Command::new("ffmpeg");
+            ffmpeg_command.arg("-f");
+            ffmpeg_command.arg("pulse");
+            ffmpeg_command.arg("-i");
+            ffmpeg_command.arg(&self.audio_id.active_id().unwrap());
+            ffmpeg_command.arg("-f");
+            ffmpeg_command.arg("ogg");
+            ffmpeg_command.arg(format!(
+                "{}.temp.audio",
+                self.saved_filename.as_ref().unwrap()
+            ));
+            ffmpeg_command.arg("-y");
+            self.audio_process = Some(Rc::new(RefCell::new(ffmpeg_command.spawn().unwrap())));
        }
 
        Some(())
    }
 
-    pub fn stop_record(&self) {
+    pub fn stop_record(&mut self) {
        self.progress_widget.show();
-        // kill the process to stop recording
-        self.progress_widget.set_progress("".to_string(), 1, 6);
+        self.progress_widget.set_progress("".to_string(), 1, 7);
 
+        // kill the process to stop recording
        if self.video_process.is_some() {
            self.progress_widget
-                .set_progress("Stop Recording Video".to_string(), 1, 6);
+                .set_progress("Stop Recording Video".to_string(), 1, 7);
 
            Command::new("kill")
                .arg(format!(
@@ -213,13 +237,16 @@ impl Ffmpeg {
                .unwrap();
 
            println!("video killed");
-        }
+        } else if is_wayland() {
+            self.main_context
+                .block_on(self.record_wayland.stop());
+        }
 
-        self.progress_widget.set_progress("".to_string(), 2, 6);
+        self.progress_widget.set_progress("".to_string(), 2, 7);
        if self.audio_process.is_some() {
            self.progress_widget
-                .set_progress("Stop Recording Audio".to_string(), 2, 6);
+                .set_progress("Stop Recording Audio".to_string(), 2, 7);
 
            Command::new("kill")
                .arg(format!(
@@ -238,35 +265,83 @@ impl Ffmpeg {
            println!("audio killed");
        }
 
-        let video_filename = format!(
-            "{}.temp.without.audio.{}",
-            self.saved_filename.as_ref().unwrap(),
-            self.filename.2.active_id().unwrap()
-        );
+        let video_filename = {
+            if is_wayland() {
+                format!(
+                    "{}.temp.without.audio.webm",
+                    self.saved_filename.as_ref().unwrap()
+                )
+            } else {
+                format!(
+                    "{}.temp.without.audio.{}",
+                    self.saved_filename.as_ref().unwrap(),
+                    self.filename.2.active_id().unwrap()
+                )
+            }
+        };
        let audio_filename = format!("{}.temp.audio", self.saved_filename.as_ref().unwrap());
 
-        let is_video_record = std::path::Path::new(video_filename.as_str()).exists();
+        let is_video_record = {
+            if is_wayland() {
+                std::path::Path::new(&format!(
+                    "{}.temp.without.audio.webm",
+                    self.saved_filename.as_ref().unwrap()
+                ))
+                .exists()
+            } else {
+                std::path::Path::new(video_filename.as_str()).exists()
+            }
+        };
        let is_audio_record = std::path::Path::new(audio_filename.as_str()).exists();
 
        if is_video_record {
-            let mut move_command = Command::new("mv");
-            move_command.args([
-                self.saved_filename.as_ref().unwrap().as_str(),
-                if is_audio_record {
-                    video_filename.as_str()
-                } else {
-                    self.saved_filename.as_ref().unwrap()
-                },
-            ]);
-            move_command.output().unwrap();
-
-            self.progress_widget.set_progress("".to_string(), 4, 6);
+            if !is_wayland() {
+                let mut move_command = Command::new("mv");
+                move_command.args([
+                    self.saved_filename.as_ref().unwrap().as_str(),
+                    if is_audio_record {
+                        video_filename.as_str()
+                    } else {
+                        self.saved_filename.as_ref().unwrap()
+                    },
+                ]);
+                move_command.output().unwrap();
+            } else {
+                println!("convert webm to specified format");
+
+                // convert webm to specified format
+                self.progress_widget.set_progress(
+                    "Convert screen-cast to specified format".to_string(),
+                    4,
+                    7,
+                );
+                Command::new("ffmpeg")
+                    .args([
+                        "-i",
+                        format!(
+                            "{}.temp.without.audio.webm",
+                            self.saved_filename.as_ref().unwrap()
+                        )
+                        .as_str(),
+                        "-crf",
+                        "23", // default quality
+                        "-c:a",
+                        self.filename.2.active_id().unwrap().as_str(),
+                        self.saved_filename.as_ref().unwrap(),
+                        "-y",
+                    ])
+                    .output()
+                    .unwrap();
+            }
+
+            self.progress_widget.set_progress("".to_string(), 5, 7);
 
            // if audio record, then merge video with audio
            if is_audio_record {
                self.progress_widget
-                    .set_progress("Save Audio Recording".to_string(), 4, 6);
+                    .set_progress("Save Audio Recording".to_string(), 5, 7);
 
                Command::new("ffmpeg")
                    .args([
@@ -276,15 +351,17 @@ impl Ffmpeg {
                        "ogg",
                        "-i",
                        audio_filename.as_str(),
-                        "-c:v",
-                        "copy",
+                        "-crf",
+                        "23", // default quality
                        "-c:a",
                        "aac",
                        self.saved_filename.as_ref().unwrap(),
                        "-y",
                    ])
-                    .output()
-                    .expect("failed to merge video with audio");
+                    .spawn()
+                    .expect("failed to merge video with audio")
+                    .wait()
+                    .unwrap();
 
                std::fs::remove_file(video_filename).unwrap();
                std::fs::remove_file(audio_filename).unwrap();
@@ -293,7 +370,7 @@ impl Ffmpeg {
        // if only audio is recording then convert it to chosen format
        else if is_audio_record {
            self.progress_widget
-                .set_progress("Convert Audio to choosen format".to_string(), 4, 6);
+                .set_progress("Convert Audio to choosen format".to_string(), 5, 7);
 
            Command::new("ffmpeg")
                .args([
@@ -309,7 +386,7 @@ impl Ffmpeg {
            std::fs::remove_file(audio_filename).unwrap();
        }
 
-        self.progress_widget.set_progress("".to_string(), 5, 6);
+        self.progress_widget.set_progress("".to_string(), 6, 7);
 
        // execute command after finish recording
        if self.command.text().trim() != "" {
@@ -322,7 +399,7 @@ impl Ffmpeg {
        }
 
        self.progress_widget
-            .set_progress("Finish".to_string(), 6, 6);
+            .set_progress("Finish".to_string(), 7, 7);
 
        self.progress_widget.hide();
    }
@@ -344,7 +421,3 @@ impl Ffmpeg {
        }
    }
 }
-
-fn is_snap() -> bool {
-    !std::env::var("SNAP").unwrap_or_default().is_empty()
-}
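Note on the new Wayland branch above: start_record() stays synchronous GTK code and drives the async portal recorder by blocking on the shared GLib main context, with the "record mouse" checkbox mapped onto the recorder's cursor mode. A minimal sketch of that calling pattern, assuming only the WaylandRecorder, RecordTypes, and CursorModeTypes types introduced in this commit (the checkbox, recorder instance, and output path below are illustrative, not part of the diff):

    // Bridge sync GTK code to the async recorder: block the calling thread
    // on start(), which returns false if the portal request fails.
    let main_context = gtk::glib::MainContext::default();
    let cursor_mode = if record_mouse_checkbox.is_active() {
        CursorModeTypes::Show   // capture the pointer
    } else {
        CursorModeTypes::Hidden // leave the pointer out of the capture
    };
    let started = main_context.block_on(recorder.start(
        "output.temp.without.audio.webm".to_string(),
        RecordTypes::Monitor,
        cursor_mode,
    ));
    if !started {
        println!("failed to start recording");
    }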

View File

@@ -448,6 +448,8 @@ pub fn build_ui(application: &Application) {
    let _delay_spin = delay_spin.clone();
 
+    let main_context = glib::MainContext::default();
+    let wayland_record = main_context.block_on(WaylandRecorder::new());
 
    // Init record struct
    let ffmpeg_record_interface: Rc<RefCell<Ffmpeg>> = Rc::new(RefCell::new(Ffmpeg {
        filename: (
@@ -469,7 +471,8 @@ pub fn build_ui(application: &Application) {
        progress_widget: ProgressWidget::new(progress_dialog, progress_bar),
        window: main_window.clone(),
        record_delay: delay_spin,
-        record_wayland: glib::MainContext::default().block_on(WaylandRecorder::new())
+        record_wayland: wayland_record,
+        main_context: main_context,
    }));
 
    // Record Button

View File

@@ -3,3 +3,7 @@ pub fn is_wayland() -> bool {
        .unwrap_or_default()
        .eq_ignore_ascii_case("wayland")
 }
+
+pub fn is_snap() -> bool {
+    !std::env::var("SNAP").unwrap_or_default().is_empty()
+}

View File

@@ -5,7 +5,7 @@ use zbus::{
    dbus_proxy,
    export::futures_util::TryStreamExt,
    zvariant::{ObjectPath, OwnedObjectPath, Structure, Value},
-    Connection, MessageStream, MessageType, Result,
+    Connection, MessageStream, MessageType, Result
 };
 
 #[derive(Clone, Copy)]
@@ -13,6 +13,7 @@ pub enum RecordTypes {
    Default,
    Monitor,
    Window,
+    MonitorOrWindow,
 }
 
 #[derive(Clone, Copy)]
@@ -46,6 +47,7 @@ trait ScreenCast {
 pub struct WaylandRecorder {
    connection: Connection,
    screen_cast_proxy: ScreenCastProxy<'static>,
+    session_path: String,
    pipeline: Option<gst::Pipeline>,
    filename: String,
 }
@@ -53,32 +55,43 @@ pub struct WaylandRecorder {
 impl WaylandRecorder {
    pub async fn new() -> Self {
        let connection = Connection::session().await.expect("failed to connect to session bus");
-        let screen_cast_proxy = ScreenCastProxy::new(&connection).await.expect("failed to create dbus proxy");
+        let screen_cast_proxy = ScreenCastProxy::new(&connection).await.expect("failed to create dbus proxy for screen-cast");
        gst::init().expect("failed to initialize gstreamer");
 
        WaylandRecorder {
            connection,
            screen_cast_proxy,
+            session_path: String::new(),
            filename: String::from("blue_recorder.webm"),
            pipeline: None,
        }
    }
 
-    pub async fn start(&mut self, record_type: RecordTypes, cursor_mode_type: CursorModeTypes) -> Result<()> {
+    pub async fn start(&mut self, filename: String, record_type: RecordTypes, cursor_mode_type: CursorModeTypes) -> bool {
        self.screen_cast_proxy.create_session(HashMap::from([
            ("handle_token", Value::from("blue_recorder_1")),
            ("session_handle_token", Value::from("blue_recorder_1")),
        ]))
-        .await?;
+        .await.expect("failed to create session");
 
        let mut message_stream = MessageStream::from(self.connection.clone());
 
-        while let Some(msg) = message_stream.try_next().await? {
+        self.filename = filename.clone();
+        let mut first_empty_signal_called = false;
+
+        while let Some(msg) = message_stream.try_next().await.expect("failed to get message") {
            match msg.message_type() {
                MessageType::Signal => {
-                    let (_, response) = msg.body::<(u32, HashMap<&str, Value>)>()?;
+                    let (_, response) = msg.body::<(u32, HashMap<&str, Value>)>().expect("failed to get body");
 
                    if response.len() == 0 {
-                        continue;
+                        if first_empty_signal_called {
+                            return false;
+                        } else {
+                            first_empty_signal_called = true;
+                            continue;
+                        }
                    }
 
                    if response.contains_key("session_handle") {
@@ -88,35 +101,38 @@ impl WaylandRecorder {
                            record_type,
                            cursor_mode_type
                        )
-                        .await?;
+                        .await.expect("failed to handle session");
                        continue;
                    }
 
                    if response.contains_key("streams") {
-                        // TODO: start recording on separate thread
-                        self.record_screen_cast(response.clone()).await?;
+                        self.record_screen_cast(response.clone()).await.expect("failed to record screen cast");
                        break;
                    }
                }
+                MessageType::MethodReturn => {
+                    println!("\n\nMethodReturn message: {:?}", msg);
+                }
                _ => {
                    println!("\n\nUnkown message: {:?}", msg);
                }
            }
        }
 
-        Ok(())
+        true
    }
 
-    pub fn stop(self) {
-        if let Some(pipeline) = self.pipeline {
+    pub async fn stop(&mut self) {
+        if let Some(pipeline) = self.pipeline.clone() {
            pipeline
                .set_state(gst::State::Null)
                .expect("failed to stop pipeline");
        }
+
+        if self.session_path.len() > 0 {
+            println!("Closing session...: {:?}", self.session_path.replace("request", "session"));
+            self.connection.clone().call_method(Some("org.freedesktop.portal.Desktop"), self.session_path.clone().replace("request", "session"), Some("org.freedesktop.portal.Session"), "Close", &()).await.expect("failed to close session");
+            self.session_path = String::new();
+        }
    }
 
    async fn handle_session(
        &mut self,
        screen_cast_proxy: ScreenCastProxy<'_>,
@@ -131,6 +147,8 @@ impl WaylandRecorder {
            .downcast::<String>()
            .expect("cannot down cast session_handle");
 
+        self.session_path = response_session_handle.clone();
+
        screen_cast_proxy
            .select_sources(
                ObjectPath::try_from(response_session_handle.clone())?,
@@ -141,6 +159,7 @@ impl WaylandRecorder {
                    Value::from(match record_type {
                        RecordTypes::Monitor => 1u32,
                        RecordTypes::Window => 2u32,
+                        RecordTypes::MonitorOrWindow => 3u32,
                        _ => 0u32,
                    }),
                ),
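For reference, WaylandRecorder follows the org.freedesktop.portal.ScreenCast handshake: CreateSession, then SelectSources on the returned session handle, then Start, with each reply delivered as a Response signal, which is why start() loops over a MessageStream. The u32 values passed to SelectSources are the portal's bitmasks, so the new MonitorOrWindow => 3u32 is simply Monitor (1) | Window (2); the portal's cursor_mode bitmask is Hidden = 1, Embedded = 2, Metadata = 4. A small sketch of that mapping, assuming the enums from this commit; the helper names are illustrative, and since the diff does not show how CursorModeTypes::Show is serialized, treating it as Embedded (2) is an assumption:

    // RecordTypes / CursorModeTypes are the enums from wayland_record.rs above.
    // The returned u32s are org.freedesktop.portal.ScreenCast bitmask values
    // for the "types" and "cursor_mode" entries in the SelectSources options.
    fn source_types_bits(record_type: RecordTypes) -> u32 {
        match record_type {
            RecordTypes::Monitor => 1,             // MONITOR
            RecordTypes::Window => 2,              // WINDOW
            RecordTypes::MonitorOrWindow => 1 | 2, // == 3u32 in the match above
            _ => 0,
        }
    }

    fn cursor_mode_bits(cursor_mode: CursorModeTypes) -> u32 {
        match cursor_mode {
            CursorModeTypes::Hidden => 1, // pointer not captured
            CursorModeTypes::Show => 2,   // pointer embedded in the frames (assumed mapping)
            _ => 1,
        }
    }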