Open Decodetalkers opened 11 months ago
I tested it with the following code:
use ashpd::{
desktop::{
remote_desktop::{DeviceType, KeyState, RemoteDesktop},
screencast::{CursorMode, PersistMode, Screencast, SourceType},
},
WindowIdentifier,
};
//use gstreamer::{prelude::ObjectExt, traits::ElementExt, MessageType};
use gstreamer::{
prelude::{ElementExtManual, GstBinExtManual},
traits::ElementExt,
MessageType,
};
//use gstreamer::{traits::ElementExt, MessageType, prelude::GstBinExtManual};
use std::os::unix::io::AsRawFd;
/// Render a PipeWire screencast stream into an X11 window via GStreamer.
///
/// * `fd` — the PipeWire remote file descriptor obtained from the portal
///   (`Screencast::open_pipe_wire_remote`); only its raw fd is used.
/// * `node_id` — the PipeWire node of the stream to display. When `None`,
///   the function only initializes GStreamer and returns.
///
/// Blocks the calling thread until the pipeline posts EOS or an error on
/// its bus, then tears the pipeline down.
///
/// # Errors
/// Returns any GStreamer initialization, element-creation, linking, or
/// state-change error.
fn screen_gstreamer<F: AsRawFd>(fd: F, node_id: Option<u32>) -> anyhow::Result<()> {
    gstreamer::init()?;
    let raw_fd = fd.as_raw_fd();
    let pipeline = gstreamer::Pipeline::new(None);
    let videoconvert = gstreamer::ElementFactory::make("videoconvert").build()?;
    let ximagesink = gstreamer::ElementFactory::make("ximagesink").build()?;
    if let Some(node) = node_id {
        let pipewire_element = gstreamer::ElementFactory::make("pipewiresrc")
            .property("fd", &raw_fd)
            .property("path", &node.to_string())
            .build()?;
        pipeline.add_many(&[&pipewire_element, &videoconvert, &ximagesink])?;
        pipewire_element.link(&videoconvert)?;
        videoconvert.link(&ximagesink)?;
        pipeline.set_state(gstreamer::State::Playing)?;
        // A live pipeline always has a bus, so this cannot fail.
        let bus = pipeline.bus().expect("pipeline has no bus");
        // BUG FIX: the original polled with a 1 µs timeout inside a tight
        // `loop`, busy-spinning a full CPU core while waiting. Passing
        // `ClockTime::NONE` blocks until a matching message arrives, and
        // since the pop is already filtered to Error/Eos a single call
        // replaces the whole loop.
        let message = bus.timed_pop_filtered(
            gstreamer::ClockTime::NONE,
            &[MessageType::Error, MessageType::Eos],
        );
        if let Some(message) = message {
            match message.type_() {
                MessageType::Eos => println!("End"),
                MessageType::Error => {
                    println!("{:?}", message);
                    println!("Error");
                }
                // The filtered pop only returns Error/Eos; nothing else
                // can reach this arm.
                _ => {}
            }
        }
        // Always drop back to Null so the fd and the X window are released.
        pipeline.set_state(gstreamer::State::Null)?;
    }
    Ok(())
}
#[tokio::main]
async fn main() -> ashpd::Result<()> {
let remote_desktop = RemoteDesktop::new().await?;
let screencast = Screencast::new().await?;
let identifier = WindowIdentifier::default();
let session = remote_desktop.create_session().await?;
remote_desktop
.select_devices(&session, DeviceType::Keyboard | DeviceType::Pointer)
.await?;
screencast
.select_sources(
&session,
CursorMode::Hidden,
SourceType::Monitor | SourceType::Window,
true,
None,
PersistMode::DoNot,
)
.await?;
let response = remote_desktop
.start(&session, &identifier)
.await?
.response()?;
println!("{:#?}", response.devices());
println!("{:#?}", response.streams());
let fd = screencast.open_pipe_wire_remote(&session).await?;
tokio::spawn(async move {
loop {
// 13 for Enter key code
remote_desktop
.notify_keyboard_keycode(&session, 13, KeyState::Pressed)
.await?;
tokio::time::sleep(std::time::Duration::from_nanos(100)).await;
}
Ok::<(), ashpd::Error>(())
});
response.streams().iter().for_each(|stream| {
for stream in stream.iter() {
println!("node id: {}", stream.pipe_wire_node_id());
println!("size: {:?}", stream.size());
println!("position: {:?}", stream.position());
screen_gstreamer(fd, Some(stream.pipe_wire_node_id())).unwrap();
}
});
Ok(())
}
I'm trying to port your xkb utilization to xdg-desktop-portal-wlr and along the way I think I discovered that your notify_keyboard_keysym
implementation is incorrect as it does nothing to transform the keysym into a keycode before passing it to the virtual keyboard. It seems your tests only do the keycode variant so that would explain why you didn't bump into the issue. I don't currently know how to ask xkb what keycode to use but I thought I'd point this out anyway
Thank you — I know too little about xkb. I am reading the documentation now. Thanks @Enovale, can you provide me with some examples?
I just used ashpd to test the keyboard and pointer; the keyboard works, and screencast also works.
If there is any problem, please open an issue or make a PR. Thanks!
I do not know how many people use the remote desktop interface.