diff --git a/.mise.toml b/.mise.toml index 71e6a90..756f189 100644 --- a/.mise.toml +++ b/.mise.toml @@ -114,12 +114,60 @@ description = "Build the comm1 workflow WASM module" dir = "services/ws-modules/comm1" run = "wasm-pack build . --target web" +[tasks.build-ws-sensor1-module] +description = "Build the sensor1 workflow WASM module" +dir = "services/ws-modules/sensor1" +run = "wasm-pack build . --target web" + +[tasks.build-ws-audio1-module] +description = "Build the audio1 workflow WASM module" +dir = "services/ws-modules/audio1" +run = "wasm-pack build . --target web" + +[tasks.build-ws-video1-module] +description = "Build the video1 workflow WASM module" +dir = "services/ws-modules/video1" +run = "wasm-pack build . --target web" + +[tasks.build-ws-bluetooth-module] +description = "Build the bluetooth workflow WASM module" +dir = "services/ws-modules/bluetooth" +run = "wasm-pack build . --target web" + +[tasks.build-ws-geolocation-module] +description = "Build the geolocation workflow WASM module" +dir = "services/ws-modules/geolocation" +run = "wasm-pack build . --target web" + +[tasks.build-ws-graphics-info-module] +description = "Build the graphics info workflow WASM module" +dir = "services/ws-modules/graphics-info" +run = "wasm-pack build . --target web" + +[tasks.build-ws-speech-recognition-module] +description = "Build the speech recognition workflow WASM module" +dir = "services/ws-modules/speech-recognition" +run = "wasm-pack build . --target web" + +[tasks.build-ws-nfc-module] +description = "Build the nfc workflow WASM module" +dir = "services/ws-modules/nfc" +run = "wasm-pack build . 
--target web" + [tasks.build-wasm] depends = [ + "build-ws-audio1-module", + "build-ws-bluetooth-module", "build-ws-comm1-module", "build-ws-data1-module", "build-ws-face-detection-module", + "build-ws-geolocation-module", + "build-ws-graphics-info-module", "build-ws-har1-module", + "build-ws-nfc-module", + "build-ws-sensor1-module", + "build-ws-speech-recognition-module", + "build-ws-video1-module", "build-ws-wasm-agent", ] description = "Build all WebAssembly modules" diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..3cc82ff --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,13 @@ +{ + "recommendations": [ + "davidanson.vscode-markdownlint", + "editorconfig.editorconfig", + "fill-labs.dependi", + "github.vscode-github-actions", + "ms-azuretools.vscode-docker", + "rust-lang.rust-analyzer", + "streetsidesoftware.code-spell-checker", + "tamasfe.even-better-toml", + "yzhang.markdown-all-in-one" + ] +} diff --git a/Cargo.toml b/Cargo.toml index b3b0520..1b03869 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,10 +7,19 @@ rust-version = "1.87.0" [workspace] members = [ "libs/edge-toolkit", + "libs/web", + "services/ws-modules/audio1", + "services/ws-modules/bluetooth", "services/ws-modules/comm1", "services/ws-modules/data1", "services/ws-modules/face-detection", + "services/ws-modules/geolocation", + "services/ws-modules/graphics-info", "services/ws-modules/har1", + "services/ws-modules/nfc", + "services/ws-modules/sensor1", + "services/ws-modules/speech-recognition", + "services/ws-modules/video1", "services/ws-server", "services/ws-wasm-agent", ] @@ -18,6 +27,7 @@ resolver = "2" [workspace.dependencies] chrono = { version = "0.4", features = ["serde"] } +et-web = { path = "libs/web" } serde = { version = "1", features = ["derive"] } serde_json = "1" tracing = "0.1" diff --git a/libs/web/Cargo.toml b/libs/web/Cargo.toml new file mode 100644 index 0000000..6f001c5 --- /dev/null +++ b/libs/web/Cargo.toml @@ -0,0 +1,23 
@@ +[package] +name = "et-web" +description = "Web helpers for WASM modules" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +js-sys = "0.3" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = [ + "MediaDevices", + "MediaStream", + "MediaStreamConstraints", + "MediaStreamTrack", + "MessageEvent", + "Navigator", +] } diff --git a/libs/web/src/lib.rs b/libs/web/src/lib.rs new file mode 100644 index 0000000..9682a42 --- /dev/null +++ b/libs/web/src/lib.rs @@ -0,0 +1,40 @@ +use wasm_bindgen::prelude::*; + +pub const SENSOR_PERMISSION_GRANTED: &str = "granted"; + +pub fn get_media_devices(navigator: &web_sys::Navigator) -> Result { + let media_devices = js_sys::Reflect::get(navigator, &JsValue::from_str("mediaDevices"))?; + + if media_devices.is_undefined() || media_devices.is_null() { + return Err(JsValue::from_str( + "navigator.mediaDevices is unavailable. Use https://... or http://localhost and allow access.", + )); + } + + media_devices + .dyn_into::() + .map_err(|_| JsValue::from_str("navigator.mediaDevices is not accessible in this browser")) +} + +pub async fn request_sensor_permission(target: JsValue) -> Result { + if target.is_null() || target.is_undefined() { + return Ok(SENSOR_PERMISSION_GRANTED.to_string()); + } + + let request_permission = js_sys::Reflect::get(&target, &JsValue::from_str("requestPermission"))?; + if request_permission.is_null() || request_permission.is_undefined() { + return Ok(SENSOR_PERMISSION_GRANTED.to_string()); + } + + let request_permission = request_permission + .dyn_into::() + .map_err(|_| JsValue::from_str("requestPermission is not callable"))?; + let promise = request_permission + .call0(&target)? 
+ .dyn_into::() + .map_err(|_| JsValue::from_str("requestPermission did not return a Promise"))?; + let result = wasm_bindgen_futures::JsFuture::from(promise).await?; + Ok(result + .as_string() + .unwrap_or_else(|| SENSOR_PERMISSION_GRANTED.to_string())) +} diff --git a/services/ws-modules/audio1/Cargo.toml b/services/ws-modules/audio1/Cargo.toml new file mode 100644 index 0000000..b49fd91 --- /dev/null +++ b/services/ws-modules/audio1/Cargo.toml @@ -0,0 +1,34 @@ +[package] +description = "audio capture module" +name = "et-ws-audio1" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-web.workspace = true +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = [ + "MediaDevices", + "MediaStream", + "MediaStreamConstraints", + "MediaStreamTrack", + "Navigator", + "Window", + "console", +] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/audio1/src/lib.rs b/services/ws-modules/audio1/src/lib.rs new file mode 100644 index 0000000..e25c220 --- /dev/null +++ b/services/ws-modules/audio1/src/lib.rs @@ -0,0 +1,234 @@ +use std::cell::RefCell; + +use et_web::get_media_devices; +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; +use web_sys::{MediaStream, MediaStreamConstraints}; + +#[wasm_bindgen] +pub struct MicrophoneAccess { + stream: MediaStream, +} + +#[wasm_bindgen] +impl MicrophoneAccess { + #[wasm_bindgen(js_name = request)] + pub async fn request() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No 
window available"))?; + let media_devices = get_media_devices(&window.navigator())?; + + let constraints = MediaStreamConstraints::new(); + constraints.set_audio(&JsValue::TRUE); + constraints.set_video(&JsValue::FALSE); + + let promise = media_devices.get_user_media_with_constraints(&constraints)?; + let stream = JsFuture::from(promise).await?; + let stream: MediaStream = stream + .dyn_into() + .map_err(|_| JsValue::from_str("getUserMedia did not return a MediaStream"))?; + + info!( + "Microphone access granted with {} audio track(s)", + stream.get_audio_tracks().length() + ); + + Ok(MicrophoneAccess { stream }) + } + + #[wasm_bindgen(js_name = trackCount)] + pub fn track_count(&self) -> u32 { + self.stream.get_audio_tracks().length() + } + + #[wasm_bindgen(js_name = rawStream)] + pub fn raw_stream(&self) -> JsValue { + self.stream.clone().into() + } + + pub fn stop(&self) { + let tracks = self.stream.get_tracks(); + for index in 0..tracks.length() { + if let Some(track) = tracks.get(index).dyn_ref::() { + track.stop(); + } + } + info!("Microphone tracks stopped"); + } +} + +struct AudioCaptureRuntime { + client: WsClient, + access: MicrophoneAccess, +} + +thread_local! 
{ + static AUDIO_CAPTURE_RUNTIME: RefCell> = const { RefCell::new(None) }; +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("audio-capture module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + AUDIO_CAPTURE_RUNTIME.with(|runtime| runtime.borrow().is_some()) +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + if is_running() { + return Ok(()); + } + + set_module_status("audio-capture: entered run()")?; + log("entered run()")?; + + let outcome = async { + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("requesting microphone access")?; + let access = MicrophoneAccess::request().await?; + let tracks = access.track_count(); + log(&format!("microphone access granted: {} tracks", tracks))?; + + client.send_client_event( + "audio", + "access_granted", + json!({ + "track_count": tracks, + }), + )?; + + set_module_status("audio-capture: running")?; + + AUDIO_CAPTURE_RUNTIME.with(|runtime| { + runtime.borrow_mut().replace(AudioCaptureRuntime { client, access }); + }); + + let stop_callback = Closure::once_into_js(move || { + if is_running() { + let _ = log("workflow finished automatically after 5 seconds"); + let _ = stop(); + } + }); + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + window.set_timeout_with_callback_and_timeout_and_arguments_0(stop_callback.unchecked_ref(), 5000)?; + + Ok(()) + } + .await; + + if let Err(error) = &outcome { + let message = describe_js_error(error); + let _ = 
set_module_status(&format!("audio-capture: error\n{}", message)); + let _ = log(&format!("error: {}", message)); + } + + outcome +} + +#[wasm_bindgen] +pub fn stop() -> Result<(), JsValue> { + AUDIO_CAPTURE_RUNTIME.with(|runtime| { + if let Some(mut runtime) = runtime.borrow_mut().take() { + runtime.access.stop(); + runtime.client.disconnect(); + log("audio-capture stopped")?; + } + Ok::<(), JsValue>(()) + })?; + + set_module_status("audio-capture: stopped")?; + Ok(()) +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[audio-capture] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { 
+ let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/bluetooth/Cargo.toml b/services/ws-modules/bluetooth/Cargo.toml new file mode 100644 index 0000000..792e9ea --- /dev/null +++ b/services/ws-modules/bluetooth/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "bluetooth access module" +name = "et-ws-bluetooth" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/bluetooth/src/lib.rs b/services/ws-modules/bluetooth/src/lib.rs new file mode 100644 index 0000000..f56df8c --- /dev/null +++ b/services/ws-modules/bluetooth/src/lib.rs @@ -0,0 +1,226 @@ +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use 
wasm_bindgen_futures::JsFuture; + +#[wasm_bindgen] +pub struct BluetoothAccess { + device: JsValue, +} + +#[wasm_bindgen] +impl BluetoothAccess { + #[wasm_bindgen(js_name = request)] + pub async fn request() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let navigator = window.navigator(); + let bluetooth = js_sys::Reflect::get(&navigator, &JsValue::from_str("bluetooth"))?; + if bluetooth.is_undefined() || bluetooth.is_null() { + return Err(JsValue::from_str( + "Web Bluetooth is not available in this browser context", + )); + } + + let options = js_sys::Object::new(); + js_sys::Reflect::set(&options, &JsValue::from_str("acceptAllDevices"), &JsValue::TRUE)?; + + let request_device = js_sys::Reflect::get(&bluetooth, &JsValue::from_str("requestDevice"))? + .dyn_into::() + .map_err(|_| JsValue::from_str("navigator.bluetooth.requestDevice is not callable"))?; + let promise = request_device + .call1(&bluetooth, &options)? + .dyn_into::() + .map_err(|_| JsValue::from_str("requestDevice did not return a Promise"))?; + let device = JsFuture::from(promise).await?; + + info!( + "Bluetooth device selected: {:?}", + js_sys::Reflect::get(&device, &JsValue::from_str("name")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_else(|| "unknown".to_string()) + ); + + Ok(BluetoothAccess { device }) + } + + pub fn id(&self) -> String { + js_sys::Reflect::get(&self.device, &JsValue::from_str("id")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_default() + } + + pub fn name(&self) -> String { + js_sys::Reflect::get(&self.device, &JsValue::from_str("name")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_else(|| "unknown".to_string()) + } + + #[wasm_bindgen(js_name = gattConnected)] + pub fn gatt_connected(&self) -> bool { + js_sys::Reflect::get(&self.device, &JsValue::from_str("gatt")) + .ok() + .filter(|gatt| !gatt.is_null() && !gatt.is_undefined()) + .and_then(|gatt| 
js_sys::Reflect::get(&gatt, &JsValue::from_str("connected")).ok()) + .and_then(|value| value.as_bool()) + .unwrap_or(false) + } + + #[wasm_bindgen(js_name = connectGatt)] + pub async fn connect_gatt(&self) -> Result<(), JsValue> { + let gatt = js_sys::Reflect::get(&self.device, &JsValue::from_str("gatt"))?; + if gatt.is_null() || gatt.is_undefined() { + return Err(JsValue::from_str("Selected device has no GATT server")); + } + + let connect = js_sys::Reflect::get(&gatt, &JsValue::from_str("connect"))? + .dyn_into::() + .map_err(|_| JsValue::from_str("device.gatt.connect is not callable"))?; + let promise = connect + .call0(&gatt)? + .dyn_into::() + .map_err(|_| JsValue::from_str("device.gatt.connect did not return a Promise"))?; + let _server = JsFuture::from(promise).await?; + info!("Connected to Bluetooth GATT server for {}", self.name()); + Ok(()) + } +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("bluetooth module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + false +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + set_module_status("bluetooth: entered run()")?; + log("entered run()")?; + + let outcome = async { + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("requesting bluetooth access")?; + let access = BluetoothAccess::request().await?; + let id = access.id(); + let name = access.name(); + log(&format!("bluetooth device selected: {} ({})", name, id))?; + + client.send_client_event( + "bluetooth", + "device_selected", + 
json!({ + "id": id, + "name": name, + }), + )?; + + set_module_status(&format!("bluetooth: device selected\n{} ({})", name, id))?; + + client.disconnect(); + Ok(()) + } + .await; + + if let Err(error) = &outcome { + let message = describe_js_error(error); + let _ = set_module_status(&format!("bluetooth: error\n{}", message)); + let _ = log(&format!("error: {}", message)); + } + + outcome +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[bluetooth] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + 
JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/face-detection/Cargo.toml b/services/ws-modules/face-detection/Cargo.toml index 4a6deae..53f0206 100644 --- a/services/ws-modules/face-detection/Cargo.toml +++ b/services/ws-modules/face-detection/Cargo.toml @@ -10,6 +10,7 @@ repository.workspace = true crate-type = ["cdylib", "rlib"] [dependencies] +et-web.workspace = true et-ws-wasm-agent = { path = "../../ws-wasm-agent" } js-sys = "0.3" serde.workspace = true diff --git a/services/ws-modules/face-detection/src/lib.rs b/services/ws-modules/face-detection/src/lib.rs index 82fa4e1..863ed6d 100644 --- a/services/ws-modules/face-detection/src/lib.rs +++ b/services/ws-modules/face-detection/src/lib.rs @@ -1,13 +1,15 @@ use std::cell::{Cell, RefCell}; use std::rc::Rc; -use et_ws_wasm_agent::{VideoCapture, WsClient, WsClientConfig, set_textarea_value}; +use et_web::get_media_devices; +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; use js_sys::{Array, Float32Array, Function, Promise, Reflect}; use serde_json::json; use tracing::info; use wasm_bindgen::JsCast; use wasm_bindgen::prelude::*; use wasm_bindgen_futures::{JsFuture, spawn_local}; +use web_sys::MediaStreamConstraints; use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement, HtmlVideoElement, ImageData, MediaStream}; 
const FACE_MODEL_PATH: &str = "/static/models/video_cv.onnx"; @@ -54,6 +56,57 @@ struct FaceDetectionRuntime { _render_closure: Closure, } +#[wasm_bindgen] +pub struct VideoCapture { + stream: MediaStream, +} + +#[wasm_bindgen] +impl VideoCapture { + #[wasm_bindgen(js_name = request)] + pub async fn request() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let media_devices = get_media_devices(&window.navigator())?; + + let constraints = MediaStreamConstraints::new(); + constraints.set_audio(&JsValue::FALSE); + constraints.set_video(&JsValue::TRUE); + + let promise = media_devices.get_user_media_with_constraints(&constraints)?; + let stream = JsFuture::from(promise).await?; + let stream: MediaStream = stream + .dyn_into() + .map_err(|_| JsValue::from_str("getUserMedia did not return a MediaStream"))?; + + info!( + "Video capture granted with {} video track(s)", + stream.get_video_tracks().length() + ); + + Ok(VideoCapture { stream }) + } + + #[wasm_bindgen(js_name = trackCount)] + pub fn track_count(&self) -> u32 { + self.stream.get_video_tracks().length() + } + + #[wasm_bindgen(js_name = rawStream)] + pub fn raw_stream(&self) -> JsValue { + self.stream.clone().into() + } + + pub fn stop(&self) { + let tracks = self.stream.get_tracks(); + for index in 0..tracks.length() { + if let Some(track) = tracks.get(index).dyn_ref::() { + track.stop(); + } + } + info!("Video capture tracks stopped"); + } +} + thread_local! 
{ static FACE_RUNTIME: RefCell> = const { RefCell::new(None) }; static FACE_PREPROCESS_CANVAS: RefCell> = const { RefCell::new(None) }; @@ -133,6 +186,7 @@ pub async fn run() -> Result<(), JsValue> { let last_summary: Rc>> = Rc::new(RefCell::new(None)); let inference_pending = Rc::new(Cell::new(false)); let last_has_detection = Rc::new(Cell::new(false)); + let inference_count = Rc::new(Cell::new(0)); let inference_session = session.clone(); let inference_input_name = input_name.clone(); @@ -141,11 +195,16 @@ pub async fn run() -> Result<(), JsValue> { let inference_last_summary = last_summary.clone(); let inference_pending_flag = inference_pending.clone(); let inference_last_has_detection = last_has_detection.clone(); + let inference_count_ref = inference_count.clone(); let inference_closure = Closure::wrap(Box::new(move || { if inference_pending_flag.get() { return; } + if inference_count_ref.get() >= 20 { + return; + } + inference_pending_flag.set(true); let session = inference_session.clone(); let input_name = inference_input_name.clone(); @@ -154,14 +213,23 @@ pub async fn run() -> Result<(), JsValue> { let last_summary = inference_last_summary.clone(); let pending_flag = inference_pending_flag.clone(); let last_has_detection = inference_last_has_detection.clone(); + let count_ref = inference_count_ref.clone(); spawn_local(async move { let outcome = infer_once(&session, &input_name, &output_names, &client, &last_has_detection).await; match outcome { Ok(summary) => { + let count = count_ref.get() + 1; + count_ref.set(count); + update_face_status(&input_name, &output_names, &summary); *last_summary.borrow_mut() = Some(summary); + + if count >= 20 { + let _ = log("workflow finished automatically after 20 inferences"); + let _ = stop(); + } } Err(error) => { let message = describe_js_error(&error); @@ -194,6 +262,14 @@ pub async fn run() -> Result<(), JsValue> { FACE_RENDER_INTERVAL_MS, )?; + let stop_callback = Closure::once_into_js(move || { + if is_running() { + 
let _ = log("workflow finished automatically after 30 seconds"); + let _ = stop(); + } + }); + window.set_timeout_with_callback_and_timeout_and_arguments_0(stop_callback.unchecked_ref(), 30000)?; + let startup_summary = DetectionSummary { detections: Vec::new(), confidence: 0.0, diff --git a/services/ws-modules/geolocation/Cargo.toml b/services/ws-modules/geolocation/Cargo.toml new file mode 100644 index 0000000..6bfb264 --- /dev/null +++ b/services/ws-modules/geolocation/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "geolocation module" +name = "et-ws-geolocation" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/geolocation/src/lib.rs b/services/ws-modules/geolocation/src/lib.rs new file mode 100644 index 0000000..8b114ca --- /dev/null +++ b/services/ws-modules/geolocation/src/lib.rs @@ -0,0 +1,241 @@ +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; + +#[wasm_bindgen] +pub struct GeolocationReading { + latitude: f64, + longitude: f64, + accuracy_meters: f64, +} + +#[wasm_bindgen] +impl GeolocationReading { + #[wasm_bindgen(js_name = request)] + pub async fn request() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let navigator = window.navigator(); + let geolocation = js_sys::Reflect::get(&navigator, &JsValue::from_str("geolocation"))?; + 
if geolocation.is_undefined() || geolocation.is_null() { + return Err(JsValue::from_str( + "navigator.geolocation is unavailable. Use https://... or http://localhost and allow access.", + )); + } + + let options = js_sys::Object::new(); + js_sys::Reflect::set(&options, &JsValue::from_str("enableHighAccuracy"), &JsValue::TRUE)?; + js_sys::Reflect::set(&options, &JsValue::from_str("maximumAge"), &JsValue::from_f64(0.0))?; + js_sys::Reflect::set(&options, &JsValue::from_str("timeout"), &JsValue::from_f64(10_000.0))?; + + let promise = js_sys::Promise::new(&mut |resolve, reject| { + let reject_for_callback = reject.clone(); + let success = Closure::once(Box::new(move |position: JsValue| { + let _ = resolve.call1(&JsValue::NULL, &position); + }) as Box); + + let failure = Closure::once(Box::new(move |error: JsValue| { + let _ = reject_for_callback.call1(&JsValue::NULL, &error); + }) as Box); + + let get_current_position = js_sys::Reflect::get(&geolocation, &JsValue::from_str("getCurrentPosition")) + .ok() + .and_then(|value| value.dyn_into::().ok()); + + if let Some(get_current_position) = get_current_position { + let _ = get_current_position.call3( + &geolocation, + success.as_ref().unchecked_ref(), + failure.as_ref().unchecked_ref(), + &options, + ); + } else { + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str("navigator.geolocation.getCurrentPosition is not callable"), + ); + } + + success.forget(); + failure.forget(); + }); + + let position = JsFuture::from(promise).await?; + let coords = js_sys::Reflect::get(&position, &JsValue::from_str("coords"))?; + let latitude = js_sys::Reflect::get(&coords, &JsValue::from_str("latitude"))? + .as_f64() + .ok_or_else(|| JsValue::from_str("Geolocation latitude is missing"))?; + let longitude = js_sys::Reflect::get(&coords, &JsValue::from_str("longitude"))? 
+ .as_f64() + .ok_or_else(|| JsValue::from_str("Geolocation longitude is missing"))?; + let accuracy_meters = js_sys::Reflect::get(&coords, &JsValue::from_str("accuracy"))? + .as_f64() + .ok_or_else(|| JsValue::from_str("Geolocation accuracy is missing"))?; + + info!( + "Geolocation reading acquired: latitude={} longitude={} accuracy={}m", + latitude, longitude, accuracy_meters + ); + + Ok(GeolocationReading { + latitude, + longitude, + accuracy_meters, + }) + } + + pub fn latitude(&self) -> f64 { + self.latitude + } + + pub fn longitude(&self) -> f64 { + self.longitude + } + + #[wasm_bindgen(js_name = accuracyMeters)] + pub fn accuracy_meters(&self) -> f64 { + self.accuracy_meters + } +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("geolocation module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + false +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + set_module_status("geolocation: entered run()")?; + log("entered run()")?; + + let outcome = async { + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("requesting geolocation access")?; + let reading = GeolocationReading::request().await?; + let lat = reading.latitude(); + let lon = reading.longitude(); + let acc = reading.accuracy_meters(); + log(&format!("geolocation acquired: lat={} lon={} acc={}m", lat, lon, acc))?; + + client.send_client_event( + "geolocation", + "reading_acquired", + json!({ + "latitude": lat, + "longitude": lon, + "accuracy": acc, + }), + )?; + + 
set_module_status(&format!( + "geolocation: reading acquired\nlat: {}\nlon: {}\nacc: {}m", + lat, lon, acc + ))?; + + client.disconnect(); + Ok(()) + } + .await; + + if let Err(error) = &outcome { + let message = describe_js_error(error); + let _ = set_module_status(&format!("geolocation: error\n{}", message)); + let _ = log(&format!("error: {}", message)); + } + + outcome +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[geolocation] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| 
()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/graphics-info/Cargo.toml b/services/ws-modules/graphics-info/Cargo.toml new file mode 100644 index 0000000..9465e9e --- /dev/null +++ b/services/ws-modules/graphics-info/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "graphics info module" +name = "et-ws-graphics-info" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Document", "Element", "HtmlCanvasElement", "Navigator", "Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/graphics-info/src/lib.rs b/services/ws-modules/graphics-info/src/lib.rs new file mode 100644 index 0000000..bbb71f2 --- /dev/null +++ b/services/ws-modules/graphics-info/src/lib.rs @@ -0,0 +1,504 @@ +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; 
+use web_sys::HtmlCanvasElement; + +#[wasm_bindgen] +pub struct GraphicsSupport { + webgl_supported: bool, + webgl2_supported: bool, + webgpu_supported: bool, + webnn_supported: bool, +} + +#[wasm_bindgen] +impl GraphicsSupport { + #[wasm_bindgen(js_name = detect)] + pub fn detect() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let document = window + .document() + .ok_or_else(|| JsValue::from_str("No document available"))?; + let canvas = document + .create_element("canvas")? + .dyn_into::() + .map_err(|_| JsValue::from_str("Failed to create canvas element"))?; + + let webgl_supported = canvas.get_context("webgl")?.is_some(); + let webgl2_supported = canvas.get_context("webgl2")?.is_some(); + let webgpu_supported = js_sys::Reflect::get(&window.navigator(), &JsValue::from_str("gpu"))?.is_object(); + let webnn_supported = js_sys::Reflect::get(&window.navigator(), &JsValue::from_str("ml"))?.is_object(); + + info!( + "Graphics support detected: webgl={} webgl2={} webgpu={} webnn={}", + webgl_supported, webgl2_supported, webgpu_supported, webnn_supported + ); + + Ok(GraphicsSupport { + webgl_supported, + webgl2_supported, + webgpu_supported, + webnn_supported, + }) + } + + #[wasm_bindgen(js_name = webglSupported)] + pub fn webgl_supported(&self) -> bool { + self.webgl_supported + } + + #[wasm_bindgen(js_name = webgl2Supported)] + pub fn webgl2_supported(&self) -> bool { + self.webgl2_supported + } + + #[wasm_bindgen(js_name = webgpuSupported)] + pub fn webgpu_supported(&self) -> bool { + self.webgpu_supported + } + + #[wasm_bindgen(js_name = webnnSupported)] + pub fn webnn_supported(&self) -> bool { + self.webnn_supported + } +} + +#[wasm_bindgen] +pub struct WebGpuProbeResult { + adapter_found: bool, + device_created: bool, +} + +#[wasm_bindgen] +impl WebGpuProbeResult { + #[wasm_bindgen(js_name = test)] + pub async fn test() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No 
window available"))?; + let navigator = window.navigator(); + let gpu = js_sys::Reflect::get(&navigator, &JsValue::from_str("gpu"))?; + + if gpu.is_null() || gpu.is_undefined() { + return Ok(WebGpuProbeResult { + adapter_found: false, + device_created: false, + }); + } + + let request_adapter = js_sys::Reflect::get(&gpu, &JsValue::from_str("requestAdapter"))? + .dyn_into::() + .map_err(|_| JsValue::from_str("navigator.gpu.requestAdapter is not callable"))?; + + let adapter_promise = request_adapter + .call0(&gpu)? + .dyn_into::() + .map_err(|_| JsValue::from_str("requestAdapter did not return a Promise"))?; + let adapter = JsFuture::from(adapter_promise).await?; + + if adapter.is_null() || adapter.is_undefined() { + info!("WebGPU probe: no adapter available"); + return Ok(WebGpuProbeResult { + adapter_found: false, + device_created: false, + }); + } + + let request_device = js_sys::Reflect::get(&adapter, &JsValue::from_str("requestDevice"))? + .dyn_into::() + .map_err(|_| JsValue::from_str("adapter.requestDevice is not callable"))?; + + let device_promise = request_device + .call0(&adapter)? 
+ .dyn_into::() + .map_err(|_| JsValue::from_str("requestDevice did not return a Promise"))?; + let device = JsFuture::from(device_promise).await?; + + let device_created = !device.is_null() && !device.is_undefined(); + info!( + "WebGPU probe completed: adapter_found=true device_created={}", + device_created + ); + + Ok(WebGpuProbeResult { + adapter_found: true, + device_created, + }) + } + + #[wasm_bindgen(js_name = adapterFound)] + pub fn adapter_found(&self) -> bool { + self.adapter_found + } + + #[wasm_bindgen(js_name = deviceCreated)] + pub fn device_created(&self) -> bool { + self.device_created + } +} + +#[wasm_bindgen] +pub struct GpuInfo { + vendor: String, + renderer: String, + architecture: String, + description: String, + source: String, +} + +#[wasm_bindgen] +impl GpuInfo { + #[wasm_bindgen(js_name = detect)] + pub async fn detect() -> Result { + if let Some(info) = detect_webgpu_info().await? { + return Ok(info); + } + + if let Some(info) = detect_webgl_info()? { + return Ok(info); + } + + Ok(GpuInfo { + vendor: "unknown".to_string(), + renderer: "unknown".to_string(), + architecture: "unknown".to_string(), + description: "No GPU details exposed by this browser".to_string(), + source: "none".to_string(), + }) + } + + pub fn vendor(&self) -> String { + self.vendor.clone() + } + + pub fn renderer(&self) -> String { + self.renderer.clone() + } + + pub fn architecture(&self) -> String { + self.architecture.clone() + } + + pub fn description(&self) -> String { + self.description.clone() + } + + pub fn source(&self) -> String { + self.source.clone() + } +} + +async fn detect_webgpu_info() -> Result, JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let navigator = window.navigator(); + let gpu = js_sys::Reflect::get(&navigator, &JsValue::from_str("gpu"))?; + + if gpu.is_null() || gpu.is_undefined() { + return Ok(None); + } + + let request_adapter = match js_sys::Reflect::get(&gpu, 
+        &JsValue::from_str("requestAdapter"))
+        .ok()
+        .and_then(|value| value.dyn_into::<js_sys::Function>().ok())
+    {
+        Some(request_adapter) => request_adapter,
+        None => return Ok(None),
+    };
+
+    let adapter_promise = request_adapter
+        .call0(&gpu)?
+        .dyn_into::<js_sys::Promise>()
+        .map_err(|_| JsValue::from_str("requestAdapter did not return a Promise"))?;
+    let adapter = JsFuture::from(adapter_promise).await?;
+
+    if adapter.is_null() || adapter.is_undefined() {
+        return Ok(None);
+    }
+
+    let info_object = if let Some(request_adapter_info) =
+        js_sys::Reflect::get(&adapter, &JsValue::from_str("requestAdapterInfo"))
+            .ok()
+            .and_then(|value| value.dyn_into::<js_sys::Function>().ok())
+    {
+        let info_promise = request_adapter_info
+            .call0(&adapter)?
+            .dyn_into::<js_sys::Promise>()
+            .map_err(|_| JsValue::from_str("requestAdapterInfo did not return a Promise"))?;
+        JsFuture::from(info_promise).await?
+    } else {
+        js_sys::Reflect::get(&adapter, &JsValue::from_str("info"))?
+    };
+
+    if info_object.is_null() || info_object.is_undefined() {
+        return Ok(None);
+    }
+
+    let vendor = js_string_field(&info_object, "vendor");
+    let architecture = js_string_field(&info_object, "architecture");
+    let description = js_string_field(&info_object, "description");
+    let device = js_string_field(&info_object, "device");
+    let renderer = if device.is_empty() { description.clone() } else { device };
+
+    Ok(Some(GpuInfo {
+        vendor: string_or_unknown(vendor),
+        renderer: string_or_unknown(renderer),
+        architecture: string_or_unknown(architecture),
+        description: string_or_unknown(description),
+        source: "webgpu".to_string(),
+    }))
+}
+
+fn detect_webgl_info() -> Result<Option<GpuInfo>, JsValue> {
+    let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?;
+    let document = window
+        .document()
+        .ok_or_else(|| JsValue::from_str("No document available"))?;
+    let canvas = document
+        .create_element("canvas")?
+        .dyn_into::<HtmlCanvasElement>()
+        .map_err(|_| JsValue::from_str("Failed to create canvas element"))?;
+
+    let context = canvas
+        .get_context("webgl")?
+        .or_else(|| canvas.get_context("webgl2").ok().flatten());
+
+    let Some(context) = context else {
+        return Ok(None);
+    };
+
+    let get_extension = match js_sys::Reflect::get(&context, &JsValue::from_str("getExtension"))
+        .ok()
+        .and_then(|value| value.dyn_into::<js_sys::Function>().ok())
+    {
+        Some(get_extension) => get_extension,
+        None => return Ok(None),
+    };
+
+    let extension = get_extension.call1(&context, &JsValue::from_str("WEBGL_debug_renderer_info"))?;
+    if extension.is_null() || extension.is_undefined() {
+        return Ok(None);
+    }
+
+    let get_parameter = match js_sys::Reflect::get(&context, &JsValue::from_str("getParameter"))
+        .ok()
+        .and_then(|value| value.dyn_into::<js_sys::Function>().ok())
+    {
+        Some(get_parameter) => get_parameter,
+        None => return Ok(None),
+    };
+
+    let vendor_enum = js_sys::Reflect::get(&extension, &JsValue::from_str("UNMASKED_VENDOR_WEBGL"))?;
+    let renderer_enum = js_sys::Reflect::get(&extension, &JsValue::from_str("UNMASKED_RENDERER_WEBGL"))?;
+
+    let vendor = get_parameter
+        .call1(&context, &vendor_enum)
+        .ok()
+        .and_then(|value| value.as_string())
+        .unwrap_or_else(|| "unknown".to_string());
+    let renderer = get_parameter
+        .call1(&context, &renderer_enum)
+        .ok()
+        .and_then(|value| value.as_string())
+        .unwrap_or_else(|| "unknown".to_string());
+
+    Ok(Some(GpuInfo {
+        vendor,
+        renderer: renderer.clone(),
+        architecture: "unknown".to_string(),
+        description: renderer,
+        source: "webgl_debug_renderer_info".to_string(),
+    }))
+}
+
+fn js_string_field(value: &JsValue, field: &str) -> String {
+    js_sys::Reflect::get(value, &JsValue::from_str(field))
+        .ok()
+        .and_then(|field_value| field_value.as_string())
+        .unwrap_or_default()
+}
+
+fn string_or_unknown(value: String) -> String {
+    if value.is_empty() { "unknown".to_string() } else { value }
+}
+
+#[wasm_bindgen(start)]
+pub fn init() {
+    let _ =
tracing_wasm::try_set_as_global_default(); + info!("graphics-info module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + false +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + set_module_status("graphics-info: entered run()")?; + log("entered run()")?; + + let outcome = async { + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("detecting graphics support")?; + let support = GraphicsSupport::detect()?; + log(&format!( + "graphics support: webgl={} webgl2={} webgpu={} webnn={}", + support.webgl_supported(), + support.webgl2_supported(), + support.webgpu_supported(), + support.webnn_supported() + ))?; + + log("probing WebGPU")?; + let probe = WebGpuProbeResult::test().await?; + log(&format!( + "WebGPU probe: adapter={} device={}", + probe.adapter_found(), + probe.device_created() + ))?; + + log("detecting GPU info")?; + let gpu = GpuInfo::detect().await?; + log(&format!( + "GPU info: vendor={} renderer={} architecture={} source={}", + gpu.vendor(), + gpu.renderer(), + gpu.architecture(), + gpu.source() + ))?; + + client.send_client_event( + "graphics", + "info_detected", + json!({ + "support": { + "webgl": support.webgl_supported(), + "webgl2": support.webgl2_supported(), + "webgpu": support.webgpu_supported(), + "webnn": support.webnn_supported(), + }, + "webgpu_probe": { + "adapter_found": probe.adapter_found(), + "device_created": probe.device_created(), + }, + "gpu": { + "vendor": gpu.vendor(), + "renderer": gpu.renderer(), + "architecture": gpu.architecture(), + "description": 
gpu.description(), + "source": gpu.source(), + } + }), + )?; + + set_module_status(&format!( + "graphics-info: detected\nGPU: {}\nRenderer: {}\nWebGPU: {}", + gpu.vendor(), + gpu.renderer(), + if probe.device_created() { + "Available" + } else { + "Unavailable" + } + ))?; + + client.disconnect(); + Ok(()) + } + .await; + + if let Err(error) = &outcome { + let message = describe_js_error(error); + let _ = set_module_status(&format!("graphics-info: error\n{}", message)); + let _ = log(&format!("error: {}", message)); + } + + outcome +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[graphics-info] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + 
window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/har1/Cargo.toml b/services/ws-modules/har1/Cargo.toml index a4e55ad..259c753 100644 --- a/services/ws-modules/har1/Cargo.toml +++ b/services/ws-modules/har1/Cargo.toml @@ -10,6 +10,7 @@ repository.workspace = true crate-type = ["cdylib", "rlib"] [dependencies] +et-web.workspace = true et-ws-wasm-agent = { path = "../../ws-wasm-agent" } js-sys = "0.3" serde.workspace = true diff --git a/services/ws-modules/har1/src/lib.rs b/services/ws-modules/har1/src/lib.rs index f016dc4..02c83fb 100644 --- a/services/ws-modules/har1/src/lib.rs +++ b/services/ws-modules/har1/src/lib.rs @@ -1,11 +1,17 @@ +use std::cell::RefCell; use std::collections::VecDeque; +use std::rc::Rc; -use et_ws_wasm_agent::{DeviceSensors, MotionReading, WsClient, WsClientConfig, set_textarea_value}; +use et_web::{SENSOR_PERMISSION_GRANTED, request_sensor_permission}; +use et_ws_wasm_agent::{ + WsClient, WsClientConfig, js_bool_field, js_nested_object, js_number_field, set_textarea_value, +}; use js_sys::{Array, Float32Array, Function, Promise, Reflect}; use serde_json::json; use tracing::info; use wasm_bindgen::prelude::*; 
use wasm_bindgen_futures::JsFuture;
+use web_sys::Event;
 
 const HAR_MODEL_PATH: &str = "/static/models/human_activity_recognition.onnx";
 const HAR_SEQUENCE_LENGTH: usize = 512;
@@ -16,6 +22,276 @@ const STANDARD_GRAVITY: f64 = 9.80665;
 const GRAVITY_FILTER_ALPHA: f64 = 0.8;
 const HAR_CLASS_LABELS: [&str; 6] = ["class_0", "class_1", "class_2", "class_3", "class_4", "class_5"];
 
+#[derive(Clone, Default)]
+struct OrientationReadingState {
+    alpha: Option<f64>,
+    beta: Option<f64>,
+    gamma: Option<f64>,
+    absolute: Option<bool>,
+}
+
+#[derive(Clone, Default)]
+struct MotionReadingState {
+    acceleration_x: Option<f64>,
+    acceleration_y: Option<f64>,
+    acceleration_z: Option<f64>,
+    acceleration_including_gravity_x: Option<f64>,
+    acceleration_including_gravity_y: Option<f64>,
+    acceleration_including_gravity_z: Option<f64>,
+    rotation_rate_alpha: Option<f64>,
+    rotation_rate_beta: Option<f64>,
+    rotation_rate_gamma: Option<f64>,
+    interval_ms: Option<f64>,
+}
+
+#[wasm_bindgen]
+pub struct OrientationReading {
+    inner: OrientationReadingState,
+}
+
+#[wasm_bindgen]
+impl OrientationReading {
+    pub fn alpha(&self) -> f64 {
+        self.inner.alpha.unwrap_or(0.0)
+    }
+
+    pub fn beta(&self) -> f64 {
+        self.inner.beta.unwrap_or(0.0)
+    }
+
+    pub fn gamma(&self) -> f64 {
+        self.inner.gamma.unwrap_or(0.0)
+    }
+
+    pub fn absolute(&self) -> bool {
+        self.inner.absolute.unwrap_or(false)
+    }
+}
+
+#[wasm_bindgen]
+pub struct MotionReading {
+    inner: MotionReadingState,
+}
+
+#[wasm_bindgen]
+impl MotionReading {
+    #[wasm_bindgen(js_name = accelerationX)]
+    pub fn acceleration_x(&self) -> f64 {
+        self.inner.acceleration_x.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = accelerationY)]
+    pub fn acceleration_y(&self) -> f64 {
+        self.inner.acceleration_y.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = accelerationZ)]
+    pub fn acceleration_z(&self) -> f64 {
+        self.inner.acceleration_z.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = accelerationIncludingGravityX)]
+    pub fn acceleration_including_gravity_x(&self) -> f64 {
+        self.inner.acceleration_including_gravity_x.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = accelerationIncludingGravityY)]
+    pub fn acceleration_including_gravity_y(&self) -> f64 {
+        self.inner.acceleration_including_gravity_y.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = accelerationIncludingGravityZ)]
+    pub fn acceleration_including_gravity_z(&self) -> f64 {
+        self.inner.acceleration_including_gravity_z.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = rotationRateAlpha)]
+    pub fn rotation_rate_alpha(&self) -> f64 {
+        self.inner.rotation_rate_alpha.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = rotationRateBeta)]
+    pub fn rotation_rate_beta(&self) -> f64 {
+        self.inner.rotation_rate_beta.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = rotationRateGamma)]
+    pub fn rotation_rate_gamma(&self) -> f64 {
+        self.inner.rotation_rate_gamma.unwrap_or(0.0)
+    }
+
+    #[wasm_bindgen(js_name = intervalMs)]
+    pub fn interval_ms(&self) -> f64 {
+        self.inner.interval_ms.unwrap_or(0.0)
+    }
+}
+
+#[wasm_bindgen]
+pub struct DeviceSensors {
+    active: bool,
+    orientation_state: Rc<RefCell<Option<OrientationReadingState>>>,
+    motion_state: Rc<RefCell<Option<MotionReadingState>>>,
+    orientation_listener: Option<Closure<dyn FnMut(Event)>>,
+    motion_listener: Option<Closure<dyn FnMut(Event)>>,
+}
+
+impl Default for DeviceSensors {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[wasm_bindgen]
+impl DeviceSensors {
+    #[wasm_bindgen(constructor)]
+    pub fn new() -> DeviceSensors {
+        DeviceSensors {
+            active: false,
+            orientation_state: Rc::new(RefCell::new(None)),
+            motion_state: Rc::new(RefCell::new(None)),
+            orientation_listener: None,
+            motion_listener: None,
+        }
+    }
+
+    pub async fn start(&mut self) -> Result<(), JsValue> {
+        if self.active {
+            return Ok(());
+        }
+
+        let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?;
+
+        if js_sys::Reflect::get(&window, &JsValue::from_str("DeviceOrientationEvent"))?.is_undefined()
+            && js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?.is_undefined()
+        {
+            return Err(JsValue::from_str(
+                "Device orientation and
motion APIs are not supported in this browser.", + )); + } + + let orientation_permission = request_sensor_permission(js_sys::Reflect::get( + &window, + &JsValue::from_str("DeviceOrientationEvent"), + )?) + .await?; + let motion_permission = + request_sensor_permission(js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?).await?; + + if orientation_permission != SENSOR_PERMISSION_GRANTED || motion_permission != SENSOR_PERMISSION_GRANTED { + return Err(JsValue::from_str(&format!( + "Sensor permission denied (orientation={orientation_permission}, motion={motion_permission})" + ))); + } + + *self.orientation_state.borrow_mut() = None; + *self.motion_state.borrow_mut() = None; + + let orientation_state = self.orientation_state.clone(); + let orientation_listener = Closure::wrap(Box::new(move |event: Event| { + let value: JsValue = event.into(); + *orientation_state.borrow_mut() = Some(OrientationReadingState { + alpha: js_number_field(&value, "alpha"), + beta: js_number_field(&value, "beta"), + gamma: js_number_field(&value, "gamma"), + absolute: js_bool_field(&value, "absolute"), + }); + }) as Box); + + let motion_state = self.motion_state.clone(); + let motion_listener = Closure::wrap(Box::new(move |event: Event| { + let value: JsValue = event.into(); + let acceleration = js_nested_object(&value, "acceleration"); + let acceleration_including_gravity = js_nested_object(&value, "accelerationIncludingGravity"); + let rotation_rate = js_nested_object(&value, "rotationRate"); + + *motion_state.borrow_mut() = Some(MotionReadingState { + acceleration_x: acceleration.as_ref().and_then(|v| js_number_field(v, "x")), + acceleration_y: acceleration.as_ref().and_then(|v| js_number_field(v, "y")), + acceleration_z: acceleration.as_ref().and_then(|v| js_number_field(v, "z")), + acceleration_including_gravity_x: acceleration_including_gravity + .as_ref() + .and_then(|v| js_number_field(v, "x")), + acceleration_including_gravity_y: acceleration_including_gravity + 
.as_ref() + .and_then(|v| js_number_field(v, "y")), + acceleration_including_gravity_z: acceleration_including_gravity + .as_ref() + .and_then(|v| js_number_field(v, "z")), + rotation_rate_alpha: rotation_rate.as_ref().and_then(|v| js_number_field(v, "alpha")), + rotation_rate_beta: rotation_rate.as_ref().and_then(|v| js_number_field(v, "beta")), + rotation_rate_gamma: rotation_rate.as_ref().and_then(|v| js_number_field(v, "gamma")), + interval_ms: js_number_field(&value, "interval"), + }); + }) as Box); + + let target: &web_sys::EventTarget = window.as_ref(); + target.add_event_listener_with_callback("deviceorientation", orientation_listener.as_ref().unchecked_ref())?; + target.add_event_listener_with_callback("devicemotion", motion_listener.as_ref().unchecked_ref())?; + + self.orientation_listener = Some(orientation_listener); + self.motion_listener = Some(motion_listener); + self.active = true; + info!("Device sensors started"); + Ok(()) + } + + pub fn stop(&mut self) -> Result<(), JsValue> { + if !self.active { + return Ok(()); + } + + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let target: &web_sys::EventTarget = window.as_ref(); + + if let Some(listener) = self.orientation_listener.as_ref() { + target.remove_event_listener_with_callback("deviceorientation", listener.as_ref().unchecked_ref())?; + } + + if let Some(listener) = self.motion_listener.as_ref() { + target.remove_event_listener_with_callback("devicemotion", listener.as_ref().unchecked_ref())?; + } + + self.orientation_listener = None; + self.motion_listener = None; + self.active = false; + info!("Device sensors stopped"); + Ok(()) + } + + #[wasm_bindgen(js_name = isActive)] + pub fn is_active(&self) -> bool { + self.active + } + + #[wasm_bindgen(js_name = hasOrientation)] + pub fn has_orientation(&self) -> bool { + self.orientation_state.borrow().is_some() + } + + #[wasm_bindgen(js_name = hasMotion)] + pub fn has_motion(&self) -> bool { + 
self.motion_state.borrow().is_some() + } + + #[wasm_bindgen(js_name = orientationSnapshot)] + pub fn orientation_snapshot(&self) -> Result { + self.orientation_state + .borrow() + .clone() + .map(|inner| OrientationReading { inner }) + .ok_or_else(|| JsValue::from_str("No orientation reading available yet")) + } + + #[wasm_bindgen(js_name = motionSnapshot)] + pub fn motion_snapshot(&self) -> Result { + self.motion_state + .borrow() + .clone() + .map(|inner| MotionReading { inner }) + .ok_or_else(|| JsValue::from_str("No motion reading available yet")) + } +} + #[wasm_bindgen(start)] pub fn init() { let _ = tracing_wasm::try_set_as_global_default(); diff --git a/services/ws-modules/nfc/Cargo.toml b/services/ws-modules/nfc/Cargo.toml new file mode 100644 index 0000000..7dcbeea --- /dev/null +++ b/services/ws-modules/nfc/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "nfc module" +name = "et-ws-nfc" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/nfc/src/lib.rs b/services/ws-modules/nfc/src/lib.rs new file mode 100644 index 0000000..faeea4e --- /dev/null +++ b/services/ws-modules/nfc/src/lib.rs @@ -0,0 +1,348 @@ +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; + +const NFC_SCAN_TIMEOUT_MS: i32 = 60_000; + +#[wasm_bindgen] +pub struct NfcScanResult { + serial_number: String, + record_summary: String, 
+}
+
+#[wasm_bindgen]
+impl NfcScanResult {
+    #[wasm_bindgen(js_name = scanOnce)]
+    pub async fn scan_once() -> Result<NfcScanResult, JsValue> {
+        Self::scan_once_with_timeout(NFC_SCAN_TIMEOUT_MS).await
+    }
+
+    #[wasm_bindgen(js_name = scanOnceWithTimeout)]
+    pub async fn scan_once_with_timeout(timeout_ms: i32) -> Result<NfcScanResult, JsValue> {
+        let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?;
+        let ndef_ctor = js_sys::Reflect::get(&window, &JsValue::from_str("NDEFReader"))
+            .ok()
+            .filter(|value| !value.is_undefined() && !value.is_null())
+            .ok_or_else(|| JsValue::from_str("Web NFC is not available in this browser context"))?;
+
+        let constructor = ndef_ctor
+            .dyn_into::<js_sys::Function>()
+            .map_err(|_| JsValue::from_str("NDEFReader constructor is not callable"))?;
+        let reader = js_sys::Reflect::construct(&constructor, &js_sys::Array::new())?;
+
+        let scan = js_sys::Reflect::get(&reader, &JsValue::from_str("scan"))?
+            .dyn_into::<js_sys::Function>()
+            .map_err(|_| JsValue::from_str("NDEFReader.scan is not callable"))?;
+        let scan_promise = scan
+            .call0(&reader)?
+            .dyn_into::<js_sys::Promise>()
+            .map_err(|_| JsValue::from_str("NDEFReader.scan did not return a Promise"))?;
+        let _ = JsFuture::from(scan_promise).await?;
+
+        let promise = js_sys::Promise::new(&mut |resolve, reject| {
+            let reject_for_timeout = reject.clone();
+            let timeout_closure = Closure::once(Box::new(move || {
+                let _ = reject_for_timeout.call1(
+                    &JsValue::NULL,
+                    &JsValue::from_str(&format!("NFC scan timed out after {} seconds", timeout_ms / 1000)),
+                );
+            }) as Box<dyn FnOnce()>);
+
+            if let Some(window) = web_sys::window() {
+                let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0(
+                    timeout_closure.as_ref().unchecked_ref(),
+                    timeout_ms,
+                );
+            }
+
+            let reject_for_error = reject.clone();
+
+            let on_reading = Closure::once(Box::new(move |event: JsValue| {
+                let serial_number = js_sys::Reflect::get(&event, &JsValue::from_str("serialNumber"))
+                    .ok()
+                    .and_then(|value| value.as_string())
+                    .unwrap_or_else(|| "unknown".to_string());
+                let record_summary = summarize_ndef_records(&event);
+
+                let payload = js_sys::Object::new();
+                let _ = js_sys::Reflect::set(
+                    &payload,
+                    &JsValue::from_str("serialNumber"),
+                    &JsValue::from_str(&serial_number),
+                );
+                let _ = js_sys::Reflect::set(
+                    &payload,
+                    &JsValue::from_str("recordSummary"),
+                    &JsValue::from_str(&record_summary),
+                );
+                let _ = resolve.call1(&JsValue::NULL, &payload);
+            }) as Box<dyn FnOnce(JsValue)>);
+
+            let on_reading_error = Closure::once(Box::new(move |event: JsValue| {
+                let message = js_sys::Reflect::get(&event, &JsValue::from_str("message"))
+                    .ok()
+                    .and_then(|value| value.as_string())
+                    .unwrap_or_else(|| "NFC reading failed".to_string());
+                let _ = reject_for_error.call1(&JsValue::NULL, &JsValue::from_str(&message));
+            }) as Box<dyn FnOnce(JsValue)>);
+
+            let _ = js_sys::Reflect::set(
+                &reader,
+                &JsValue::from_str("onreading"),
+                on_reading.as_ref().unchecked_ref(),
+            );
+            let _ = js_sys::Reflect::set(
+                &reader,
+                &JsValue::from_str("onreadingerror"),
+                on_reading_error.as_ref().unchecked_ref(),
+            );
+
+            on_reading.forget();
+
on_reading_error.forget(); + timeout_closure.forget(); + }); + + let result = JsFuture::from(promise).await?; + let serial_number = js_sys::Reflect::get(&result, &JsValue::from_str("serialNumber"))? + .as_string() + .unwrap_or_else(|| "unknown".to_string()); + let record_summary = js_sys::Reflect::get(&result, &JsValue::from_str("recordSummary"))? + .as_string() + .unwrap_or_else(|| "no records".to_string()); + + info!( + "NFC scan captured: serial_number={} summary={}", + serial_number, record_summary + ); + + Ok(NfcScanResult { + serial_number, + record_summary, + }) + } + + #[wasm_bindgen(js_name = serialNumber)] + pub fn serial_number(&self) -> String { + self.serial_number.clone() + } + + #[wasm_bindgen(js_name = recordSummary)] + pub fn record_summary(&self) -> String { + self.record_summary.clone() + } +} + +fn summarize_ndef_records(event: &JsValue) -> String { + let message = match js_sys::Reflect::get(event, &JsValue::from_str("message")) { + Ok(message) => message, + Err(_) => return "no message".to_string(), + }; + let records = match js_sys::Reflect::get(&message, &JsValue::from_str("records")) { + Ok(records) => records, + Err(_) => return "no records".to_string(), + }; + let length = match js_sys::Reflect::get(&records, &JsValue::from_str("length")) + .ok() + .and_then(|value| value.as_f64()) + { + Some(length) => length as u32, + None => return "no records".to_string(), + }; + + let mut summary = Vec::new(); + for index in 0..length { + let record = match js_sys::Reflect::get(&records, &JsValue::from_f64(index as f64)) { + Ok(record) => record, + Err(_) => continue, + }; + let record_type = js_sys::Reflect::get(&record, &JsValue::from_str("recordType")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_else(|| "unknown".to_string()); + let media_type = js_sys::Reflect::get(&record, &JsValue::from_str("mediaType")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_default(); + let id = js_sys::Reflect::get(&record, 
// --- continuation of NfcScanResult::record_summary() ---------------------
// NOTE(review): the opening of this function (the loop over records and the
// recordType/mediaType reads) lies before this chunk; the fragment below is
// kept verbatim, only reflowed out of the diff mangling — do not edit alone.
            &JsValue::from_str("id"))
            .ok()
            .and_then(|value| value.as_string())
            .unwrap_or_default();

        let mut parts = vec![format!("type={record_type}")];
        if !media_type.is_empty() {
            parts.push(format!("media={media_type}"));
        }
        if !id.is_empty() {
            parts.push(format!("id={id}"));
        }
        summary.push(parts.join(","));
    }

    if summary.is_empty() {
        "no records".to_string()
    } else {
        summary.join(" | ")
    }
}

/// Module entry point, invoked once by wasm-bindgen when the module loads.
#[wasm_bindgen(start)]
pub fn init() {
    // try_set_… (not set_…) so a second module on the same page does not
    // panic when a tracing subscriber is already installed.
    let _ = tracing_wasm::try_set_as_global_default();
    info!("nfc module initialized");
}

/// Returns `{ name, description, version }` taken from Cargo metadata, or
/// JS `null` if serialization fails.
#[wasm_bindgen]
pub fn metadata() -> JsValue {
    serde_wasm_bindgen::to_value(&json!({
        "name": env!("CARGO_PKG_NAME"),
        "description": env!("CARGO_PKG_DESCRIPTION"),
        "version": env!("CARGO_PKG_VERSION"),
    }))
    .unwrap_or(JsValue::NULL)
}

/// The NFC workflow is one-shot: it never stays resident between calls,
/// so this always reports `false`.
#[wasm_bindgen]
pub fn is_running() -> bool {
    false
}

/// Runs the one-shot NFC workflow: connect to the websocket server, wait
/// for a single tag tap, report the scan as a client event, and render
/// human-readable status into the page.
///
/// # Errors
/// Returns the underlying `JsValue` error after first rendering a
/// user-facing explanation (unsupported browser / timeout / other) into
/// the module output area.
#[wasm_bindgen]
pub async fn run() -> Result<(), JsValue> {
    set_module_status("nfc: Starting NFC scan...\nPlease tap an NFC tag within 60 seconds.")?;
    log("entered run()")?;

    // All fallible steps are grouped in one async block so a single error
    // path below can classify and render the failure.
    let outcome = async {
        let ws_url = websocket_url()?;
        let mut client = WsClient::new(WsClientConfig::new(ws_url));
        client.connect()?;
        wait_for_connected(&client).await?;
        log(&format!("websocket connected with agent_id={}", client.get_client_id()))?;

        log("waiting for NFC tap (60 second timeout)...")?;
        set_module_status("nfc: Waiting for NFC tap...\nPlease hold your device near an NFC tag.")?;

        let result = NfcScanResult::scan_once().await?;
        let serial = result.serial_number();
        let summary = result.record_summary();
        log(&format!("NFC scan captured: serial={} summary={}", serial, summary))?;

        client.send_client_event(
            "nfc",
            "scan_captured",
            json!({
                "serial_number": serial,
                "record_summary": summary,
            }),
        )?;

        set_module_status(&format!("nfc: Scan captured\nSerial: {}\nSummary: {}", serial, summary))?;

        client.disconnect();
        Ok(())
    }
    .await;

    if let Err(error) = &outcome {
        let message = describe_js_error(error);
        // Classify the failure by message text so the page can show a
        // targeted hint (Web NFC support vs. timeout vs. anything else).
        let error_display = if message.contains("not available") || message.contains("not supported") {
            format!(
                "nfc: Not available\nWeb NFC requires: Chrome/Edge on Android, HTTPS connection\nError: {}",
                message
            )
        } else if message.contains("timed out") || message.contains("timeout") {
            "nfc: Timeout\n\nNo NFC tag was detected within 60 seconds.\nPlease try again and tap an NFC tag."
                .to_string()
        } else {
            format!("nfc: Error\n\n{}", message)
        };
        // Best-effort rendering: the original error is what gets returned.
        let _ = set_module_status(&error_display);
        let _ = log(&format!("error: {}", message));
    }

    outcome
}

/// Logs to the browser console and appends the same line to the page's
/// `#log` element when present (best-effort; missing DOM is not an error).
fn log(message: &str) -> Result<(), JsValue> {
    let line = format!("[nfc] {}", message);
    web_sys::console::log_1(&JsValue::from_str(&line));

    if let Some(window) = web_sys::window()
        && let Some(document) = window.document()
        && let Some(log_el) = document.get_element_by_id("log")
    {
        let current = log_el.text_content().unwrap_or_default();
        let next = if current.is_empty() {
            line
        } else {
            format!("{}\n{}", current, line)
        };
        log_el.set_text_content(Some(&next));
    }

    Ok(())
}

/// Writes the workflow status into the shared `module-output` textarea.
fn set_module_status(message: &str) -> Result<(), JsValue> {
    set_textarea_value("module-output", message)
}

/// Best-effort conversion of an arbitrary JS error value to readable text:
/// plain string → `Error.message` → `JSON.stringify` → "Unknown error".
fn describe_js_error(error: &JsValue) -> String {
    if let Some(s) = error.as_string() {
        return s;
    }

    if let Some(obj) = error.dyn_ref::<js_sys::Object>()
        && let Ok(message) = js_sys::Reflect::get(obj, &JsValue::from_str("message"))
        && let Some(msg) = message.as_string()
    {
        return msg;
    }

    js_sys::JSON::stringify(error)
        .ok()
        .and_then(|s| s.as_string())
        .unwrap_or_else(|| "Unknown error".to_string())
}

/// Polls the client state every 100 ms, giving up after ~10 seconds.
async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> {
    for _ in 0..100 {
        if client.get_state() == "connected" {
            return Ok(());
        }
        sleep_ms(100).await?;
    }

    Err(JsValue::from_str("Timed out waiting for websocket connection"))
}

// NOTE(review): sleep_ms continues past this span on the next (untouched)
// chunk line; only its head is visible here and it is kept verbatim.
async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> {
    let window =
web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/sensor1/Cargo.toml b/services/ws-modules/sensor1/Cargo.toml new file mode 100644 index 0000000..a1c8d40 --- /dev/null +++ b/services/ws-modules/sensor1/Cargo.toml @@ -0,0 +1,26 @@ +[package] +description = "device sensor stream" +name = "et-ws-sensor1" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-web.workspace = true +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Event", "EventTarget", "Navigator", "Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git 
a/services/ws-modules/sensor1/src/lib.rs b/services/ws-modules/sensor1/src/lib.rs new file mode 100644 index 0000000..237f80a --- /dev/null +++ b/services/ws-modules/sensor1/src/lib.rs @@ -0,0 +1,449 @@ +use std::cell::RefCell; +use std::rc::Rc; + +use et_web::{SENSOR_PERMISSION_GRANTED, request_sensor_permission}; +use et_ws_wasm_agent::{js_bool_field, js_nested_object, js_number_field, set_textarea_value}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use web_sys::Event; + +const SENSOR_RENDER_INTERVAL_MS: i32 = 150; + +#[derive(Clone, Default)] +struct OrientationReadingState { + alpha: Option, + beta: Option, + gamma: Option, + absolute: Option, +} + +#[derive(Clone, Default)] +struct MotionReadingState { + acceleration_x: Option, + acceleration_y: Option, + acceleration_z: Option, + acceleration_including_gravity_x: Option, + acceleration_including_gravity_y: Option, + acceleration_including_gravity_z: Option, + rotation_rate_alpha: Option, + rotation_rate_beta: Option, + rotation_rate_gamma: Option, + interval_ms: Option, +} + +#[wasm_bindgen] +pub struct OrientationReading { + inner: OrientationReadingState, +} + +#[wasm_bindgen] +impl OrientationReading { + pub fn alpha(&self) -> f64 { + self.inner.alpha.unwrap_or(0.0) + } + + pub fn beta(&self) -> f64 { + self.inner.beta.unwrap_or(0.0) + } + + pub fn gamma(&self) -> f64 { + self.inner.gamma.unwrap_or(0.0) + } + + pub fn absolute(&self) -> bool { + self.inner.absolute.unwrap_or(false) + } +} + +#[wasm_bindgen] +pub struct MotionReading { + inner: MotionReadingState, +} + +#[wasm_bindgen] +impl MotionReading { + #[wasm_bindgen(js_name = accelerationX)] + pub fn acceleration_x(&self) -> f64 { + self.inner.acceleration_x.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = accelerationY)] + pub fn acceleration_y(&self) -> f64 { + self.inner.acceleration_y.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = accelerationZ)] + pub fn acceleration_z(&self) -> f64 { + 
self.inner.acceleration_z.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = accelerationIncludingGravityX)] + pub fn acceleration_including_gravity_x(&self) -> f64 { + self.inner.acceleration_including_gravity_x.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = accelerationIncludingGravityY)] + pub fn acceleration_including_gravity_y(&self) -> f64 { + self.inner.acceleration_including_gravity_y.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = accelerationIncludingGravityZ)] + pub fn acceleration_including_gravity_z(&self) -> f64 { + self.inner.acceleration_including_gravity_z.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = rotationRateAlpha)] + pub fn rotation_rate_alpha(&self) -> f64 { + self.inner.rotation_rate_alpha.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = rotationRateBeta)] + pub fn rotation_rate_beta(&self) -> f64 { + self.inner.rotation_rate_beta.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = rotationRateGamma)] + pub fn rotation_rate_gamma(&self) -> f64 { + self.inner.rotation_rate_gamma.unwrap_or(0.0) + } + + #[wasm_bindgen(js_name = intervalMs)] + pub fn interval_ms(&self) -> f64 { + self.inner.interval_ms.unwrap_or(0.0) + } +} + +#[wasm_bindgen] +pub struct DeviceSensors { + active: bool, + orientation_state: Rc>>, + motion_state: Rc>>, + orientation_listener: Option>, + motion_listener: Option>, +} + +impl Default for DeviceSensors { + fn default() -> Self { + Self::new() + } +} + +#[wasm_bindgen] +impl DeviceSensors { + #[wasm_bindgen(constructor)] + pub fn new() -> DeviceSensors { + DeviceSensors { + active: false, + orientation_state: Rc::new(RefCell::new(None)), + motion_state: Rc::new(RefCell::new(None)), + orientation_listener: None, + motion_listener: None, + } + } + + pub async fn start(&mut self) -> Result<(), JsValue> { + if self.active { + return Ok(()); + } + + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + + if js_sys::Reflect::get(&window, 
&JsValue::from_str("DeviceOrientationEvent"))?.is_undefined() + && js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?.is_undefined() + { + return Err(JsValue::from_str( + "Device orientation and motion APIs are not supported in this browser.", + )); + } + + let orientation_permission = request_sensor_permission(js_sys::Reflect::get( + &window, + &JsValue::from_str("DeviceOrientationEvent"), + )?) + .await?; + let motion_permission = + request_sensor_permission(js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?).await?; + + if orientation_permission != SENSOR_PERMISSION_GRANTED || motion_permission != SENSOR_PERMISSION_GRANTED { + return Err(JsValue::from_str(&format!( + "Sensor permission denied (orientation={orientation_permission}, motion={motion_permission})" + ))); + } + + *self.orientation_state.borrow_mut() = None; + *self.motion_state.borrow_mut() = None; + + let orientation_state = self.orientation_state.clone(); + let orientation_listener = Closure::wrap(Box::new(move |event: Event| { + let value: JsValue = event.into(); + *orientation_state.borrow_mut() = Some(OrientationReadingState { + alpha: js_number_field(&value, "alpha"), + beta: js_number_field(&value, "beta"), + gamma: js_number_field(&value, "gamma"), + absolute: js_bool_field(&value, "absolute"), + }); + }) as Box); + + let motion_state = self.motion_state.clone(); + let motion_listener = Closure::wrap(Box::new(move |event: Event| { + let value: JsValue = event.into(); + let acceleration = js_nested_object(&value, "acceleration"); + let acceleration_including_gravity = js_nested_object(&value, "accelerationIncludingGravity"); + let rotation_rate = js_nested_object(&value, "rotationRate"); + + *motion_state.borrow_mut() = Some(MotionReadingState { + acceleration_x: acceleration.as_ref().and_then(|v| js_number_field(v, "x")), + acceleration_y: acceleration.as_ref().and_then(|v| js_number_field(v, "y")), + acceleration_z: acceleration.as_ref().and_then(|v| 
js_number_field(v, "z")), + acceleration_including_gravity_x: acceleration_including_gravity + .as_ref() + .and_then(|v| js_number_field(v, "x")), + acceleration_including_gravity_y: acceleration_including_gravity + .as_ref() + .and_then(|v| js_number_field(v, "y")), + acceleration_including_gravity_z: acceleration_including_gravity + .as_ref() + .and_then(|v| js_number_field(v, "z")), + rotation_rate_alpha: rotation_rate.as_ref().and_then(|v| js_number_field(v, "alpha")), + rotation_rate_beta: rotation_rate.as_ref().and_then(|v| js_number_field(v, "beta")), + rotation_rate_gamma: rotation_rate.as_ref().and_then(|v| js_number_field(v, "gamma")), + interval_ms: js_number_field(&value, "interval"), + }); + }) as Box); + + let target: &web_sys::EventTarget = window.as_ref(); + target.add_event_listener_with_callback("deviceorientation", orientation_listener.as_ref().unchecked_ref())?; + target.add_event_listener_with_callback("devicemotion", motion_listener.as_ref().unchecked_ref())?; + + self.orientation_listener = Some(orientation_listener); + self.motion_listener = Some(motion_listener); + self.active = true; + info!("Device sensors started"); + Ok(()) + } + + pub fn stop(&mut self) -> Result<(), JsValue> { + if !self.active { + return Ok(()); + } + + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let target: &web_sys::EventTarget = window.as_ref(); + + if let Some(listener) = self.orientation_listener.as_ref() { + target.remove_event_listener_with_callback("deviceorientation", listener.as_ref().unchecked_ref())?; + } + + if let Some(listener) = self.motion_listener.as_ref() { + target.remove_event_listener_with_callback("devicemotion", listener.as_ref().unchecked_ref())?; + } + + self.orientation_listener = None; + self.motion_listener = None; + self.active = false; + info!("Device sensors stopped"); + Ok(()) + } + + #[wasm_bindgen(js_name = isActive)] + pub fn is_active(&self) -> bool { + self.active + } + + 
#[wasm_bindgen(js_name = hasOrientation)] + pub fn has_orientation(&self) -> bool { + self.orientation_state.borrow().is_some() + } + + #[wasm_bindgen(js_name = hasMotion)] + pub fn has_motion(&self) -> bool { + self.motion_state.borrow().is_some() + } + + #[wasm_bindgen(js_name = orientationSnapshot)] + pub fn orientation_snapshot(&self) -> Result { + self.orientation_state + .borrow() + .clone() + .map(|inner| OrientationReading { inner }) + .ok_or_else(|| JsValue::from_str("No orientation reading available yet")) + } + + #[wasm_bindgen(js_name = motionSnapshot)] + pub fn motion_snapshot(&self) -> Result { + self.motion_state + .borrow() + .clone() + .map(|inner| MotionReading { inner }) + .ok_or_else(|| JsValue::from_str("No motion reading available yet")) + } +} + +struct SensorStreamRuntime { + sensors: DeviceSensors, + render_interval_id: i32, + _render_closure: Closure, +} + +thread_local! { + static SENSOR_STREAM_RUNTIME: RefCell> = const { RefCell::new(None) }; +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("sensor stream workflow module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + SENSOR_STREAM_RUNTIME.with(|runtime| runtime.borrow().is_some()) +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + if is_running() { + return Ok(()); + } + + let mut sensors = DeviceSensors::new(); + set_sensor_status("sensor stream: requesting sensor access")?; + sensors.start().await?; + render_sensor_output(&sensors)?; + + let render_closure = Closure::wrap(Box::new(move || { + SENSOR_STREAM_RUNTIME.with(|runtime| { + let runtime_ref = runtime.borrow(); + let Some(runtime) = runtime_ref.as_ref() else { + return; + }; + + let _ = 
render_sensor_output(&runtime.sensors); + }); + }) as Box); + + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let render_interval_id = window.set_interval_with_callback_and_timeout_and_arguments_0( + render_closure.as_ref().unchecked_ref(), + SENSOR_RENDER_INTERVAL_MS, + )?; + + SENSOR_STREAM_RUNTIME.with(|runtime| { + runtime.borrow_mut().replace(SensorStreamRuntime { + sensors, + render_interval_id, + _render_closure: render_closure, + }); + }); + + let stop_callback = Closure::once_into_js(move || { + if is_running() { + let _ = stop(); + let _ = set_sensor_status("sensor stream: finished automatically after 15 seconds"); + } + }); + window.set_timeout_with_callback_and_timeout_and_arguments_0(stop_callback.unchecked_ref(), 15000)?; + + set_sensor_status("sensor stream: running")?; + Ok(()) +} + +#[wasm_bindgen] +pub fn stop() -> Result<(), JsValue> { + SENSOR_STREAM_RUNTIME.with(|runtime| { + if let Some(mut runtime) = runtime.borrow_mut().take() { + if let Some(window) = web_sys::window() { + window.clear_interval_with_handle(runtime.render_interval_id); + } + runtime.sensors.stop()?; + } + + Ok::<(), JsValue>(()) + })?; + + set_sensor_status("sensor stream: stopped")?; + set_textarea_value("sensor-output", "Waiting for device sensor data...") +} + +fn set_sensor_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn render_sensor_output(sensors: &DeviceSensors) -> Result<(), JsValue> { + let orientation = if sensors.has_orientation() { + Some(sensors.orientation_snapshot()?) + } else { + None + }; + let motion = if sensors.has_motion() { + Some(sensors.motion_snapshot()?) 
+ } else { + None + }; + + let mut lines = vec![ + String::from("Device sensor stream"), + format!( + "updated: {}", + String::from(js_sys::Date::new_0().to_locale_time_string("en-US")) + ), + String::new(), + String::from("orientation"), + ]; + + if let Some(orientation) = orientation { + lines.push(format!("alpha: {}", format_number(orientation.alpha(), 3))); + lines.push(format!("beta: {}", format_number(orientation.beta(), 3))); + lines.push(format!("gamma: {}", format_number(orientation.gamma(), 3))); + lines.push(format!("absolute: {}", orientation.absolute())); + } else { + lines.push(String::from("waiting for orientation event...")); + } + + lines.push(String::new()); + lines.push(String::from("motion")); + if let Some(motion) = motion { + lines.push(format!( + "acceleration: x={} y={} z={}", + format_number(motion.acceleration_x(), 3), + format_number(motion.acceleration_y(), 3), + format_number(motion.acceleration_z(), 3) + )); + lines.push(format!( + "acceleration including gravity: x={} y={} z={}", + format_number(motion.acceleration_including_gravity_x(), 3), + format_number(motion.acceleration_including_gravity_y(), 3), + format_number(motion.acceleration_including_gravity_z(), 3) + )); + lines.push(format!( + "rotation rate: alpha={} beta={} gamma={}", + format_number(motion.rotation_rate_alpha(), 3), + format_number(motion.rotation_rate_beta(), 3), + format_number(motion.rotation_rate_gamma(), 3) + )); + lines.push(format!("interval: {} ms", format_number(motion.interval_ms(), 1))); + } else { + lines.push(String::from("waiting for motion event...")); + } + + set_textarea_value("sensor-output", &lines.join("\n")) +} + +fn format_number(value: f64, digits: usize) -> String { + if value.is_finite() { + format!("{value:.digits$}") + } else { + String::from("n/a") + } +} diff --git a/services/ws-modules/speech-recognition/Cargo.toml b/services/ws-modules/speech-recognition/Cargo.toml new file mode 100644 index 0000000..3d42230 --- /dev/null +++ 
b/services/ws-modules/speech-recognition/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "speech recognition module" +name = "et-ws-speech-recognition" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["Window", "console"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/speech-recognition/src/lib.rs b/services/ws-modules/speech-recognition/src/lib.rs new file mode 100644 index 0000000..1dbf046 --- /dev/null +++ b/services/ws-modules/speech-recognition/src/lib.rs @@ -0,0 +1,472 @@ +use std::cell::{Cell, RefCell}; +use std::rc::Rc; + +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; + +#[wasm_bindgen] +pub struct SpeechRecognitionResult { + transcript: String, + confidence: f64, +} + +#[wasm_bindgen] +impl SpeechRecognitionResult { + #[wasm_bindgen(js_name = recognizeOnce)] + pub async fn recognize_once() -> Result { + let session = SpeechRecognitionSession::new()?; + session.start().await + } + + pub fn transcript(&self) -> String { + self.transcript.clone() + } + + pub fn confidence(&self) -> f64 { + self.confidence + } +} + +#[wasm_bindgen] +pub struct SpeechRecognitionSession { + recognition: JsValue, + stop_requested: Rc>, +} + +#[wasm_bindgen] +impl SpeechRecognitionSession { + #[wasm_bindgen(constructor)] + pub fn new() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let speech_recognition_ctor = 
js_sys::Reflect::get(&window, &JsValue::from_str("SpeechRecognition")) + .ok() + .filter(|value| !value.is_undefined() && !value.is_null()) + .or_else(|| { + js_sys::Reflect::get(&window, &JsValue::from_str("webkitSpeechRecognition")) + .ok() + .filter(|value| !value.is_undefined() && !value.is_null()) + }) + .ok_or_else(|| JsValue::from_str("Web Speech API recognition is not available in this browser context"))?; + let constructor = speech_recognition_ctor + .dyn_into::() + .map_err(|_| JsValue::from_str("SpeechRecognition constructor is not callable"))?; + let recognition = js_sys::Reflect::construct(&constructor, &js_sys::Array::new())?; + + js_sys::Reflect::set(&recognition, &JsValue::from_str("lang"), &JsValue::from_str("en-US"))?; + js_sys::Reflect::set(&recognition, &JsValue::from_str("interimResults"), &JsValue::TRUE)?; + js_sys::Reflect::set( + &recognition, + &JsValue::from_str("maxAlternatives"), + &JsValue::from_f64(1.0), + )?; + + Ok(SpeechRecognitionSession { + recognition, + stop_requested: Rc::new(Cell::new(false)), + }) + } + + pub async fn start(&self) -> Result { + self.stop_requested.set(false); + let recognition = self.recognition.clone(); + let stop_requested = self.stop_requested.clone(); + let promise = js_sys::Promise::new(&mut |resolve, reject| { + let settled = Rc::new(Cell::new(false)); + let resolve_for_result = resolve.clone(); + let resolve_for_end = resolve.clone(); + let reject_for_error = reject.clone(); + let reject_for_end = reject.clone(); + let settled_for_result = settled.clone(); + let settled_for_error = settled.clone(); + let settled_for_end = settled.clone(); + let transcript_state: Rc>> = Rc::new(RefCell::new(None)); + let transcript_state_for_result = transcript_state.clone(); + let transcript_state_for_end = transcript_state.clone(); + let stop_requested_for_end = stop_requested.clone(); + + let on_result = Closure::wrap(Box::new(move |event: JsValue| { + if let Some((transcript, confidence, has_final)) = 
extract_speech_event_transcript(&event) { + *transcript_state_for_result.borrow_mut() = Some((transcript.clone(), confidence)); + + if has_final && !settled_for_result.replace(true) { + let payload = js_sys::Object::new(); + let _ = js_sys::Reflect::set( + &payload, + &JsValue::from_str("transcript"), + &JsValue::from_str(&transcript), + ); + let _ = js_sys::Reflect::set( + &payload, + &JsValue::from_str("confidence"), + &JsValue::from_f64(confidence), + ); + let _ = resolve_for_result.call1(&JsValue::NULL, &payload); + } + } + }) as Box); + + let on_error = Closure::wrap(Box::new(move |event: JsValue| { + if settled_for_error.replace(true) { + return; + } + let message = js_sys::Reflect::get(&event, &JsValue::from_str("error")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_else(|| "speech recognition failed".to_string()); + let _ = reject_for_error.call1(&JsValue::NULL, &JsValue::from_str(&message)); + }) as Box); + + let on_end = Closure::wrap(Box::new(move || { + if settled_for_end.replace(true) { + return; + } + if let Some((transcript, confidence)) = transcript_state_for_end.borrow().clone() { + let payload = js_sys::Object::new(); + let _ = js_sys::Reflect::set( + &payload, + &JsValue::from_str("transcript"), + &JsValue::from_str(&transcript), + ); + let _ = js_sys::Reflect::set( + &payload, + &JsValue::from_str("confidence"), + &JsValue::from_f64(confidence), + ); + let _ = resolve_for_end.call1(&JsValue::NULL, &payload); + } else if stop_requested_for_end.get() { + let _ = reject_for_end.call1( + &JsValue::NULL, + &JsValue::from_str("speech recognition stopped before any transcript was captured"), + ); + } else { + let _ = reject_for_end.call1( + &JsValue::NULL, + &JsValue::from_str("speech recognition ended without a transcript"), + ); + } + }) as Box); + + let _ = js_sys::Reflect::set( + &recognition, + &JsValue::from_str("onresult"), + on_result.as_ref().unchecked_ref(), + ); + let _ = js_sys::Reflect::set( + &recognition, + 
&JsValue::from_str("onerror"), + on_error.as_ref().unchecked_ref(), + ); + let _ = js_sys::Reflect::set( + &recognition, + &JsValue::from_str("onend"), + on_end.as_ref().unchecked_ref(), + ); + + if let Some(start) = js_sys::Reflect::get(&recognition, &JsValue::from_str("start")) + .ok() + .and_then(|value| value.dyn_into::().ok()) + { + let _ = start.call0(&recognition); + } else { + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str("SpeechRecognition.start is not callable"), + ); + } + + on_result.forget(); + on_error.forget(); + on_end.forget(); + }); + + let result = JsFuture::from(promise).await?; + let transcript = js_sys::Reflect::get(&result, &JsValue::from_str("transcript"))? + .as_string() + .ok_or_else(|| JsValue::from_str("Speech recognition transcript missing"))?; + let confidence = js_sys::Reflect::get(&result, &JsValue::from_str("confidence"))? + .as_f64() + .unwrap_or(0.0); + + info!("Speech recognition captured transcript with confidence={}", confidence); + + Ok(SpeechRecognitionResult { transcript, confidence }) + } + + pub fn stop(&self) -> Result<(), JsValue> { + self.stop_requested.set(true); + let stop = js_sys::Reflect::get(&self.recognition, &JsValue::from_str("stop"))? + .dyn_into::() + .map_err(|_| JsValue::from_str("SpeechRecognition.stop is not callable"))?; + stop.call0(&self.recognition)?; + Ok(()) + } +} + +fn extract_speech_event_transcript(event: &JsValue) -> Option<(String, f64, bool)> { + let results = js_sys::Reflect::get(event, &JsValue::from_str("results")).ok()?; + let length = js_sys::Reflect::get(&results, &JsValue::from_str("length")) + .ok()? + .as_f64()? 
as u32; + + let mut transcript_parts = Vec::new(); + let mut confidence = 0.0; + let mut confidence_count = 0_u32; + let mut has_final = false; + + for index in 0..length { + let result = match js_sys::Reflect::get(&results, &JsValue::from_f64(index as f64)) { + Ok(result) => result, + Err(_) => continue, + }; + + let alternative = match js_sys::Reflect::get(&result, &JsValue::from_f64(0.0)) { + Ok(alternative) => alternative, + Err(_) => continue, + }; + + if let Some(part) = js_sys::Reflect::get(&alternative, &JsValue::from_str("transcript")) + .ok() + .and_then(|value| value.as_string()) + { + let trimmed = part.trim(); + if !trimmed.is_empty() { + transcript_parts.push(trimmed.to_string()); + } + } + + if let Some(value) = js_sys::Reflect::get(&alternative, &JsValue::from_str("confidence")) + .ok() + .and_then(|value| value.as_f64()) + { + confidence += value; + confidence_count += 1; + } + + if js_sys::Reflect::get(&result, &JsValue::from_str("isFinal")) + .ok() + .and_then(|value| value.as_bool()) + .unwrap_or(false) + { + has_final = true; + } + } + + if transcript_parts.is_empty() { + return None; + } + + let transcript = transcript_parts.join(" "); + let average_confidence = if confidence_count == 0 { + 0.0 + } else { + confidence / confidence_count as f64 + }; + + Some((transcript, average_confidence, has_final)) +} + +struct SpeechRecognitionRuntime { + client: WsClient, + session: Rc, +} + +thread_local! 
{ + static SPEECH_RECOGNITION_RUNTIME: RefCell> = const { RefCell::new(None) }; +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("speech-recognition module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + SPEECH_RECOGNITION_RUNTIME.with(|runtime| runtime.borrow().is_some()) +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + if is_running() { + return Ok(()); + } + + set_module_status("speech-recognition: entered run()")?; + log("entered run()")?; + + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("starting speech recognition session")?; + let session = Rc::new(SpeechRecognitionSession::new()?); + + SPEECH_RECOGNITION_RUNTIME.with(|runtime| { + runtime.borrow_mut().replace(SpeechRecognitionRuntime { + client: client.clone(), + session: session.clone(), + }); + }); + + set_module_status("speech-recognition: running")?; + + let start_time = js_sys::Date::now(); + let mut result_count = 0; + + while is_running() { + let elapsed_ms = js_sys::Date::now() - start_time; + if elapsed_ms > 30000.0 { + let _ = log("workflow finished automatically after 30 seconds"); + let _ = stop(); + break; + } + if result_count >= 3 { + let _ = log("workflow finished automatically after 3 recognition results"); + let _ = stop(); + break; + } + + log("awaiting speech recognition...")?; + let result_outcome = session.start().await; + + if !is_running() { + break; + } + + match result_outcome { + Ok(result) => { + result_count += 1; + let transcript = 
result.transcript(); + let confidence = result.confidence(); + log(&format!( + "speech recognized: \"{}\" (confidence={})", + transcript, confidence + ))?; + + client.send_client_event( + "speech", + "recognition_result", + json!({ + "transcript": transcript, + "confidence": confidence, + }), + )?; + + set_module_status(&format!( + "speech-recognition: result\n\"{}\"\nconfidence: {}", + transcript, confidence + ))?; + } + Err(error) => { + let message = describe_js_error(&error); + log(&format!("recognition error: {}", message))?; + // Sleep a bit before retrying to avoid tight error loops + sleep_ms(1000).await?; + } + } + } + + Ok(()) +} + +#[wasm_bindgen] +pub fn stop() -> Result<(), JsValue> { + SPEECH_RECOGNITION_RUNTIME.with(|runtime| { + if let Some(mut runtime) = runtime.borrow_mut().take() { + let _ = runtime.session.stop(); + runtime.client.disconnect(); + log("speech-recognition stopped")?; + } + Ok::<(), JsValue>(()) + })?; + + set_module_status("speech-recognition: stopped")?; + Ok(()) +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[speech-recognition] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return 
Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? 
+ .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-modules/video1/Cargo.toml b/services/ws-modules/video1/Cargo.toml new file mode 100644 index 0000000..45b143b --- /dev/null +++ b/services/ws-modules/video1/Cargo.toml @@ -0,0 +1,38 @@ +[package] +description = "video capture module" +name = "et-ws-video1" +version = "0.1.0" +edition.workspace = true +license.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +et-web.workspace = true +et-ws-wasm-agent = { path = "../../ws-wasm-agent" } +js-sys = "0.3" +serde.workspace = true +serde-wasm-bindgen = "0.6" +serde_json.workspace = true +tracing.workspace = true +tracing-wasm = "0.2" +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = [ + "CssStyleDeclaration", + "Document", + "Element", + "HtmlElement", + "MediaDevices", + "MediaStream", + "MediaStreamConstraints", + "MediaStreamTrack", + "Navigator", + "Window", + "console", +] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/services/ws-modules/video1/src/lib.rs b/services/ws-modules/video1/src/lib.rs new file mode 100644 index 0000000..7cc8c06 --- /dev/null +++ b/services/ws-modules/video1/src/lib.rs @@ -0,0 +1,257 @@ +use std::cell::RefCell; + +use et_web::get_media_devices; +use et_ws_wasm_agent::{WsClient, WsClientConfig, set_textarea_value}; +use js_sys::{Promise, Reflect}; +use serde_json::json; +use tracing::info; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; +use web_sys::{MediaStream, MediaStreamConstraints}; + +#[wasm_bindgen] +pub struct VideoCapture { + stream: MediaStream, +} + +#[wasm_bindgen] +impl VideoCapture { + #[wasm_bindgen(js_name = request)] + pub async fn request() -> Result { + let window = web_sys::window().ok_or_else(|| 
JsValue::from_str("No window available"))?; + let media_devices = get_media_devices(&window.navigator())?; + + let constraints = MediaStreamConstraints::new(); + constraints.set_audio(&JsValue::FALSE); + constraints.set_video(&JsValue::TRUE); + + let promise = media_devices.get_user_media_with_constraints(&constraints)?; + let stream = JsFuture::from(promise).await?; + let stream: MediaStream = stream + .dyn_into() + .map_err(|_| JsValue::from_str("getUserMedia did not return a MediaStream"))?; + + info!( + "Video capture granted with {} video track(s)", + stream.get_video_tracks().length() + ); + + Ok(VideoCapture { stream }) + } + + #[wasm_bindgen(js_name = trackCount)] + pub fn track_count(&self) -> u32 { + self.stream.get_video_tracks().length() + } + + #[wasm_bindgen(js_name = rawStream)] + pub fn raw_stream(&self) -> JsValue { + self.stream.clone().into() + } + + pub fn stop(&self) { + let tracks = self.stream.get_tracks(); + for index in 0..tracks.length() { + if let Some(track) = tracks.get(index).dyn_ref::() { + track.stop(); + } + } + info!("Video capture tracks stopped"); + } +} + +struct VideoCaptureRuntime { + client: WsClient, + capture: VideoCapture, +} + +thread_local! 
{ + static VIDEO_CAPTURE_RUNTIME: RefCell> = const { RefCell::new(None) }; +} + +#[wasm_bindgen(start)] +pub fn init() { + let _ = tracing_wasm::try_set_as_global_default(); + info!("video-capture module initialized"); +} + +#[wasm_bindgen] +pub fn metadata() -> JsValue { + serde_wasm_bindgen::to_value(&json!({ + "name": env!("CARGO_PKG_NAME"), + "description": env!("CARGO_PKG_DESCRIPTION"), + "version": env!("CARGO_PKG_VERSION"), + })) + .unwrap_or(JsValue::NULL) +} + +#[wasm_bindgen] +pub fn is_running() -> bool { + VIDEO_CAPTURE_RUNTIME.with(|runtime| runtime.borrow().is_some()) +} + +#[wasm_bindgen] +pub async fn run() -> Result<(), JsValue> { + if is_running() { + return Ok(()); + } + + set_module_status("video-capture: entered run()")?; + log("entered run()")?; + + let outcome = async { + let ws_url = websocket_url()?; + let mut client = WsClient::new(WsClientConfig::new(ws_url)); + client.connect()?; + wait_for_connected(&client).await?; + log(&format!("websocket connected with agent_id={}", client.get_client_id()))?; + + log("requesting video capture access")?; + let capture = VideoCapture::request().await?; + let tracks = capture.track_count(); + log(&format!("video capture granted: {} tracks", tracks))?; + + // Set up preview + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(preview_el) = document.get_element_by_id("video-preview") + { + Reflect::set(&preview_el, &JsValue::from_str("srcObject"), &capture.raw_stream())?; + if let Some(html_el) = preview_el.dyn_ref::() { + html_el.style().set_property("display", "block")?; + } + } + + client.send_client_event( + "video", + "access_granted", + json!({ + "track_count": tracks, + }), + )?; + + set_module_status("video-capture: running")?; + + VIDEO_CAPTURE_RUNTIME.with(|runtime| { + runtime.borrow_mut().replace(VideoCaptureRuntime { client, capture }); + }); + + let stop_callback = Closure::once_into_js(move || { + if is_running() { + let _ = log("workflow 
finished automatically after 10 seconds"); + let _ = stop(); + } + }); + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + window.set_timeout_with_callback_and_timeout_and_arguments_0(stop_callback.unchecked_ref(), 10000)?; + + Ok(()) + } + .await; + + if let Err(error) = &outcome { + let message = describe_js_error(error); + let _ = set_module_status(&format!("video-capture: error\n{}", message)); + let _ = log(&format!("error: {}", message)); + } + + outcome +} + +#[wasm_bindgen] +pub fn stop() -> Result<(), JsValue> { + VIDEO_CAPTURE_RUNTIME.with(|runtime| { + if let Some(mut runtime) = runtime.borrow_mut().take() { + runtime.capture.stop(); + runtime.client.disconnect(); + + // Hide preview + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(preview_el) = document.get_element_by_id("video-preview") + { + Reflect::set(&preview_el, &JsValue::from_str("srcObject"), &JsValue::NULL)?; + if let Some(html_el) = preview_el.dyn_ref::() { + html_el.style().set_property("display", "none")?; + } + } + + log("video-capture stopped")?; + } + Ok::<(), JsValue>(()) + })?; + + set_module_status("video-capture: stopped")?; + Ok(()) +} + +fn log(message: &str) -> Result<(), JsValue> { + let line = format!("[video-capture] {}", message); + web_sys::console::log_1(&JsValue::from_str(&line)); + + if let Some(window) = web_sys::window() + && let Some(document) = window.document() + && let Some(log_el) = document.get_element_by_id("log") + { + let current = log_el.text_content().unwrap_or_default(); + let next = if current.is_empty() { + line + } else { + format!("{}\n{}", current, line) + }; + log_el.set_text_content(Some(&next)); + } + + Ok(()) +} + +fn set_module_status(message: &str) -> Result<(), JsValue> { + set_textarea_value("module-output", message) +} + +fn describe_js_error(error: &JsValue) -> String { + error + .as_string() + .or_else(|| 
js_sys::JSON::stringify(error).ok().map(String::from)) + .unwrap_or_else(|| format!("{:?}", error)) +} + +async fn wait_for_connected(client: &WsClient) -> Result<(), JsValue> { + for _ in 0..100 { + if client.get_state() == "connected" { + return Ok(()); + } + sleep_ms(100).await?; + } + + Err(JsValue::from_str("Timed out waiting for websocket connection")) +} + +async fn sleep_ms(duration_ms: i32) -> Result<(), JsValue> { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let promise = Promise::new(&mut |resolve, reject| { + let callback = Closure::once_into_js(move || { + let _ = resolve.call0(&JsValue::NULL); + }); + + if let Err(error) = + window.set_timeout_with_callback_and_timeout_and_arguments_0(callback.unchecked_ref(), duration_ms) + { + let _ = reject.call1(&JsValue::NULL, &error); + } + }); + JsFuture::from(promise).await.map(|_| ()) +} + +fn websocket_url() -> Result { + let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; + let location = Reflect::get(window.as_ref(), &JsValue::from_str("location"))?; + let protocol = Reflect::get(&location, &JsValue::from_str("protocol"))? + .as_string() + .ok_or_else(|| JsValue::from_str("window.location.protocol is unavailable"))?; + let host = Reflect::get(&location, &JsValue::from_str("host"))? 
+ .as_string() + .ok_or_else(|| JsValue::from_str("window.location.host is unavailable"))?; + let ws_protocol = if protocol == "https:" { "wss:" } else { "ws:" }; + Ok(format!("{}//{}/ws", ws_protocol, host)) +} diff --git a/services/ws-server/static/app.js b/services/ws-server/static/app.js index 2f9e258..32e5ea7 100644 --- a/services/ws-server/static/app.js +++ b/services/ws-server/static/app.js @@ -1,47 +1,13 @@ -import init, { - BluetoothAccess, - GeolocationReading, - GpuInfo, - GraphicsSupport, - initTracing, - MicrophoneAccess, - NfcScanResult, - SpeechRecognitionSession, - VideoCapture, - WebGpuProbeResult, - WsClient, - WsClientConfig, -} from "/pkg/et_ws_wasm_agent.js"; +import init, { initTracing, WsClient, WsClientConfig } from "/pkg/et_ws_wasm_agent.js"; console.log("app.js: module loading started"); const logEl = document.getElementById("log"); const moduleSelect = document.getElementById("module-select"); const runModuleButton = document.getElementById("run-module-button"); -const micButton = document.getElementById("mic-button"); -const videoButton = document.getElementById("video-button"); -const bluetoothButton = document.getElementById("bluetooth-button"); -const geolocationButton = document.getElementById("geolocation-button"); -const graphicsButton = document.getElementById("graphics-button"); -const webgpuTestButton = document.getElementById("webgpu-test-button"); -const gpuInfoButton = document.getElementById("gpu-info-button"); -const speechButton = document.getElementById("speech-button"); -const nfcButton = document.getElementById("nfc-button"); -const sensorsButton = document.getElementById("sensors-button"); const agentStatusEl = document.getElementById("agent-status"); const agentIdEl = document.getElementById("agent-id"); -const sensorOutputEl = document.getElementById("sensor-output"); -const videoPreview = document.getElementById("video-preview"); - -let microphone = null; -let videoCapture = null; -let bluetoothDevice = null; -let 
speechSession = null; -let speechListening = false; -let sensorsActive = false; -let orientationState = null; -let motionState = null; -let sendClientEvent = () => {}; + const STORED_AGENT_ID_KEY = "ws_wasm_agent.agent_id"; let currentAgentId = null; @@ -65,7 +31,6 @@ const populateModuleDropdown = async () => { const moduleNames = await resp.json(); append(`Found ${moduleNames.length} potential modules: ${moduleNames.join(", ")}`); - // Clear current options moduleSelect.innerHTML = ""; for (const name of moduleNames) { @@ -136,8 +101,6 @@ const loadWorkflowModule = async (moduleKey) => { return moduleConfig.loaded; } - // This part is mostly handled by populateModuleDropdown now - // but kept for robustness if called separately. const cacheBust = Date.now(); const moduleUrl = `${moduleConfig.moduleUrl}?v=${cacheBust}`; const wasmUrl = `${moduleConfig.wasmUrl}?v=${cacheBust}`; @@ -200,143 +163,6 @@ const handleProtocolMessage = (message) => { append(`agent_id assigned: ${parsed.agent_id}`); }; -const formatNumber = (value, digits = 3) => ( - Number.isFinite(value) ? value.toFixed(digits) : "n/a" -); - -const renderSensorOutput = () => { - const lines = [ - "Device sensor stream", - `updated: ${new Date().toLocaleTimeString()}`, - "", - "orientation", - ]; - - if (orientationState) { - lines.push(`alpha: ${formatNumber(orientationState.alpha)}`); - lines.push(`beta: ${formatNumber(orientationState.beta)}`); - lines.push(`gamma: ${formatNumber(orientationState.gamma)}`); - lines.push(`absolute: ${orientationState.absolute === null ? 
"n/a" : String(orientationState.absolute)}`); - } else { - lines.push("waiting for orientation event..."); - } - - lines.push(""); - lines.push("motion"); - if (motionState) { - lines.push( - `acceleration: x=${formatNumber(motionState.acceleration?.x)} y=${formatNumber(motionState.acceleration?.y)} z=${ - formatNumber(motionState.acceleration?.z) - }`, - ); - lines.push( - `acceleration including gravity: x=${formatNumber(motionState.accelerationIncludingGravity?.x)} y=${ - formatNumber(motionState.accelerationIncludingGravity?.y) - } z=${formatNumber(motionState.accelerationIncludingGravity?.z)}`, - ); - lines.push( - `rotation rate: alpha=${formatNumber(motionState.rotationRate?.alpha)} beta=${ - formatNumber(motionState.rotationRate?.beta) - } gamma=${formatNumber(motionState.rotationRate?.gamma)}`, - ); - lines.push(`interval: ${formatNumber(motionState.interval, 1)} ms`); - } else { - lines.push("waiting for motion event..."); - } - - sensorOutputEl.value = lines.join("\n"); -}; - -const handleOrientation = (event) => { - orientationState = { - alpha: event.alpha, - beta: event.beta, - gamma: event.gamma, - absolute: typeof event.absolute === "boolean" ? event.absolute : null, - }; - renderSensorOutput(); -}; - -const handleMotion = (event) => { - const accelerationIncludingGravity = event.accelerationIncludingGravity - ? { - x: event.accelerationIncludingGravity.x ?? 0, - y: event.accelerationIncludingGravity.y ?? 0, - z: event.accelerationIncludingGravity.z ?? 0, - } - : null; - - motionState = { - acceleration: event.acceleration - ? { - x: event.acceleration.x, - y: event.acceleration.y, - z: event.acceleration.z, - } - : null, - accelerationIncludingGravity, - rotationRate: event.rotationRate - ? 
{ - alpha: event.rotationRate.alpha, - beta: event.rotationRate.beta, - gamma: event.rotationRate.gamma, - } - : null, - interval: event.interval, - }; - renderSensorOutput(); -}; - -const requestSensorPermission = async (permissionTarget) => { - if (typeof permissionTarget?.requestPermission !== "function") { - return "granted"; - } - - return permissionTarget.requestPermission(); -}; - -const stopSensorsFlow = () => { - window.removeEventListener("deviceorientation", handleOrientation); - window.removeEventListener("devicemotion", handleMotion); - sensorsActive = false; - sensorsButton.textContent = "Start sensors"; - append("device sensors stopped"); -}; - -const startSensorsFlow = async () => { - if ( - typeof window.DeviceOrientationEvent === "undefined" - && typeof window.DeviceMotionEvent === "undefined" - ) { - throw new Error("Device orientation and motion APIs are not supported in this browser."); - } - - const [orientationPermission, motionPermission] = await Promise.all([ - requestSensorPermission(window.DeviceOrientationEvent), - requestSensorPermission(window.DeviceMotionEvent), - ]); - - if ( - orientationPermission !== "granted" - || motionPermission !== "granted" - ) { - throw new Error( - `Sensor permission denied (orientation=${orientationPermission}, motion=${motionPermission})`, - ); - } - - orientationState = null; - motionState = null; - renderSensorOutput(); - window.addEventListener("deviceorientation", handleOrientation); - window.addEventListener("devicemotion", handleMotion); - sensorsActive = true; - sensorsButton.textContent = "Stop sensors"; - append("device sensors started; streaming locally to textbox"); -}; - -renderSensorOutput(); - const wsProtocol = window.location.protocol === "https:" ? 
"wss:" : "ws:"; const wsUrl = `${wsProtocol}//${window.location.host}/ws`; const retainedAgentId = readStoredAgentId(); @@ -363,22 +189,6 @@ try { const config = new WsClientConfig(wsUrl); const client = new WsClient(config); - sendClientEvent = (capability, action, details) => { - const payload = JSON.stringify({ - type: "client_event", - capability, - action, - details, - }); - - try { - client.send(payload); - } catch (error) { - append(`ws send error: ${error instanceof Error ? error.message : String(error)}`); - console.error(error); - } - }; - client.set_on_state_change((state) => { append(`state: ${state}`); if (state === "connecting") { @@ -414,255 +224,6 @@ try { ); append(`client_id: ${client.get_client_id() || "(awaiting server assignment)"}`); - micButton.addEventListener("click", async () => { - try { - if (microphone) { - microphone.stop(); - microphone = null; - micButton.textContent = "Start microphone"; - delete window.microphone; - append("microphone stopped"); - sendClientEvent("microphone", "stopped", { track_count: 0 }); - return; - } - - microphone = await MicrophoneAccess.request(); - micButton.textContent = "Stop microphone"; - append(`microphone granted: ${microphone.trackCount()} audio track(s)`); - window.microphone = microphone; - sendClientEvent("microphone", "started", { - track_count: microphone.trackCount(), - }); - } catch (error) { - append(`microphone error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("microphone", "error", { - message: error instanceof Error ? 
error.message : String(error), - }); - console.error(error); - } - }); - - videoButton.addEventListener("click", async () => { - try { - if (videoCapture) { - videoCapture.stop(); - videoCapture = null; - videoPreview.srcObject = null; - videoPreview.hidden = true; - videoButton.textContent = "Start video"; - delete window.videoCapture; - append("video stopped"); - sendClientEvent("video", "stopped", { track_count: 0 }); - return; - } - - videoCapture = await VideoCapture.request(); - videoPreview.srcObject = videoCapture.rawStream(); - videoPreview.hidden = false; - videoButton.textContent = "Stop video"; - append(`video granted: ${videoCapture.trackCount()} video track(s)`); - window.videoCapture = videoCapture; - sendClientEvent("video", "started", { - track_count: videoCapture.trackCount(), - }); - } catch (error) { - append(`video error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("video", "error", { - message: error instanceof Error ? error.message : String(error), - }); - console.error(error); - } - }); - - bluetoothButton.addEventListener("click", async () => { - try { - bluetoothDevice = await BluetoothAccess.request(); - append( - `bluetooth selected: name=${bluetoothDevice.name()} id=${bluetoothDevice.id()}`, - ); - window.bluetoothDevice = bluetoothDevice; - sendClientEvent("bluetooth", "selected", { - name: bluetoothDevice.name(), - id: bluetoothDevice.id(), - gatt_connected: bluetoothDevice.gattConnected(), - }); - } catch (error) { - append(`bluetooth error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("bluetooth", "error", { - message: error instanceof Error ? 
error.message : String(error), - }); - console.error(error); - } - }); - - geolocationButton.addEventListener("click", async () => { - try { - const location = await GeolocationReading.request(); - append( - `geolocation: lat=${location.latitude()} lon=${location.longitude()} accuracy=${location.accuracyMeters()}m`, - ); - sendClientEvent("geolocation", "reading", { - latitude: location.latitude(), - longitude: location.longitude(), - accuracy_meters: location.accuracyMeters(), - }); - window.locationReading = location; - } catch (error) { - append(`geolocation error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("geolocation", "error", { - message: error instanceof Error ? error.message : String(error), - }); - console.error(error); - } - }); - - graphicsButton.addEventListener("click", () => { - try { - const graphics = GraphicsSupport.detect(); - append( - `graphics: webgl=${graphics.webglSupported()} ` - + `webgl2=${graphics.webgl2Supported()} ` - + `webgpu=${graphics.webgpuSupported()} ` - + `webnn=${graphics.webnnSupported()}`, - ); - sendClientEvent("graphics", "detected", { - webgl_supported: graphics.webglSupported(), - webgl2_supported: graphics.webgl2Supported(), - webgpu_supported: graphics.webgpuSupported(), - webnn_supported: graphics.webnnSupported(), - }); - window.graphicsSupport = graphics; - } catch (error) { - append(`graphics error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("graphics", "error", { - message: error instanceof Error ? 
error.message : String(error), - }); - console.error(error); - } - }); - - webgpuTestButton.addEventListener("click", async () => { - try { - const probe = await WebGpuProbeResult.test(); - append( - `webgpu probe: adapter_found=${probe.adapterFound()} device_created=${probe.deviceCreated()}`, - ); - sendClientEvent("webgpu", "probe", { - adapter_found: probe.adapterFound(), - device_created: probe.deviceCreated(), - }); - window.webgpuProbe = probe; - } catch (error) { - append(`webgpu error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("webgpu", "error", { - message: error instanceof Error ? error.message : String(error), - }); - console.error(error); - } - }); - - gpuInfoButton.addEventListener("click", async () => { - try { - const gpuInfo = await GpuInfo.detect(); - append( - `gpu info: source=${gpuInfo.source()} vendor=${gpuInfo.vendor()} ` - + `renderer=${gpuInfo.renderer()} architecture=${gpuInfo.architecture()} ` - + `description=${gpuInfo.description()}`, - ); - sendClientEvent("gpu", "info", { - source: gpuInfo.source(), - vendor: gpuInfo.vendor(), - renderer: gpuInfo.renderer(), - architecture: gpuInfo.architecture(), - description: gpuInfo.description(), - }); - window.gpuInfo = gpuInfo; - } catch (error) { - append(`gpu info error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("gpu", "error", { - message: error instanceof Error ? error.message : String(error), - }); - console.error(error); - } - }); - - speechButton.addEventListener("click", async () => { - if (speechListening && speechSession) { - try { - speechButton.disabled = true; - speechButton.textContent = "Finishing..."; - await speechSession.stop(); - append("speech recognition finalizing"); - } catch (error) { - append(`speech stop error: ${error instanceof Error ? 
error.message : String(error)}`); - } - return; - } - - try { - speechSession = new SpeechRecognitionSession(); - speechListening = true; - speechButton.disabled = false; - speechButton.textContent = "Stop speech"; - const speech = await speechSession.start(); - append( - `speech: transcript="${speech.transcript()}" confidence=${speech.confidence()}`, - ); - sendClientEvent("speech", "recognized", { - transcript: speech.transcript(), - confidence: speech.confidence(), - }); - window.speechRecognitionResult = speech; - } catch (error) { - append(`speech error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("speech", "error", { - message: error instanceof Error ? error.message : String(error), - }); - console.error(error); - } finally { - speechListening = false; - speechButton.disabled = false; - speechButton.textContent = "Recognize speech"; - } - }); - - nfcButton.addEventListener("click", async () => { - try { - nfcButton.disabled = true; - nfcButton.textContent = "Scanning..."; - const scan = await NfcScanResult.scanOnce(); - append(`nfc: serial=${scan.serialNumber()} records=${scan.recordSummary()}`); - sendClientEvent("nfc", "scanned", { - serial_number: scan.serialNumber(), - record_summary: scan.recordSummary(), - }); - window.nfcScan = scan; - } catch (error) { - append(`nfc error: ${error instanceof Error ? error.message : String(error)}`); - sendClientEvent("nfc", "error", { - message: error instanceof Error ? 
error.message : String(error), - }); - console.error(error); - } finally { - nfcButton.disabled = false; - nfcButton.textContent = "Scan NFC"; - } - }); - - sensorsButton.addEventListener("click", async () => { - try { - if (sensorsActive) { - stopSensorsFlow(); - return; - } - - await startSensorsFlow(); - } catch (error) { - append(`sensor error: ${describeError(error)}`); - console.error(error); - } - }); - runModuleButton.addEventListener("click", async () => { const selectedModule = WORKFLOW_MODULES.get(moduleSelect.value); runModuleButton.disabled = true; @@ -685,15 +246,6 @@ try { window.client = client; window.sendAlive = () => client.send_alive(); - window.runWorkflowModule = (moduleKey) => { - if (moduleKey && WORKFLOW_MODULES.has(moduleKey)) { - moduleSelect.value = moduleKey; - } - return runSelectedWorkflowModule(); - }; - window.runHarModule = () => window.runWorkflowModule("har1"); - window.runFaceDetectionModule = () => window.runWorkflowModule("face-detection"); - window.runComm1Module = () => window.runWorkflowModule("comm1"); } catch (error) { append(`error: ${error instanceof Error ? error.message : String(error)}`); console.error(error); diff --git a/services/ws-server/static/index.html b/services/ws-server/static/index.html index e100aa8..81cc191 100644 --- a/services/ws-server/static/index.html +++ b/services/ws-server/static/index.html @@ -153,25 +153,6 @@

WASM web agent

>Waiting for device sensor data…
Booting…
- -

- - - - - - - - - - -

diff --git a/services/ws-wasm-agent/Cargo.toml b/services/ws-wasm-agent/Cargo.toml index d630f04..2ac7d52 100644 --- a/services/ws-wasm-agent/Cargo.toml +++ b/services/ws-wasm-agent/Cargo.toml @@ -17,19 +17,12 @@ serde_json.workspace = true tracing.workspace = true tracing-wasm = "0.2" wasm-bindgen = "0.2" -wasm-bindgen-futures = "0.4" web-sys = { version = "0.3", features = [ - "AddEventListenerOptions", "BinaryType", "Document", - "DomError", "Event", "EventTarget", "HtmlCanvasElement", - "MediaDevices", - "MediaStream", - "MediaStreamConstraints", - "MediaStreamTrack", "MessageEvent", "Navigator", "Storage", diff --git a/services/ws-wasm-agent/src/lib.rs b/services/ws-wasm-agent/src/lib.rs index d27c108..914e729 100644 --- a/services/ws-wasm-agent/src/lib.rs +++ b/services/ws-wasm-agent/src/lib.rs @@ -1,4 +1,3 @@ -use std::cell::Cell; use std::cell::RefCell; use std::collections::VecDeque; use std::rc::Rc; @@ -6,8 +5,7 @@ use std::rc::Rc; use edge_toolkit::ws::{ConnectStatus, WsMessage}; use tracing::{error, info, warn}; use wasm_bindgen::prelude::*; -use wasm_bindgen_futures::JsFuture; -use web_sys::{Event, MediaStream, MediaStreamConstraints, MessageEvent, WebSocket}; +use web_sys::{Event, MessageEvent, WebSocket}; const STORED_AGENT_ID_KEY: &str = "ws_wasm_agent.agent_id"; const STORED_LAST_OFFLINE_AT_KEY: &str = "ws_wasm_agent.last_offline_at"; @@ -15,7 +13,6 @@ const MAX_OFFLINE_QUEUE_LEN: usize = 1000; /// Default cadence for client-side app-level `Alive` messages sent to the websocket server. /// This should remain comfortably lower than the server's idle connection timeout. 
const DEFAULT_ALIVE_INTERVAL_MS: u32 = 5_000; -const SENSOR_PERMISSION_GRANTED: &str = "granted"; // Initialize logging for WASM pub fn init_logging() { @@ -37,1366 +34,24 @@ pub enum ConnectionState { Reconnecting, } -#[wasm_bindgen] -pub struct MicrophoneAccess { - stream: MediaStream, -} - -#[wasm_bindgen] -pub struct VideoCapture { - stream: MediaStream, -} - -#[wasm_bindgen] -pub struct BluetoothAccess { - device: JsValue, -} - -#[wasm_bindgen] -pub struct GeolocationReading { - latitude: f64, - longitude: f64, - accuracy_meters: f64, -} - -#[wasm_bindgen] -pub struct GraphicsSupport { - webgl_supported: bool, - webgl2_supported: bool, - webgpu_supported: bool, - webnn_supported: bool, -} - -#[wasm_bindgen] -pub struct WebGpuProbeResult { - adapter_found: bool, - device_created: bool, -} - -#[wasm_bindgen] -pub struct GpuInfo { - vendor: String, - renderer: String, - architecture: String, - description: String, - source: String, -} - -#[wasm_bindgen] -pub struct SpeechRecognitionResult { - transcript: String, - confidence: f64, -} - -#[wasm_bindgen] -pub struct SpeechRecognitionSession { - recognition: JsValue, - stop_requested: Rc>, -} - -#[wasm_bindgen] -pub struct NfcScanResult { - serial_number: String, - record_summary: String, -} - -#[derive(Clone, Default)] -struct OrientationReadingState { - alpha: Option, - beta: Option, - gamma: Option, - absolute: Option, -} - -#[derive(Clone, Default)] -struct MotionReadingState { - acceleration_x: Option, - acceleration_y: Option, - acceleration_z: Option, - acceleration_including_gravity_x: Option, - acceleration_including_gravity_y: Option, - acceleration_including_gravity_z: Option, - rotation_rate_alpha: Option, - rotation_rate_beta: Option, - rotation_rate_gamma: Option, - interval_ms: Option, -} - -#[wasm_bindgen] -pub struct OrientationReading { - inner: OrientationReadingState, -} - -#[wasm_bindgen] -pub struct MotionReading { - inner: MotionReadingState, -} - -#[wasm_bindgen] -pub struct DeviceSensors { - 
active: bool, - orientation_state: Rc>>, - motion_state: Rc>>, - orientation_listener: Option>, - motion_listener: Option>, -} - -#[wasm_bindgen] -impl MicrophoneAccess { - #[wasm_bindgen(js_name = request)] - pub async fn request() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let media_devices = get_media_devices(&window.navigator())?; - - let constraints = MediaStreamConstraints::new(); - constraints.set_audio(&JsValue::TRUE); - constraints.set_video(&JsValue::FALSE); - - let promise = media_devices.get_user_media_with_constraints(&constraints)?; - let stream = JsFuture::from(promise).await?; - let stream: MediaStream = stream - .dyn_into() - .map_err(|_| JsValue::from_str("getUserMedia did not return a MediaStream"))?; - - info!( - "Microphone access granted with {} audio track(s)", - stream.get_audio_tracks().length() - ); - - Ok(MicrophoneAccess { stream }) - } - - #[wasm_bindgen(js_name = trackCount)] - pub fn track_count(&self) -> u32 { - self.stream.get_audio_tracks().length() - } - - #[wasm_bindgen(js_name = rawStream)] - pub fn raw_stream(&self) -> JsValue { - self.stream.clone().into() - } - - pub fn stop(&self) { - let tracks = self.stream.get_tracks(); - for index in 0..tracks.length() { - if let Some(track) = tracks.get(index).dyn_ref::() { - track.stop(); - } - } - info!("Microphone tracks stopped"); - } -} - -#[wasm_bindgen] -impl VideoCapture { - #[wasm_bindgen(js_name = request)] - pub async fn request() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let media_devices = get_media_devices(&window.navigator())?; - - let constraints = MediaStreamConstraints::new(); - constraints.set_audio(&JsValue::FALSE); - constraints.set_video(&JsValue::TRUE); - - let promise = media_devices.get_user_media_with_constraints(&constraints)?; - let stream = JsFuture::from(promise).await?; - let stream: MediaStream = stream - .dyn_into() - .map_err(|_| 
JsValue::from_str("getUserMedia did not return a MediaStream"))?; - - info!( - "Video capture granted with {} video track(s)", - stream.get_video_tracks().length() - ); - - Ok(VideoCapture { stream }) - } - - #[wasm_bindgen(js_name = trackCount)] - pub fn track_count(&self) -> u32 { - self.stream.get_video_tracks().length() - } - - #[wasm_bindgen(js_name = rawStream)] - pub fn raw_stream(&self) -> JsValue { - self.stream.clone().into() - } - - pub fn stop(&self) { - let tracks = self.stream.get_tracks(); - for index in 0..tracks.length() { - if let Some(track) = tracks.get(index).dyn_ref::() { - track.stop(); - } - } - info!("Video capture tracks stopped"); - } -} - -#[wasm_bindgen] -impl BluetoothAccess { - #[wasm_bindgen(js_name = request)] - pub async fn request() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let navigator = window.navigator(); - let bluetooth = js_sys::Reflect::get(&navigator, &JsValue::from_str("bluetooth"))?; - if bluetooth.is_undefined() || bluetooth.is_null() { - return Err(JsValue::from_str( - "Web Bluetooth is not available in this browser context", - )); - } - - let options = js_sys::Object::new(); - js_sys::Reflect::set(&options, &JsValue::from_str("acceptAllDevices"), &JsValue::TRUE)?; - - let request_device = js_sys::Reflect::get(&bluetooth, &JsValue::from_str("requestDevice"))? - .dyn_into::() - .map_err(|_| JsValue::from_str("navigator.bluetooth.requestDevice is not callable"))?; - let promise = request_device - .call1(&bluetooth, &options)? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("requestDevice did not return a Promise"))?; - let device = JsFuture::from(promise).await?; - - info!( - "Bluetooth device selected: {:?}", - js_sys::Reflect::get(&device, &JsValue::from_str("name")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()) - ); - - Ok(BluetoothAccess { device }) - } - - pub fn id(&self) -> String { - js_sys::Reflect::get(&self.device, &JsValue::from_str("id")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_default() - } - - pub fn name(&self) -> String { - js_sys::Reflect::get(&self.device, &JsValue::from_str("name")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()) - } - - #[wasm_bindgen(js_name = gattConnected)] - pub fn gatt_connected(&self) -> bool { - js_sys::Reflect::get(&self.device, &JsValue::from_str("gatt")) - .ok() - .filter(|gatt| !gatt.is_null() && !gatt.is_undefined()) - .and_then(|gatt| js_sys::Reflect::get(&gatt, &JsValue::from_str("connected")).ok()) - .and_then(|value| value.as_bool()) - .unwrap_or(false) - } - - #[wasm_bindgen(js_name = connectGatt)] - pub async fn connect_gatt(&self) -> Result<(), JsValue> { - let gatt = js_sys::Reflect::get(&self.device, &JsValue::from_str("gatt"))?; - if gatt.is_null() || gatt.is_undefined() { - return Err(JsValue::from_str("Selected device has no GATT server")); - } - - let connect = js_sys::Reflect::get(&gatt, &JsValue::from_str("connect"))? - .dyn_into::() - .map_err(|_| JsValue::from_str("device.gatt.connect is not callable"))?; - let promise = connect - .call0(&gatt)? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("device.gatt.connect did not return a Promise"))?; - let _server = JsFuture::from(promise).await?; - info!("Connected to Bluetooth GATT server for {}", self.name()); - Ok(()) - } -} - -#[wasm_bindgen] -impl GeolocationReading { - #[wasm_bindgen(js_name = request)] - pub async fn request() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let navigator = window.navigator(); - let geolocation = js_sys::Reflect::get(&navigator, &JsValue::from_str("geolocation"))?; - if geolocation.is_undefined() || geolocation.is_null() { - return Err(JsValue::from_str( - "navigator.geolocation is unavailable. Use https://... or http://localhost and allow access.", - )); - } - - let options = js_sys::Object::new(); - js_sys::Reflect::set(&options, &JsValue::from_str("enableHighAccuracy"), &JsValue::TRUE)?; - js_sys::Reflect::set(&options, &JsValue::from_str("maximumAge"), &JsValue::from_f64(0.0))?; - js_sys::Reflect::set(&options, &JsValue::from_str("timeout"), &JsValue::from_f64(10_000.0))?; - - let promise = js_sys::Promise::new(&mut |resolve, reject| { - let reject_for_callback = reject.clone(); - let success = Closure::once(Box::new(move |position: JsValue| { - let _ = resolve.call1(&JsValue::NULL, &position); - }) as Box); - - let failure = Closure::once(Box::new(move |error: JsValue| { - let _ = reject_for_callback.call1(&JsValue::NULL, &error); - }) as Box); - - let get_current_position = js_sys::Reflect::get(&geolocation, &JsValue::from_str("getCurrentPosition")) - .ok() - .and_then(|value| value.dyn_into::().ok()); - - if let Some(get_current_position) = get_current_position { - let _ = get_current_position.call3( - &geolocation, - success.as_ref().unchecked_ref(), - failure.as_ref().unchecked_ref(), - &options, - ); - } else { - let _ = reject.call1( - &JsValue::NULL, - &JsValue::from_str("navigator.geolocation.getCurrentPosition is not callable"), - ); - } - - 
success.forget(); - failure.forget(); - }); - - let position = JsFuture::from(promise).await?; - let coords = js_sys::Reflect::get(&position, &JsValue::from_str("coords"))?; - let latitude = js_sys::Reflect::get(&coords, &JsValue::from_str("latitude"))? - .as_f64() - .ok_or_else(|| JsValue::from_str("Geolocation latitude is missing"))?; - let longitude = js_sys::Reflect::get(&coords, &JsValue::from_str("longitude"))? - .as_f64() - .ok_or_else(|| JsValue::from_str("Geolocation longitude is missing"))?; - let accuracy_meters = js_sys::Reflect::get(&coords, &JsValue::from_str("accuracy"))? - .as_f64() - .ok_or_else(|| JsValue::from_str("Geolocation accuracy is missing"))?; - - info!( - "Geolocation reading acquired: latitude={} longitude={} accuracy={}m", - latitude, longitude, accuracy_meters - ); - - Ok(GeolocationReading { - latitude, - longitude, - accuracy_meters, - }) - } - - pub fn latitude(&self) -> f64 { - self.latitude - } - - pub fn longitude(&self) -> f64 { - self.longitude - } - - #[wasm_bindgen(js_name = accuracyMeters)] - pub fn accuracy_meters(&self) -> f64 { - self.accuracy_meters - } -} - -#[wasm_bindgen] -impl OrientationReading { - pub fn alpha(&self) -> f64 { - self.inner.alpha.unwrap_or(0.0) - } - - pub fn beta(&self) -> f64 { - self.inner.beta.unwrap_or(0.0) - } - - pub fn gamma(&self) -> f64 { - self.inner.gamma.unwrap_or(0.0) - } - - pub fn absolute(&self) -> bool { - self.inner.absolute.unwrap_or(false) - } -} - -#[wasm_bindgen] -impl MotionReading { - #[wasm_bindgen(js_name = accelerationX)] - pub fn acceleration_x(&self) -> f64 { - self.inner.acceleration_x.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = accelerationY)] - pub fn acceleration_y(&self) -> f64 { - self.inner.acceleration_y.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = accelerationZ)] - pub fn acceleration_z(&self) -> f64 { - self.inner.acceleration_z.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = accelerationIncludingGravityX)] - pub fn 
acceleration_including_gravity_x(&self) -> f64 { - self.inner.acceleration_including_gravity_x.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = accelerationIncludingGravityY)] - pub fn acceleration_including_gravity_y(&self) -> f64 { - self.inner.acceleration_including_gravity_y.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = accelerationIncludingGravityZ)] - pub fn acceleration_including_gravity_z(&self) -> f64 { - self.inner.acceleration_including_gravity_z.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = rotationRateAlpha)] - pub fn rotation_rate_alpha(&self) -> f64 { - self.inner.rotation_rate_alpha.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = rotationRateBeta)] - pub fn rotation_rate_beta(&self) -> f64 { - self.inner.rotation_rate_beta.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = rotationRateGamma)] - pub fn rotation_rate_gamma(&self) -> f64 { - self.inner.rotation_rate_gamma.unwrap_or(0.0) - } - - #[wasm_bindgen(js_name = intervalMs)] - pub fn interval_ms(&self) -> f64 { - self.inner.interval_ms.unwrap_or(0.0) - } -} - -impl Default for DeviceSensors { - fn default() -> Self { - Self::new() - } -} - -#[wasm_bindgen] -impl DeviceSensors { - #[wasm_bindgen(constructor)] - pub fn new() -> DeviceSensors { - DeviceSensors { - active: false, - orientation_state: Rc::new(RefCell::new(None)), - motion_state: Rc::new(RefCell::new(None)), - orientation_listener: None, - motion_listener: None, - } - } - - pub async fn start(&mut self) -> Result<(), JsValue> { - if self.active { - return Ok(()); - } - - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - - if js_sys::Reflect::get(&window, &JsValue::from_str("DeviceOrientationEvent"))?.is_undefined() - && js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?.is_undefined() - { - return Err(JsValue::from_str( - "Device orientation and motion APIs are not supported in this browser.", - )); - } - - let orientation_permission = 
request_sensor_permission(js_sys::Reflect::get( - &window, - &JsValue::from_str("DeviceOrientationEvent"), - )?) - .await?; - let motion_permission = - request_sensor_permission(js_sys::Reflect::get(&window, &JsValue::from_str("DeviceMotionEvent"))?).await?; - - if orientation_permission != SENSOR_PERMISSION_GRANTED || motion_permission != SENSOR_PERMISSION_GRANTED { - return Err(JsValue::from_str(&format!( - "Sensor permission denied (orientation={orientation_permission}, motion={motion_permission})" - ))); - } - - *self.orientation_state.borrow_mut() = None; - *self.motion_state.borrow_mut() = None; - - let orientation_state = self.orientation_state.clone(); - let orientation_listener = Closure::wrap(Box::new(move |event: Event| { - let value: JsValue = event.into(); - *orientation_state.borrow_mut() = Some(OrientationReadingState { - alpha: js_number_field(&value, "alpha"), - beta: js_number_field(&value, "beta"), - gamma: js_number_field(&value, "gamma"), - absolute: js_bool_field(&value, "absolute"), - }); - }) as Box); - - let motion_state = self.motion_state.clone(); - let motion_listener = Closure::wrap(Box::new(move |event: Event| { - let value: JsValue = event.into(); - let acceleration = js_nested_object(&value, "acceleration"); - let acceleration_including_gravity = js_nested_object(&value, "accelerationIncludingGravity"); - let rotation_rate = js_nested_object(&value, "rotationRate"); - - *motion_state.borrow_mut() = Some(MotionReadingState { - acceleration_x: acceleration.as_ref().and_then(|v| js_number_field(v, "x")), - acceleration_y: acceleration.as_ref().and_then(|v| js_number_field(v, "y")), - acceleration_z: acceleration.as_ref().and_then(|v| js_number_field(v, "z")), - acceleration_including_gravity_x: acceleration_including_gravity - .as_ref() - .and_then(|v| js_number_field(v, "x")), - acceleration_including_gravity_y: acceleration_including_gravity - .as_ref() - .and_then(|v| js_number_field(v, "y")), - acceleration_including_gravity_z: 
acceleration_including_gravity - .as_ref() - .and_then(|v| js_number_field(v, "z")), - rotation_rate_alpha: rotation_rate.as_ref().and_then(|v| js_number_field(v, "alpha")), - rotation_rate_beta: rotation_rate.as_ref().and_then(|v| js_number_field(v, "beta")), - rotation_rate_gamma: rotation_rate.as_ref().and_then(|v| js_number_field(v, "gamma")), - interval_ms: js_number_field(&value, "interval"), - }); - }) as Box); - - let target: &web_sys::EventTarget = window.as_ref(); - target.add_event_listener_with_callback("deviceorientation", orientation_listener.as_ref().unchecked_ref())?; - target.add_event_listener_with_callback("devicemotion", motion_listener.as_ref().unchecked_ref())?; - - self.orientation_listener = Some(orientation_listener); - self.motion_listener = Some(motion_listener); - self.active = true; - info!("Device sensors started"); - Ok(()) - } - - pub fn stop(&mut self) -> Result<(), JsValue> { - if !self.active { - return Ok(()); - } - - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let target: &web_sys::EventTarget = window.as_ref(); - - if let Some(listener) = self.orientation_listener.as_ref() { - target.remove_event_listener_with_callback("deviceorientation", listener.as_ref().unchecked_ref())?; - } - - if let Some(listener) = self.motion_listener.as_ref() { - target.remove_event_listener_with_callback("devicemotion", listener.as_ref().unchecked_ref())?; - } - - self.orientation_listener = None; - self.motion_listener = None; - self.active = false; - info!("Device sensors stopped"); - Ok(()) - } - - #[wasm_bindgen(js_name = isActive)] - pub fn is_active(&self) -> bool { - self.active - } - - #[wasm_bindgen(js_name = hasOrientation)] - pub fn has_orientation(&self) -> bool { - self.orientation_state.borrow().is_some() - } - - #[wasm_bindgen(js_name = hasMotion)] - pub fn has_motion(&self) -> bool { - self.motion_state.borrow().is_some() - } - - #[wasm_bindgen(js_name = orientationSnapshot)] - pub fn 
orientation_snapshot(&self) -> Result { - self.orientation_state - .borrow() - .clone() - .map(|inner| OrientationReading { inner }) - .ok_or_else(|| JsValue::from_str("No orientation reading available yet")) - } - - #[wasm_bindgen(js_name = motionSnapshot)] - pub fn motion_snapshot(&self) -> Result { - self.motion_state - .borrow() - .clone() - .map(|inner| MotionReading { inner }) - .ok_or_else(|| JsValue::from_str("No motion reading available yet")) - } -} - -#[wasm_bindgen] -impl GraphicsSupport { - #[wasm_bindgen(js_name = detect)] - pub fn detect() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let document = window - .document() - .ok_or_else(|| JsValue::from_str("No document available"))?; - let canvas = document - .create_element("canvas")? - .dyn_into::() - .map_err(|_| JsValue::from_str("Failed to create canvas element"))?; - - let webgl_supported = canvas.get_context("webgl")?.is_some(); - let webgl2_supported = canvas.get_context("webgl2")?.is_some(); - let webgpu_supported = js_sys::Reflect::get(&window.navigator(), &JsValue::from_str("gpu"))?.is_object(); - let webnn_supported = js_sys::Reflect::get(&window.navigator(), &JsValue::from_str("ml"))?.is_object(); - - info!( - "Graphics support detected: webgl={} webgl2={} webgpu={} webnn={}", - webgl_supported, webgl2_supported, webgpu_supported, webnn_supported - ); - - Ok(GraphicsSupport { - webgl_supported, - webgl2_supported, - webgpu_supported, - webnn_supported, - }) - } - - #[wasm_bindgen(js_name = webglSupported)] - pub fn webgl_supported(&self) -> bool { - self.webgl_supported - } - - #[wasm_bindgen(js_name = webgl2Supported)] - pub fn webgl2_supported(&self) -> bool { - self.webgl2_supported - } - - #[wasm_bindgen(js_name = webgpuSupported)] - pub fn webgpu_supported(&self) -> bool { - self.webgpu_supported - } - - #[wasm_bindgen(js_name = webnnSupported)] - pub fn webnn_supported(&self) -> bool { - self.webnn_supported - } -} - 
-#[wasm_bindgen] -impl WebGpuProbeResult { - #[wasm_bindgen(js_name = test)] - pub async fn test() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let navigator = window.navigator(); - let gpu = js_sys::Reflect::get(&navigator, &JsValue::from_str("gpu"))?; - - if gpu.is_null() || gpu.is_undefined() { - return Ok(WebGpuProbeResult { - adapter_found: false, - device_created: false, - }); - } - - let request_adapter = js_sys::Reflect::get(&gpu, &JsValue::from_str("requestAdapter"))? - .dyn_into::() - .map_err(|_| JsValue::from_str("navigator.gpu.requestAdapter is not callable"))?; - - let adapter_promise = request_adapter - .call0(&gpu)? - .dyn_into::() - .map_err(|_| JsValue::from_str("requestAdapter did not return a Promise"))?; - let adapter = JsFuture::from(adapter_promise).await?; - - if adapter.is_null() || adapter.is_undefined() { - info!("WebGPU probe: no adapter available"); - return Ok(WebGpuProbeResult { - adapter_found: false, - device_created: false, - }); - } - - let request_device = js_sys::Reflect::get(&adapter, &JsValue::from_str("requestDevice"))? - .dyn_into::() - .map_err(|_| JsValue::from_str("adapter.requestDevice is not callable"))?; - - let device_promise = request_device - .call0(&adapter)? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("requestDevice did not return a Promise"))?; - let device = JsFuture::from(device_promise).await?; - - let device_created = !device.is_null() && !device.is_undefined(); - info!( - "WebGPU probe completed: adapter_found=true device_created={}", - device_created - ); - - Ok(WebGpuProbeResult { - adapter_found: true, - device_created, - }) - } - - #[wasm_bindgen(js_name = adapterFound)] - pub fn adapter_found(&self) -> bool { - self.adapter_found - } - - #[wasm_bindgen(js_name = deviceCreated)] - pub fn device_created(&self) -> bool { - self.device_created - } -} - -#[wasm_bindgen] -impl GpuInfo { - #[wasm_bindgen(js_name = detect)] - pub async fn detect() -> Result { - if let Some(info) = detect_webgpu_info().await? { - return Ok(info); - } - - if let Some(info) = detect_webgl_info()? { - return Ok(info); - } - - Ok(GpuInfo { - vendor: "unknown".to_string(), - renderer: "unknown".to_string(), - architecture: "unknown".to_string(), - description: "No GPU details exposed by this browser".to_string(), - source: "none".to_string(), - }) - } - - pub fn vendor(&self) -> String { - self.vendor.clone() - } - - pub fn renderer(&self) -> String { - self.renderer.clone() - } - - pub fn architecture(&self) -> String { - self.architecture.clone() - } - - pub fn description(&self) -> String { - self.description.clone() - } - - pub fn source(&self) -> String { - self.source.clone() - } -} - -#[wasm_bindgen] -impl SpeechRecognitionResult { - #[wasm_bindgen(js_name = recognizeOnce)] - pub async fn recognize_once() -> Result { - let session = SpeechRecognitionSession::new()?; - session.start().await - } - - pub fn transcript(&self) -> String { - self.transcript.clone() - } - - pub fn confidence(&self) -> f64 { - self.confidence - } -} - -#[wasm_bindgen] -impl SpeechRecognitionSession { - #[wasm_bindgen(constructor)] - pub fn new() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - 
let speech_recognition_ctor = js_sys::Reflect::get(&window, &JsValue::from_str("SpeechRecognition")) - .ok() - .filter(|value| !value.is_undefined() && !value.is_null()) - .or_else(|| { - js_sys::Reflect::get(&window, &JsValue::from_str("webkitSpeechRecognition")) - .ok() - .filter(|value| !value.is_undefined() && !value.is_null()) - }) - .ok_or_else(|| JsValue::from_str("Web Speech API recognition is not available in this browser context"))?; - let constructor = speech_recognition_ctor - .dyn_into::() - .map_err(|_| JsValue::from_str("SpeechRecognition constructor is not callable"))?; - let recognition = js_sys::Reflect::construct(&constructor, &js_sys::Array::new())?; - - js_sys::Reflect::set(&recognition, &JsValue::from_str("lang"), &JsValue::from_str("en-US"))?; - js_sys::Reflect::set(&recognition, &JsValue::from_str("interimResults"), &JsValue::TRUE)?; - js_sys::Reflect::set( - &recognition, - &JsValue::from_str("maxAlternatives"), - &JsValue::from_f64(1.0), - )?; - - Ok(SpeechRecognitionSession { - recognition, - stop_requested: Rc::new(Cell::new(false)), - }) - } - - pub async fn start(&self) -> Result { - self.stop_requested.set(false); - let recognition = self.recognition.clone(); - let stop_requested = self.stop_requested.clone(); - let promise = js_sys::Promise::new(&mut |resolve, reject| { - let settled = Rc::new(Cell::new(false)); - let resolve_for_result = resolve.clone(); - let resolve_for_end = resolve.clone(); - let reject_for_error = reject.clone(); - let reject_for_end = reject.clone(); - let settled_for_result = settled.clone(); - let settled_for_error = settled.clone(); - let settled_for_end = settled.clone(); - let transcript_state: Rc>> = Rc::new(RefCell::new(None)); - let transcript_state_for_result = transcript_state.clone(); - let transcript_state_for_end = transcript_state.clone(); - let stop_requested_for_end = stop_requested.clone(); - - let on_result = Closure::wrap(Box::new(move |event: JsValue| { - if let Some((transcript, 
confidence, has_final)) = extract_speech_event_transcript(&event) { - *transcript_state_for_result.borrow_mut() = Some((transcript.clone(), confidence)); - - if has_final && !settled_for_result.replace(true) { - let payload = js_sys::Object::new(); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("transcript"), - &JsValue::from_str(&transcript), - ); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("confidence"), - &JsValue::from_f64(confidence), - ); - let _ = resolve_for_result.call1(&JsValue::NULL, &payload); - } - } - }) as Box); - - let on_error = Closure::wrap(Box::new(move |event: JsValue| { - if settled_for_error.replace(true) { - return; - } - let message = js_sys::Reflect::get(&event, &JsValue::from_str("error")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "speech recognition failed".to_string()); - let _ = reject_for_error.call1(&JsValue::NULL, &JsValue::from_str(&message)); - }) as Box); - - let on_end = Closure::wrap(Box::new(move || { - if settled_for_end.replace(true) { - return; - } - if let Some((transcript, confidence)) = transcript_state_for_end.borrow().clone() { - let payload = js_sys::Object::new(); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("transcript"), - &JsValue::from_str(&transcript), - ); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("confidence"), - &JsValue::from_f64(confidence), - ); - let _ = resolve_for_end.call1(&JsValue::NULL, &payload); - } else if stop_requested_for_end.get() { - let _ = reject_for_end.call1( - &JsValue::NULL, - &JsValue::from_str("speech recognition stopped before any transcript was captured"), - ); - } else { - let _ = reject_for_end.call1( - &JsValue::NULL, - &JsValue::from_str("speech recognition ended without a transcript"), - ); - } - }) as Box); - - let _ = js_sys::Reflect::set( - &recognition, - &JsValue::from_str("onresult"), - on_result.as_ref().unchecked_ref(), - ); - let _ = js_sys::Reflect::set( - 
&recognition, - &JsValue::from_str("onerror"), - on_error.as_ref().unchecked_ref(), - ); - let _ = js_sys::Reflect::set( - &recognition, - &JsValue::from_str("onend"), - on_end.as_ref().unchecked_ref(), - ); - - if let Some(start) = js_sys::Reflect::get(&recognition, &JsValue::from_str("start")) - .ok() - .and_then(|value| value.dyn_into::().ok()) - { - let _ = start.call0(&recognition); - } else { - let _ = reject.call1( - &JsValue::NULL, - &JsValue::from_str("SpeechRecognition.start is not callable"), - ); - } - - on_result.forget(); - on_error.forget(); - on_end.forget(); - }); - - let result = JsFuture::from(promise).await?; - let transcript = js_sys::Reflect::get(&result, &JsValue::from_str("transcript"))? - .as_string() - .ok_or_else(|| JsValue::from_str("Speech recognition transcript missing"))?; - let confidence = js_sys::Reflect::get(&result, &JsValue::from_str("confidence"))? - .as_f64() - .unwrap_or(0.0); - - info!("Speech recognition captured transcript with confidence={}", confidence); - - Ok(SpeechRecognitionResult { transcript, confidence }) - } - - pub fn stop(&self) -> Result<(), JsValue> { - self.stop_requested.set(true); - let stop = js_sys::Reflect::get(&self.recognition, &JsValue::from_str("stop"))? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("SpeechRecognition.stop is not callable"))?; - stop.call0(&self.recognition)?; - Ok(()) - } -} - -#[wasm_bindgen] -impl NfcScanResult { - #[wasm_bindgen(js_name = scanOnce)] - pub async fn scan_once() -> Result { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let ndef_ctor = js_sys::Reflect::get(&window, &JsValue::from_str("NDEFReader")) - .ok() - .filter(|value| !value.is_undefined() && !value.is_null()) - .ok_or_else(|| JsValue::from_str("Web NFC is not available in this browser context"))?; - - let constructor = ndef_ctor - .dyn_into::() - .map_err(|_| JsValue::from_str("NDEFReader constructor is not callable"))?; - let reader = js_sys::Reflect::construct(&constructor, &js_sys::Array::new())?; - - let scan = js_sys::Reflect::get(&reader, &JsValue::from_str("scan"))? - .dyn_into::() - .map_err(|_| JsValue::from_str("NDEFReader.scan is not callable"))?; - let scan_promise = scan - .call0(&reader)? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("NDEFReader.scan did not return a Promise"))?; - let _ = JsFuture::from(scan_promise).await?; - - let promise = js_sys::Promise::new(&mut |resolve, reject| { - let reject_for_timeout = reject.clone(); - let timeout_closure = Closure::once(Box::new(move || { - let _ = reject_for_timeout.call1( - &JsValue::NULL, - &JsValue::from_str("NFC scan timed out after 20 seconds"), - ); - }) as Box); - - if let Some(window) = web_sys::window() { - let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0( - timeout_closure.as_ref().unchecked_ref(), - 20_000, - ); - } - - let reject_for_error = reject.clone(); - - let on_reading = Closure::once(Box::new(move |event: JsValue| { - let serial_number = js_sys::Reflect::get(&event, &JsValue::from_str("serialNumber")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()); - let record_summary = summarize_ndef_records(&event); - - let payload = js_sys::Object::new(); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("serialNumber"), - &JsValue::from_str(&serial_number), - ); - let _ = js_sys::Reflect::set( - &payload, - &JsValue::from_str("recordSummary"), - &JsValue::from_str(&record_summary), - ); - let _ = resolve.call1(&JsValue::NULL, &payload); - }) as Box); - - let on_reading_error = Closure::once(Box::new(move |event: JsValue| { - let message = js_sys::Reflect::get(&event, &JsValue::from_str("message")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "NFC reading failed".to_string()); - let _ = reject_for_error.call1(&JsValue::NULL, &JsValue::from_str(&message)); - }) as Box); - - let _ = js_sys::Reflect::set( - &reader, - &JsValue::from_str("onreading"), - on_reading.as_ref().unchecked_ref(), - ); - let _ = js_sys::Reflect::set( - &reader, - &JsValue::from_str("onreadingerror"), - on_reading_error.as_ref().unchecked_ref(), - ); - - on_reading.forget(); - on_reading_error.forget(); - 
timeout_closure.forget(); - }); - - let result = JsFuture::from(promise).await?; - let serial_number = js_sys::Reflect::get(&result, &JsValue::from_str("serialNumber"))? - .as_string() - .unwrap_or_else(|| "unknown".to_string()); - let record_summary = js_sys::Reflect::get(&result, &JsValue::from_str("recordSummary"))? - .as_string() - .unwrap_or_else(|| "no records".to_string()); - - info!( - "NFC scan captured: serial_number={} summary={}", - serial_number, record_summary - ); - - Ok(NfcScanResult { - serial_number, - record_summary, - }) - } - - #[wasm_bindgen(js_name = serialNumber)] - pub fn serial_number(&self) -> String { - self.serial_number.clone() - } - - #[wasm_bindgen(js_name = recordSummary)] - pub fn record_summary(&self) -> String { - self.record_summary.clone() - } -} - -fn get_media_devices(navigator: &web_sys::Navigator) -> Result { - let media_devices = js_sys::Reflect::get(navigator, &JsValue::from_str("mediaDevices"))?; - - if media_devices.is_undefined() || media_devices.is_null() { - return Err(JsValue::from_str( - "navigator.mediaDevices is unavailable. Use https://... or http://localhost and allow access.", - )); - } - - media_devices - .dyn_into::() - .map_err(|_| JsValue::from_str("navigator.mediaDevices is not accessible in this browser")) -} - -async fn detect_webgpu_info() -> Result, JsValue> { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let navigator = window.navigator(); - let gpu = js_sys::Reflect::get(&navigator, &JsValue::from_str("gpu"))?; - - if gpu.is_null() || gpu.is_undefined() { - return Ok(None); - } - - let request_adapter = match js_sys::Reflect::get(&gpu, &JsValue::from_str("requestAdapter")) - .ok() - .and_then(|value| value.dyn_into::().ok()) - { - Some(request_adapter) => request_adapter, - None => return Ok(None), - }; - - let adapter_promise = request_adapter - .call0(&gpu)? 
- .dyn_into::() - .map_err(|_| JsValue::from_str("requestAdapter did not return a Promise"))?; - let adapter = JsFuture::from(adapter_promise).await?; - - if adapter.is_null() || adapter.is_undefined() { - return Ok(None); - } - - let info_object = if let Some(request_adapter_info) = - js_sys::Reflect::get(&adapter, &JsValue::from_str("requestAdapterInfo")) - .ok() - .and_then(|value| value.dyn_into::().ok()) - { - let info_promise = request_adapter_info - .call0(&adapter)? - .dyn_into::() - .map_err(|_| JsValue::from_str("requestAdapterInfo did not return a Promise"))?; - JsFuture::from(info_promise).await? - } else { - js_sys::Reflect::get(&adapter, &JsValue::from_str("info"))? - }; - - if info_object.is_null() || info_object.is_undefined() { - return Ok(None); - } - - let vendor = js_string_field(&info_object, "vendor"); - let architecture = js_string_field(&info_object, "architecture"); - let description = js_string_field(&info_object, "description"); - let device = js_string_field(&info_object, "device"); - let renderer = if device.is_empty() { description.clone() } else { device }; - - Ok(Some(GpuInfo { - vendor: string_or_unknown(vendor), - renderer: string_or_unknown(renderer), - architecture: string_or_unknown(architecture), - description: string_or_unknown(description), - source: "webgpu".to_string(), - })) -} - -fn detect_webgl_info() -> Result, JsValue> { - let window = web_sys::window().ok_or_else(|| JsValue::from_str("No window available"))?; - let document = window - .document() - .ok_or_else(|| JsValue::from_str("No document available"))?; - let canvas = document - .create_element("canvas")? - .dyn_into::() - .map_err(|_| JsValue::from_str("Failed to create canvas element"))?; - - let context = canvas - .get_context("webgl")? 
- .or_else(|| canvas.get_context("webgl2").ok().flatten()); - - let Some(context) = context else { - return Ok(None); - }; - - let get_extension = match js_sys::Reflect::get(&context, &JsValue::from_str("getExtension")) - .ok() - .and_then(|value| value.dyn_into::().ok()) - { - Some(get_extension) => get_extension, - None => return Ok(None), - }; - - let extension = get_extension.call1(&context, &JsValue::from_str("WEBGL_debug_renderer_info"))?; - if extension.is_null() || extension.is_undefined() { - return Ok(None); - } - - let get_parameter = match js_sys::Reflect::get(&context, &JsValue::from_str("getParameter")) - .ok() - .and_then(|value| value.dyn_into::().ok()) - { - Some(get_parameter) => get_parameter, - None => return Ok(None), - }; - - let vendor_enum = js_sys::Reflect::get(&extension, &JsValue::from_str("UNMASKED_VENDOR_WEBGL"))?; - let renderer_enum = js_sys::Reflect::get(&extension, &JsValue::from_str("UNMASKED_RENDERER_WEBGL"))?; - - let vendor = get_parameter - .call1(&context, &vendor_enum) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()); - let renderer = get_parameter - .call1(&context, &renderer_enum) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()); - - Ok(Some(GpuInfo { - vendor, - renderer: renderer.clone(), - architecture: "unknown".to_string(), - description: renderer, - source: "webgl_debug_renderer_info".to_string(), - })) -} - -fn js_string_field(value: &JsValue, field: &str) -> String { - js_sys::Reflect::get(value, &JsValue::from_str(field)) - .ok() - .and_then(|field_value| field_value.as_string()) - .unwrap_or_default() -} - -fn string_or_unknown(value: String) -> String { - if value.is_empty() { "unknown".to_string() } else { value } -} - -async fn request_sensor_permission(target: JsValue) -> Result { - if target.is_null() || target.is_undefined() { - return Ok(SENSOR_PERMISSION_GRANTED.to_string()); - } - - let request_permission = 
js_sys::Reflect::get(&target, &JsValue::from_str("requestPermission"))?; - if request_permission.is_null() || request_permission.is_undefined() { - return Ok(SENSOR_PERMISSION_GRANTED.to_string()); - } - - let request_permission = request_permission - .dyn_into::() - .map_err(|_| JsValue::from_str("requestPermission is not callable"))?; - let promise = request_permission - .call0(&target)? - .dyn_into::() - .map_err(|_| JsValue::from_str("requestPermission did not return a Promise"))?; - let result = JsFuture::from(promise).await?; - Ok(result - .as_string() - .unwrap_or_else(|| SENSOR_PERMISSION_GRANTED.to_string())) -} - -fn js_number_field(value: &JsValue, field: &str) -> Option { +pub fn js_number_field(value: &JsValue, field: &str) -> Option { js_sys::Reflect::get(value, &JsValue::from_str(field)) .ok() .and_then(|field_value| field_value.as_f64()) } -fn js_bool_field(value: &JsValue, field: &str) -> Option { +pub fn js_bool_field(value: &JsValue, field: &str) -> Option { js_sys::Reflect::get(value, &JsValue::from_str(field)) .ok() .and_then(|field_value| field_value.as_bool()) } -fn js_nested_object(value: &JsValue, field: &str) -> Option { +pub fn js_nested_object(value: &JsValue, field: &str) -> Option { js_sys::Reflect::get(value, &JsValue::from_str(field)) .ok() .filter(|nested| !nested.is_null() && !nested.is_undefined()) } -fn extract_speech_event_transcript(event: &JsValue) -> Option<(String, f64, bool)> { - let results = js_sys::Reflect::get(event, &JsValue::from_str("results")).ok()?; - let length = js_sys::Reflect::get(&results, &JsValue::from_str("length")) - .ok()? - .as_f64()? 
as u32; - - let mut transcript_parts = Vec::new(); - let mut confidence = 0.0; - let mut confidence_count = 0_u32; - let mut has_final = false; - - for index in 0..length { - let result = match js_sys::Reflect::get(&results, &JsValue::from_f64(index as f64)) { - Ok(result) => result, - Err(_) => continue, - }; - - let alternative = match js_sys::Reflect::get(&result, &JsValue::from_f64(0.0)) { - Ok(alternative) => alternative, - Err(_) => continue, - }; - - if let Some(part) = js_sys::Reflect::get(&alternative, &JsValue::from_str("transcript")) - .ok() - .and_then(|value| value.as_string()) - { - let trimmed = part.trim(); - if !trimmed.is_empty() { - transcript_parts.push(trimmed.to_string()); - } - } - - if let Some(value) = js_sys::Reflect::get(&alternative, &JsValue::from_str("confidence")) - .ok() - .and_then(|value| value.as_f64()) - { - confidence += value; - confidence_count += 1; - } - - if js_sys::Reflect::get(&result, &JsValue::from_str("isFinal")) - .ok() - .and_then(|value| value.as_bool()) - .unwrap_or(false) - { - has_final = true; - } - } - - if transcript_parts.is_empty() { - return None; - } - - let transcript = transcript_parts.join(" "); - let average_confidence = if confidence_count == 0 { - 0.0 - } else { - confidence / confidence_count as f64 - }; - - Some((transcript, average_confidence, has_final)) -} - -fn summarize_ndef_records(event: &JsValue) -> String { - let message = match js_sys::Reflect::get(event, &JsValue::from_str("message")) { - Ok(message) => message, - Err(_) => return "no message".to_string(), - }; - let records = match js_sys::Reflect::get(&message, &JsValue::from_str("records")) { - Ok(records) => records, - Err(_) => return "no records".to_string(), - }; - let length = match js_sys::Reflect::get(&records, &JsValue::from_str("length")) - .ok() - .and_then(|value| value.as_f64()) - { - Some(length) => length as u32, - None => return "no records".to_string(), - }; - - let mut summary = Vec::new(); - for index in 0..length { 
- let record = match js_sys::Reflect::get(&records, &JsValue::from_f64(index as f64)) { - Ok(record) => record, - Err(_) => continue, - }; - let record_type = js_sys::Reflect::get(&record, &JsValue::from_str("recordType")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_else(|| "unknown".to_string()); - let media_type = js_sys::Reflect::get(&record, &JsValue::from_str("mediaType")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_default(); - let id = js_sys::Reflect::get(&record, &JsValue::from_str("id")) - .ok() - .and_then(|value| value.as_string()) - .unwrap_or_default(); - - let mut parts = vec![format!("type={record_type}")]; - if !media_type.is_empty() { - parts.push(format!("media={media_type}")); - } - if !id.is_empty() { - parts.push(format!("id={id}")); - } - summary.push(parts.join(",")); - } - - if summary.is_empty() { - "no records".to_string() - } else { - summary.join(" | ") - } -} - // WebSocket client configuration #[wasm_bindgen] pub struct WsClientConfig {