diff --git a/Cargo.lock b/Cargo.lock index 2b2ded08..83bbf76f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -147,6 +147,7 @@ source = "git+https://github.com/embassy-rs/embassy?rev=1c466b81e6af6b34b1f70631 dependencies = [ "cfg-if", "critical-section", + "defmt", "document-features", "embassy-time-driver", "embassy-time-queue-driver", @@ -302,6 +303,15 @@ dependencies = [ "heapless", ] +[[package]] +name = "hyped_can" +version = "0.1.0" +dependencies = [ + "embassy-sync 0.6.0", + "embassy-time", + "heapless", +] + [[package]] name = "hyped_config" version = "0.1.0" diff --git a/boards/stm32f767zi/src/io.rs b/boards/stm32f767zi/src/io.rs index e7d410f4..530e6b05 100644 --- a/boards/stm32f767zi/src/io.rs +++ b/boards/stm32f767zi/src/io.rs @@ -3,7 +3,6 @@ use embassy_stm32::adc::{Adc, AnyAdcChannel, Instance}; use embassy_stm32::gpio::{Input, Output}; use embassy_stm32::{i2c::I2c, mode::Blocking}; use embassy_sync::blocking_mutex::{raw::NoopRawMutex, Mutex}; - use hyped_adc::HypedAdc; use hyped_adc_derive::HypedAdc; use hyped_gpio::{HypedGpioInputPin, HypedGpioOutputPin}; diff --git a/config/pods.yaml b/config/pods.yaml index de7f2b08..a41ccf5a 100644 --- a/config/pods.yaml +++ b/config/pods.yaml @@ -1,20 +1,432 @@ pods: - pod_1: - name: 'Pod 1' + poddington: + label: 'Poddington' + mode: 'LEVITATION_ONLY' measurements: - keyence: - name: 'Keyence' - unit: 'number of stripes' - format: 'integer' - limits: - critical: - min: 0 - max: 16 accelerometer_1: - name: 'Accelerometer 1' - unit: 'm/s^2' - format: 'float' - limits: - critical: - min: -150 - max: 150 + label: 'Accelerometer 1' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: -150 + high: 150 + accelerometer_2: + label: 'Accelerometer 2' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: -150 + high: 150 + accelerometer_3: + label: 'Accelerometer 3' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: -150 + high: 150 + accelerometer_4: + label: 'Accelerometer 4' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: -150 + high: 150 + accelerometer_avg: + label: 'Accelerometer Average' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: -150 + high: 150 + displacement: + label: 'Displacement' + type: 'displacement' + unit: 'm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + velocity: + label: 'Velocity' + type: 'velocity' + unit: 'm/s' + format: 'float' + limits: + critical: + low: 0 + high: 50 + acceleration: + label: 'Acceleration' + type: 'acceleration' + unit: 'm/s²' + format: 'float' + limits: + critical: + low: 0 + high: 5 + pressure_back_pull: + label: 'Pressure – Back Pull' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 5.5 + warning: + low: -0.19 + high: 5.2 + pressure_front_pull: + label: 'Pressure – Front Pull' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 5.5 + warning: + low: -0.19 + high: 5.2 + pressure_front_push: + label: 'Pressure – Front Push' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 5.5 + warning: + low: -0.19 + high: 5.2 + pressure_back_push: + label: 'Pressure – Back Push' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 5.5 + warning: + low: -0.19 + high: 5.2 + pressure_brakes_reservoir: + label: 'Pressure – Brakes Reservoir' + type: 'pressure' + unit: 
'bar' + format: 'float' + limits: + critical: + low: 3 + high: 7.4 + warning: + low: 3.5 + high: 6.9 + pressure_active_suspension_reservoir: + label: 'Pressure – Active Suspension Reservoir' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: 3 + high: 7.4 + warning: + low: 3.5 + high: 6.9 + pressure_front_brake: + label: 'Pressure – Front Brake' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 4.2 + warning: + low: -0.19 + high: 4 + pressure_back_brake: + label: 'Pressure – Back Brake' + type: 'pressure' + unit: 'bar' + format: 'float' + limits: + critical: + low: -0.2 + high: 4.2 + warning: + low: -0.19 + high: 4 + thermistor_1: + label: 'Thermistor 1' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_2: + label: 'Thermistor 2' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_3: + label: 'Thermistor 3' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_4: + label: 'Thermistor 4' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_5: + label: 'Thermistor 5' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_6: + label: 'Thermistor 6' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_7: + label: 'Thermistor 7' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_8: + label: 'Thermistor 8' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_9: + label: 'Thermistor 9' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_10: + label: 'Thermistor 10' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_11: + label: 'Thermistor 11' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + thermistor_12: + label: 'Thermistor 12' + type: 'temperature' + unit: '°C' + format: 'float' + limits: + critical: + low: 0 + high: 100 + hall_effect_1: + label: 'Hall Effect 1' + type: 'magnetism' + unit: 'mT' + format: 'float' + limits: + critical: + low: -100 + high: 100 + hall_effect_2: + label: 'Hall Effect 2' + type: 'magnetism' + unit: 'mT' + format: 'float' + limits: + critical: + low: -100 + high: 100 + keyence_1: + label: 'Keyence 1' + type: 'keyence' + unit: 'number of stripes' + format: 'integer' + limits: + critical: + low: 0 + high: 16 + warning: + low: 5 + high: 10 + keyence_2: + label: 'Keyence 2' + type: 'keyence' + unit: 'number of stripes' + format: 'integer' + limits: + critical: + low: 0 + high: 16 + warning: + low: 5 + high: 10 + power_line_resistance: + label: 'Power Line Resistance' + type: 'resistance' + unit: 'kΩ' + format: 'integer' + limits: + critical: + low: 0 + high: 100 + levitation_height_1: + label: 'Levitation Height 1' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + levitation_height_2: + label: 'Levitation Height 2' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + levitation_height_3: + label: 'Levitation Height 3' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 
100 + levitation_height_4: + label: 'Levitation Height 4' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + levitation_height_lateral_1: + label: 'Levitation Height Lateral 1' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + levitation_height_lateral_2: + label: 'Levitation Height Lateral 2' + type: 'levitation' + unit: 'mm' + format: 'float' + limits: + critical: + low: 0 + high: 100 + statuses: + brake_clamp_status: + label: 'Brake Clamp Status' + type: 'binary-status' + format: 'enum' + values: + - value: 1 + label: 'CLAMPED' + - value: 0 + label: 'UNCLAMPED' + pod_raised_status: + label: 'Pod Raised Status' + type: 'binary-status' + format: 'enum' + values: + - value: 1 + label: 'RAISED' + - value: 0 + label: 'LOWERED' + battery_status: + label: 'Battery Status' + type: 'binary-status' + format: 'enum' + values: + - value: 1 + label: 'HEALTHY' + - value: 0 + label: 'UNHEALTHY' + motor_controller_status: + label: 'Motor Controller Status' + type: 'binary-status' + format: 'enum' + values: + - value: 1 + label: 'HEALTHY' + - value: 0 + label: 'UNHEALTHY' + high_power_status: + label: 'High Power Status' + type: 'binary-status' + format: 'enum' + values: + - value: 1 + label: 'ACTIVE' + - value: 0 + label: 'OFF' \ No newline at end of file diff --git a/config/src/lib.rs b/config/src/lib.rs index c699a3cf..f16ad36c 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -19,13 +19,13 @@ pub enum LimitLevel { #[derive(Debug, Serialize, Deserialize)] pub struct MeasurementLimits { - min: f64, - max: f64, + low: f64, + high: f64, } #[derive(Debug, Serialize, Deserialize)] pub struct Measurement { - pub name: String, + pub label: String, pub unit: String, pub format: MeasurementFormat, pub limits: HashMap, @@ -33,7 +33,7 @@ pub struct Measurement { #[derive(Debug, Serialize, Deserialize)] pub struct Pod { - pub name: String, + pub label: String, pub measurements: HashMap, } @@ -80,38 +80,38 @@ mod tests { let raw_config = r#" pods: pod_1: - name: 'Pod 1' + label: 'Pod 1' measurements: keyence: - name: 'Keyence' + label: 'Keyence' unit: 'number of stripes' format: 'integer' limits: critical: - min: 0 - max: 16 + low: 0 + high: 16 accelerometer_1: - name: 'Accelerometer 1' + label: 'Accelerometer 1' unit: 'm/s^2' format: 'float' limits: critical: - min: -150 - max: 150 + low: -150 + high: 150 "#; let config = PodConfig::new(raw_config).unwrap(); assert_eq!(config.pod_ids, vec!["pod_1"]); let pod = config.pods.get("pod_1").unwrap(); - assert_eq!(pod.name, "Pod 1"); + assert_eq!(pod.label, "Pod 1"); assert_eq!(pod.measurements.len(), 2); let keyence = pod.measurements.get("keyence").unwrap(); - assert_eq!(keyence.name, "Keyence"); + assert_eq!(keyence.label, "Keyence"); assert_eq!(keyence.unit, "number of stripes"); assert_eq!(keyence.format, MeasurementFormat::Int); assert_eq!(keyence.limits.len(), 1); let keyence_limits = keyence.limits.get(&LimitLevel::Critical).unwrap(); - assert_eq!(keyence_limits.min, 0.0); - assert_eq!(keyence_limits.max, 16.0); + assert_eq!(keyence_limits.low, 0.0); + assert_eq!(keyence_limits.high, 16.0); } #[test] @@ -119,27 +119,27 @@ mod tests { let raw_config = r#" pods: pod_1: - name: 'Pod 1' + label: 'Pod 1' measurements: keyence: - name: 'Keyence' + label: 'Keyence' unit: 'number of stripes' format: 'integer' limits: critical: - min: 0 - max: 16 + low: 0 + high: 16 pod_2: - name: 'Pod 2' + label: 'Pod 2' measurements: accelerometer_1: - name: 'Accelerometer 1' + label: 
'Accelerometer 1' unit: 'm/s^2' format: 'float' limits: critical: - min: -150 - max: 150 + low: -150 + high: 150 "#; let config = PodConfig::new(raw_config).unwrap(); assert!(config.pod_ids.len() == 2); @@ -147,9 +147,9 @@ mod tests { assert!(config.pod_ids[0] == "pod_2" || config.pod_ids[1] == "pod_2"); let pod1 = config.pods.get("pod_1").unwrap(); let pod2 = config.pods.get("pod_2").unwrap(); - assert_eq!(pod1.name, "Pod 1"); + assert_eq!(pod1.label, "Pod 1"); assert_eq!(pod1.measurements.len(), 1); - assert_eq!(pod2.name, "Pod 2"); + assert_eq!(pod2.label, "Pod 2"); assert_eq!(pod2.measurements.len(), 1); } @@ -158,55 +158,55 @@ mod tests { let raw_config = r#" pods: pod_1: - name: 'Pod 1' + label: 'Pod 1' measurements: keyence: - name: 'Keyence' + label: 'Keyence' unit: 'number of stripes' format: 'integer' limits: warning: - min: 0 - max: 16 + low: 0 + high: 16 accelerometer_1: - name: 'Accelerometer 1' + label: 'Accelerometer 1' unit: 'm/s^2' format: 'float' limits: critical: - min: -150 - max: 150 + low: -150 + high: 150 temperature: - name: 'Temperature' + label: 'Temperature' unit: 'C' format: 'float' limits: warning: - min: 0 - max: 50 + low: 0 + high: 50 critical: - min: -20 - max: 80 + low: -20 + high: 80 "#; let config = PodConfig::new(raw_config).unwrap(); let pod = config.pods.get("pod_1").unwrap(); let keyence = pod.measurements.get("keyence").unwrap(); assert_eq!(keyence.limits.len(), 1); - assert_eq!(keyence.limits.get(&LimitLevel::Warning).unwrap().min, 0.0); + assert_eq!(keyence.limits.get(&LimitLevel::Warning).unwrap().low, 0.0); let accelerometer = pod.measurements.get("accelerometer_1").unwrap(); assert_eq!(accelerometer.limits.len(), 1); assert_eq!( - accelerometer.limits.get(&LimitLevel::Critical).unwrap().min, + accelerometer.limits.get(&LimitLevel::Critical).unwrap().low, -150.0 ); let temperature = pod.measurements.get("temperature").unwrap(); assert_eq!(temperature.limits.len(), 2); assert_eq!( - temperature.limits.get(&LimitLevel::Warning).unwrap().max, + temperature.limits.get(&LimitLevel::Warning).unwrap().high, 50.0 ); assert_eq!( - temperature.limits.get(&LimitLevel::Critical).unwrap().max, + temperature.limits.get(&LimitLevel::Critical).unwrap().high, 80.0 ); } diff --git a/lib/core/src/can_sendable.rs b/lib/core/src/can_sendable.rs new file mode 100644 index 00000000..b3e80484 --- /dev/null +++ b/lib/core/src/can_sendable.rs @@ -0,0 +1,287 @@ +use defmt::error; + +pub enum CanMessageType { + Bool = 0, + F32 = 1, + TwoU16 = 2, + PosDelta = 3, +} + +impl From for CanMessageType { + fn from(val: u8) -> Self { + match val { + 0 => CanMessageType::Bool, + 1 => CanMessageType::F32, + 2 => CanMessageType::TwoU16, + 3 => CanMessageType::PosDelta, + _ => { + error!("Unknown CanMessageType: {}", val); + panic!(); + } + } + } +} + +/// Checks if a CAN message is a valid message +pub fn is_valid_can_msg(msg: &[u8; 8]) -> bool { + // we assume that board id is valid as ATM we dont know how many boards sending messages there are + (msg[0] & 0x0F) < 4 +} + +/// Extracts the message type from a CAN message +pub fn can_msg_type_from_u8(msg: &[u8; 8]) -> CanMessageType { + CanMessageType::from(msg[0] & 0x0F) +} + +/// Extracts the board id from a CAN message +pub fn get_board_id(msg: &[u8; 8]) -> u8 { + msg[0] >> 4 +} + +/// Builds a CAN header byte from a board id and message type +pub fn build_can_header(board_id: u8, msg_type: CanMessageType) -> u8 { + (board_id << 4) | (msg_type as u8) +} + +pub trait CanSendable { + fn encode_to_can(&self, board_id: u8) -> [u8; 8]; + 
fn can_decode(can_msg: &[u8; 8]) -> Self; +} + +impl CanSendable for bool { + fn encode_to_can(&self, board_id: u8) -> [u8; 8] { + let mut data: [u8; 8] = [0; 8]; + data[0] = build_can_header(board_id, CanMessageType::Bool); + data[1] = *self as u8; + data + } + + fn can_decode(can_msg: &[u8; 8]) -> Self { + can_msg[1] != 0 + } +} + +impl CanSendable for [u16; 2] { + fn encode_to_can(&self, board_id: u8) -> [u8; 8] { + let mut data: [u8; 8] = [0; 8]; + data[0] = build_can_header(board_id, CanMessageType::TwoU16); + + let u16_bytes: [u8; 2] = self[0].to_le_bytes(); + data[1..3].copy_from_slice(&u16_bytes); + + let u16_bytes: [u8; 2] = self[1].to_le_bytes(); + data[3..5].copy_from_slice(&u16_bytes); + + data + } + + fn can_decode(can_msg: &[u8; 8]) -> Self { + let mut u16_bytes: [u8; 2] = [0; 2]; + u16_bytes.copy_from_slice(&can_msg[1..3]); + let u16_1 = u16::from_le_bytes(u16_bytes); + + u16_bytes.copy_from_slice(&can_msg[3..5]); + let u16_2 = u16::from_le_bytes(u16_bytes); + + [u16_1, u16_2] + } +} + +impl CanSendable for f32 { + fn encode_to_can(&self, board_id: u8) -> [u8; 8] { + let mut data: [u8; 8] = [0; 8]; + data[0] = build_can_header(board_id, CanMessageType::F32); + + let f32_bytes: [u8; 4] = self.to_le_bytes(); + data[1..5].copy_from_slice(&f32_bytes); + data + } + + fn can_decode(can_msg: &[u8; 8]) -> Self { + let mut f32_bytes: [u8; 4] = [0; 4]; + f32_bytes.copy_from_slice(&can_msg[1..5]); + f32::from_le_bytes(f32_bytes) + } +} + +// Struct to hold f32 so they can be sent and decoded over CAN +pub struct PositionDelta { + pub clock: Option, + pub x: Option, + pub y: Option, + pub z: Option, +} + +impl PositionDelta { + // sender side code + pub fn new(clock: u8, x: f32, y: f32, z: f32) -> Self { + PositionDelta { + clock: Some(clock), + x: Some(x), + y: Some(y), + z: Some(z), + } + } + + // converts a provided F32 + pub fn encode_to_can(&self, board_id: u8) -> [[u8; 8]; 3] { + assert!( + self.is_complete(), + "Attempted to encode an incomplete PositionDelta" + ); + + let mut x = self.x.unwrap().encode_to_can(board_id); + let mut y = self.y.unwrap().encode_to_can(board_id); + let mut z = self.z.unwrap().encode_to_can(board_id); + + x[0] = build_can_header(board_id, CanMessageType::PosDelta); + y[0] = build_can_header(board_id, CanMessageType::PosDelta); + z[0] = build_can_header(board_id, CanMessageType::PosDelta); + + x[5] = 0; + y[5] = 1; + z[5] = 2; + + let clock = self.clock.unwrap(); + x[6] = clock; + y[6] = clock; + z[6] = clock; + [x, y, z] + } +} + +impl PositionDelta { + // receiver code + + /// Create empty PositionDelta to fill + pub fn new_empty() -> Self { + PositionDelta { + clock: None, + x: None, + y: None, + z: None, + } + } + + /// Reset position delta to empty + pub fn clear(&mut self) { + self.clock = None; + self.x = None; + self.y = None; + self.z = None; + } + + /// Check we've received a completed PositionDelta, (all x,y,z all with the same clock value) + pub fn is_complete(&self) -> bool { + self.clock.is_some() && self.x.is_some() && self.y.is_some() && self.z.is_some() + } + + /// Returns finished f32 + pub fn return_complete(&self) -> Option<[f32; 3]> { + if !self.is_complete() { + None + } else { + Some([self.x.unwrap(), self.y.unwrap(), self.z.unwrap()]) + } + } + + /// Attempt to decode a CAN PositionDelta value and add it to the current structure + /// example implementation + /// ```NoRun // cos doctests dont like no_std + /// use hyped_core::can_sendable::*; + /// + /// let mut pos_d = PositionDelta::new_empty(); + /// let mut err_cnt = 0; + /// 
loop { + /// let next_val = [0; 8]; // get next CAN msg + /// if let Some(err) = pos_d.can_decode_step(next_val){ + /// match err { + /// PositionDeltaDecodeError::PositionAlreadyReceived => { /* ... throw error and panic */ } + /// PositionDeltaDecodeError::ClockOutdated => { /* we dont need to do anything*/ } + /// PositionDeltaDecodeError::ClockInFuture => { + /// err_cnt += 1; + /// pos_d.clear(); + /// let _ = pos_d.can_decode_step(next_val); // will never error on first call after clear + /// } + /// } + /// } + /// if (err_cnt > 3) { + /// // ... emergency exit + /// } + /// + /// if let Some(vals) = pos_d.return_complete() { + /// let err_cnt = 0; + /// // ... do something with complete values + /// } + /// } + /// ``` + pub fn can_decode_step(&mut self, step: [u8; 8]) -> Option { + let step_type = step[5]; + let step_clock = step[6]; + + if self.clock.is_none() { + self.clock = Some(step_clock); + } else if self.clock.unwrap() != step_clock { + return Some( + if Self::is_clock_infuture(self.clock.unwrap(), step_clock) { + PositionDeltaDecodeError::ClockInFuture + } else { + PositionDeltaDecodeError::ClockOutdated + }, + ); + } + + let step_data = f32::can_decode(&step); + match step_type { + 0 => { + if self.x.is_none() { + self.x = Some(step_data); + } else { + return Some(PositionDeltaDecodeError::PositionAlreadyReceived); + } + } + 1 => { + if self.y.is_none() { + self.y = Some(step_data); + } else { + return Some(PositionDeltaDecodeError::PositionAlreadyReceived); + } + } + 2 => { + if self.z.is_none() { + self.z = Some(step_data); + } else { + return Some(PositionDeltaDecodeError::PositionAlreadyReceived); + } + } + _ => { + error!("Unknown PositionDelta step type: {}", step_type); + panic!(); + } + } + None + } + + /// Checks if new clock is infront of the old + fn is_clock_infuture(old: u8, new: u8) -> bool { + let diff = new.wrapping_sub(old); + diff < 128 // assume any diff > 128 is a wraparound + } +} + +/// All the possible failure reasons for failing to decode a position delta step +pub enum PositionDeltaDecodeError { + /// The x y or z value for a given clock cycle has already been received. + /// This error shouldn't ever be seen in normal operation, if it is that + /// means that there is a **major issue** with the execution flow, either the + /// CAN sender of the board is sending repeats or were are somehow an + /// entire u8 out of sync (should be reason to stop the pod) + PositionAlreadyReceived, + /// The clock value we've received is in the future and so the current + /// cycle should be discarded. The no. of discarded cycles should be + /// recorded and if it reaches a given threshold (recommended 3~5) the + /// pod should come to a stop as its current readings are too out of date + ClockInFuture, + /// Clock cycle of an old clock cycle has been received + ClockOutdated, +} diff --git a/lib/core/src/lib.rs b/lib/core/src/lib.rs index 17226f9a..899f1920 100644 --- a/lib/core/src/lib.rs +++ b/lib/core/src/lib.rs @@ -1,5 +1,6 @@ #![cfg_attr(not(feature = "std"), no_std)] +pub mod can_sendable; pub mod format_string; pub mod log_types; pub mod mqtt; diff --git a/lib/io/hyped_adc/hyped_adc_derive/src/lib.rs b/lib/io/hyped_adc/hyped_adc_derive/src/lib.rs index 41d15543..d4621e1c 100644 --- a/lib/io/hyped_adc/hyped_adc_derive/src/lib.rs +++ b/lib/io/hyped_adc/hyped_adc_derive/src/lib.rs @@ -16,10 +16,14 @@ fn impl_hyped_adc(ast: &syn::DeriveInput) -> TokenStream { let (impl_generics, ty_generics, _) = generics.split_for_impl(); let gen = quote! 
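// Illustrative sketch (not part of this diff): round-tripping values through the
// can_sendable encoding added above. Byte 0 packs the board id (high nibble) and
// message type (low nibble); payload bytes are little-endian. The `example_*`
// function names are only for illustration.
use hyped_core::can_sendable::{
    can_msg_type_from_u8, get_board_id, is_valid_can_msg, CanMessageType, CanSendable,
    PositionDelta,
};

fn example_f32_roundtrip() {
    let frame = 42.5_f32.encode_to_can(3);
    assert!(is_valid_can_msg(&frame));
    assert_eq!(get_board_id(&frame), 3);
    assert!(matches!(can_msg_type_from_u8(&frame), CanMessageType::F32));
    assert_eq!(f32::can_decode(&frame), 42.5);
}

fn example_position_delta_sender() {
    // Each axis goes out as its own frame: byte 5 selects the axis (0 = x, 1 = y,
    // 2 = z) and byte 6 carries the shared clock the receiver uses to reassemble.
    let frames = PositionDelta::new(7, 0.1, 0.2, 0.3).encode_to_can(2);
    for (axis, frame) in frames.iter().enumerate() {
        assert!(matches!(can_msg_type_from_u8(frame), CanMessageType::PosDelta));
        assert_eq!(frame[5] as usize, axis);
        assert_eq!(frame[6], 7);
    }
}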
{ impl #impl_generics HypedAdc for #name #ty_generics { - /// Read a value from the ADC channel fn read_value(&mut self) -> u16 { self.adc.blocking_read(&mut self.channel) } + + fn get_resolution(&self) -> u16 { + /// STM32 boards have a resolution of 12 bits + 4096 + } } impl #impl_generics #name #ty_generics { diff --git a/lib/io/hyped_adc/src/lib.rs b/lib/io/hyped_adc/src/lib.rs index ab564e43..b6e60ca4 100644 --- a/lib/io/hyped_adc/src/lib.rs +++ b/lib/io/hyped_adc/src/lib.rs @@ -2,7 +2,10 @@ /// ADC trait used to abstract the ADC peripheral pub trait HypedAdc { + /// Read value from the ADC channel fn read_value(&mut self) -> u16; + /// Get resolution of ADC + fn get_resolution(&self) -> u16; } pub mod mock_adc { @@ -16,12 +19,15 @@ pub mod mock_adc { } impl crate::HypedAdc for MockAdc { - /// Reads a value from the ADC fn read_value(&mut self) -> u16 { let next_value: u16 = self.next_values.pop().unwrap_or(self.current_value); self.current_value = next_value; self.current_value } + + fn get_resolution(&self) -> u16 { + 4096 + } } impl MockAdc { diff --git a/lib/io/hyped_can/Cargo.toml b/lib/io/hyped_can/Cargo.toml new file mode 100644 index 00000000..8a040fd9 --- /dev/null +++ b/lib/io/hyped_can/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "hyped_can" +version = "0.1.0" +edition = "2021" + +[dependencies] +heapless = "0.8.0" +embassy-sync = { version = "0.6.0", features = ["defmt"], git = "https://github.com/embassy-rs/embassy", rev = "1c466b81e6af6b34b1f706318cc0870a459550b7"} +embassy-time = { version = "0.3.1", features = ["defmt", "defmt-timestamp-uptime", "tick-hz-32_768"], git = "https://github.com/embassy-rs/embassy", rev = "1c466b81e6af6b34b1f706318cc0870a459550b7"} \ No newline at end of file diff --git a/lib/io/hyped_can/src/lib.rs b/lib/io/hyped_can/src/lib.rs new file mode 100644 index 00000000..d4ae4308 --- /dev/null +++ b/lib/io/hyped_can/src/lib.rs @@ -0,0 +1,135 @@ +#![no_std] + +/// CAN errors that can occur +/// From: https://docs.embassy.dev/embassy-stm32/git/stm32f767zi/can/enums/enum.BusError.html, +/// https://docs.embassy.dev/embassy-stm32/git/stm32f767zi/can/enum.TryWriteError.html +/// https://docs.embassy.dev/embassy-stm32/git/stm32f767zi/can/enum.TryReadError.html, +/// and https://docs.embassy.dev/embassy-stm32/git/stm32f767zi/can/enums/enum.FrameCreateError.html +#[derive(Debug)] +pub enum CanError { + Stuff, + Form, + Acknowledge, + BitRecessive, + BitDominant, + Crc, + Software, + BusOff, + BusPassive, + BusWarning, + Full, + Empty, + Unknown, + NotEnoughData, + InvalidDataLength, + InvalidCanId, +} + +#[derive(Clone)] +pub struct HypedCanFrame { + pub can_id: u32, // 32 bit CAN_ID + EFF/RTR/ERR flags + pub data: [u8; 8], // data that is sent over CAN, split into bytes +} + +pub type Timestamp = embassy_time::Instant; + +#[derive(Clone)] +pub struct HypedEnvelope { + /// Reception time. + pub ts: Timestamp, + /// The actual CAN frame. + pub frame: HypedCanFrame, +} + +/// CAN trait used to abstract the CAN operations +pub trait HypedCan { + /// Attempts to read a CAN frame without blocking. + /// + /// Returns [Err(TryReadError::Empty)] if there are no frames in the rx queue. + fn read_frame(&mut self) -> Result; + /// Attempts to transmit a frame without blocking. + /// + /// Returns [Err(CanError::Full)] if the frame can not be queued for transmission now. + /// + /// The frame will only be accepted if there is no frame with the same priority already queued. 
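// Illustrative sketch (not part of this diff): using the new get_resolution()
// alongside read_value() to convert a raw sample to volts. The 3.3 V reference
// and the helper name are assumptions for the example; note that get_resolution()
// returns the full-scale count (4096 for the 12-bit STM32 ADCs), not the bit width.
use hyped_adc::HypedAdc;

fn read_volts<A: HypedAdc>(adc: &mut A) -> f32 {
    const V_REF: f32 = 3.3; // assumed reference voltage
    adc.read_value() as f32 * V_REF / adc.get_resolution() as f32
}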
This is done + /// to work around a hardware limitation that could lead to out-of-order delivery of frames with the same priority. + fn write_frame(&mut self, frame: &HypedCanFrame) -> Result<(), CanError>; +} + +pub mod mock_can { + use core::cell::RefCell; + use embassy_sync::blocking_mutex::{raw::CriticalSectionRawMutex, Mutex}; + use heapless::Deque; + + use crate::HypedCanFrame; + + /// A fixed-size map of CAN frames + type CanValues = Deque; + + /// A mock CAN instance which can be used for testing + pub struct MockCan<'a> { + /// Values that have been read from the CAN bus + frames_to_read: &'a Mutex>, + /// Values that have been sent over the CAN bus + frames_sent: CanValues, + /// Whether to fail reading frames + fail_read: &'a Mutex, + /// Whether to fail writing frames + fail_write: &'a Mutex, + } + + impl crate::HypedCan for MockCan<'_> { + fn read_frame(&mut self) -> Result { + if self.fail_read.lock(|fail_read| *fail_read) { + return Err(super::CanError::Unknown); + } + self.frames_to_read.lock(|frames_to_read| { + match frames_to_read.borrow_mut().pop_front() { + Some(frame) => Ok(super::HypedEnvelope { + ts: embassy_time::Instant::now(), + frame, + }), + None => Err(super::CanError::Empty), + } + }) + } + + fn write_frame(&mut self, frame: &super::HypedCanFrame) -> Result<(), super::CanError> { + if self.fail_write.lock(|fail_write| *fail_write) { + return Err(super::CanError::Unknown); + } + match self.frames_sent.push_front(frame.clone()) { + Ok(_) => Ok(()), + Err(_) => Err(super::CanError::Unknown), + } + } + } + + impl MockCan<'_> { + pub fn new( + frames_to_read: &'static Mutex>, + ) -> Self { + static FAIL_READ: Mutex = Mutex::new(false); + static FAIL_WRITE: Mutex = Mutex::new(false); + MockCan::new_with_failures(frames_to_read, &FAIL_READ, &FAIL_WRITE) + } + + pub fn new_with_failures( + frames_to_read: &'static Mutex>, + fail_read: &'static Mutex, + fail_write: &'static Mutex, + ) -> Self { + MockCan { + frames_to_read, + frames_sent: CanValues::new(), + fail_read, + fail_write, + } + } + + /// Get the values that have been sent over the CAN bus + pub fn get_can_frames(&self) -> &CanValues { + &self.frames_sent + } + } +} diff --git a/lib/io/hyped_i2c/hyped_i2c_derive/Cargo.lock b/lib/io/hyped_i2c/hyped_i2c_derive/Cargo.lock index 38de2875..724a162e 100644 --- a/lib/io/hyped_i2c/hyped_i2c_derive/Cargo.lock +++ b/lib/io/hyped_i2c/hyped_i2c_derive/Cargo.lock @@ -3,7 +3,7 @@ version = 3 [[package]] -name = "hyped_adc_derive" +name = "hyped_i2c_derive" version = "0.1.0" dependencies = [ "quote", diff --git a/lib/io/hyped_i2c/hyped_i2c_derive/src/lib.rs b/lib/io/hyped_i2c/hyped_i2c_derive/src/lib.rs index e9b93dcd..347df560 100644 --- a/lib/io/hyped_i2c/hyped_i2c_derive/src/lib.rs +++ b/lib/io/hyped_i2c/hyped_i2c_derive/src/lib.rs @@ -16,7 +16,7 @@ fn impl_hyped_i2c(ast: &syn::DeriveInput) -> TokenStream { let (impl_generics, ty_generics, _) = generics.split_for_impl(); let gen = quote! 
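// Illustrative sketch (not part of this diff): code written against the HypedCan
// trait above works unchanged with the real driver or with mock_can::MockCan in
// tests. The function name is only for illustration.
use hyped_can::{CanError, HypedCan, HypedCanFrame};

/// Sends one frame, then drains whatever is waiting in the receive queue.
fn send_and_drain<C: HypedCan>(can: &mut C, frame: &HypedCanFrame) -> Result<(), CanError> {
    can.write_frame(frame)?;
    loop {
        match can.read_frame() {
            Ok(envelope) => {
                // envelope.ts is the reception time, envelope.frame the received frame
                let _ = (envelope.ts, envelope.frame.can_id, envelope.frame.data);
            }
            Err(CanError::Empty) => return Ok(()),
            Err(e) => return Err(e),
        }
    }
}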
{ impl #impl_generics HypedI2c for #name #ty_generics{ - /// Read a byte from a register on a device + fn read_byte(&mut self, device_address: u8, register_address: u8) -> Option { let mut read = [0]; let result = self.i2c.lock(|i2c| { @@ -32,26 +32,24 @@ fn impl_hyped_i2c(ast: &syn::DeriveInput) -> TokenStream { } } - /// Read a byte from a register with a 16-bit address on a device - fn read_byte_16(&mut self, device_address: u8, register_address: u16) -> Option { - let register_addr_hi = (register_address >> 8) as u8 & 0xFF; - let register_addr_lo = register_address as u8 & 0xFF; - let mut read = [0]; - let result = self.i2c.lock(|i2c| { - i2c.borrow_mut().blocking_write_read( - device_address, - [register_addr_hi, register_addr_lo].as_ref(), - &mut read, - ) - }); - match result { - Ok(_) => Some(read[0]), - Err(_) => None, - } - } + fn read_byte_16(&mut self, device_address: u8, register_address: u16) -> Option { + let register_addr_hi = (register_address >> 8) as u8 & 0xFF; + let register_addr_lo = register_address as u8 & 0xFF; + let mut read = [0]; + let result = self.i2c.lock(|i2c| { + i2c.borrow_mut().blocking_write_read( + device_address, + [register_addr_hi, register_addr_lo].as_ref(), + &mut read, + ) + }); + match result { + Ok(_) => Some(read[0]), + Err(_) => None, + } + } - /// Write a byte to a register on a device fn write_byte_to_register( &mut self, device_address: u8, @@ -76,7 +74,6 @@ fn impl_hyped_i2c(ast: &syn::DeriveInput) -> TokenStream { } } - /// Write a byte to a device fn write_byte(&mut self, device_address: u8, data: u8) -> Result<(), I2cError> { let result = self.i2c.lock(|i2c| { i2c.borrow_mut().blocking_write(device_address, [data].as_ref()) @@ -95,33 +92,31 @@ fn impl_hyped_i2c(ast: &syn::DeriveInput) -> TokenStream { } } - // Write a byte to a register with a 16-bit address on a device - fn write_byte_to_register_16( - &mut self, - device_address: u8, - register_address: u16, - data: u8, - ) -> Result<(), I2cError> { - let register_addr_hi = (register_address >> 8) as u8; - let register_addr_lo = register_address as u8; - let result = self.i2c.lock(|i2c| { - i2c.borrow_mut() - .blocking_write(device_address, [register_addr_hi, register_addr_lo, data].as_ref()) - }); - match result { - Ok(_) => Ok(()), - Err(e) => Err(match e { - embassy_stm32::i2c::Error::Bus => I2cError::Bus, - embassy_stm32::i2c::Error::Arbitration => I2cError::Arbitration, - embassy_stm32::i2c::Error::Nack => I2cError::Nack, - embassy_stm32::i2c::Error::Timeout => I2cError::Timeout, - embassy_stm32::i2c::Error::Crc => I2cError::Crc, - embassy_stm32::i2c::Error::Overrun => I2cError::Overrun, - embassy_stm32::i2c::Error::ZeroLengthTransfer => I2cError::ZeroLengthTransfer, - }), - } - } - + fn write_byte_to_register_16( + &mut self, + device_address: u8, + register_address: u16, + data: u8, + ) -> Result<(), I2cError> { + let register_addr_hi = (register_address >> 8) as u8; + let register_addr_lo = register_address as u8; + let result = self.i2c.lock(|i2c| { + i2c.borrow_mut() + .blocking_write(device_address, [register_addr_hi, register_addr_lo, data].as_ref()) + }); + match result { + Ok(_) => Ok(()), + Err(e) => Err(match e { + embassy_stm32::i2c::Error::Bus => I2cError::Bus, + embassy_stm32::i2c::Error::Arbitration => I2cError::Arbitration, + embassy_stm32::i2c::Error::Nack => I2cError::Nack, + embassy_stm32::i2c::Error::Timeout => I2cError::Timeout, + embassy_stm32::i2c::Error::Crc => I2cError::Crc, + embassy_stm32::i2c::Error::Overrun => I2cError::Overrun, + 
embassy_stm32::i2c::Error::ZeroLengthTransfer => I2cError::ZeroLengthTransfer, + }), + } + } } impl #impl_generics #name #ty_generics { diff --git a/lib/io/hyped_i2c/src/lib.rs b/lib/io/hyped_i2c/src/lib.rs index ff5703ca..507c4055 100644 --- a/lib/io/hyped_i2c/src/lib.rs +++ b/lib/io/hyped_i2c/src/lib.rs @@ -18,20 +18,25 @@ pub enum I2cError { /// I2C trait used to abstract the I2C peripheral pub trait HypedI2c { + /// Read a byte from a register on a device fn read_byte(&mut self, device_address: u8, register_address: u8) -> Option; + /// Read a byte from a 16-bit register on a device fn read_byte_16(&mut self, device_address: u8, register_address: u16) -> Option; + /// Write a byte to a register on a device fn write_byte_to_register( &mut self, device_address: u8, register_address: u8, data: u8, ) -> Result<(), I2cError>; + // Write a byte to a 16-bit register on a device fn write_byte_to_register_16( &mut self, device_address: u8, register_address: u16, data: u8, ) -> Result<(), I2cError>; + /// Write a byte to a device fn write_byte(&mut self, device_address: u8, data: u8) -> Result<(), I2cError>; } diff --git a/lib/localisation/src/control.rs b/lib/localisation/src/control.rs index 0f450390..eea187d4 100644 --- a/lib/localisation/src/control.rs +++ b/lib/localisation/src/control.rs @@ -1 +1 @@ -pub mod navigator; +pub mod localizer; diff --git a/lib/localisation/src/control/localizer.rs b/lib/localisation/src/control/localizer.rs new file mode 100644 index 00000000..0b66340c --- /dev/null +++ b/lib/localisation/src/control/localizer.rs @@ -0,0 +1,202 @@ +use crate::{ + filtering::kalman_filter::KalmanFilter, + preprocessing::{ + accelerometer::AccelerometerPreprocessor, + keyence::{KeyenceAgrees, SensorChecks}, + optical::process_optical_data, + }, + types::{RawAccelerometerData, NUM_ACCELEROMETERS, NUM_AXIS}, +}; + +use heapless::Vec; + +use libm::pow; +use nalgebra::{Matrix2, Vector1, Vector2}; + +//TODOLater: Confirm values are correct + +// Time step (s) +const DELTA_T: f64 = 0.01; + +// Stripe width (m) +const STRIPE_WIDTH: f64 = 1.0; + +pub struct Localizer { + displacement: f64, + velocity: f64, + previous_velocity: f64, + acceleration: f64, + kalman_filter: KalmanFilter, + keyence_checker: KeyenceAgrees, + keyence_val: f64, + optical_val: f64, + accelerometer_val: f64, + accelerometer_preprocessor: AccelerometerPreprocessor, +} + +impl Localizer { + pub fn new() -> Localizer { + let initial_state = Vector2::new(0.0, 0.0); + let initial_covariance = Matrix2::new(1.0, 0.0, 0.0, 1.0); + let transition_matrix = Matrix2::new(1.0, DELTA_T, 0.0, 1.0); + let control_matrix = Vector2::new(0.5 * DELTA_T * DELTA_T, DELTA_T); + let observation_matrix = Matrix2::new(1.0, 0.0, 0.0, DELTA_T); + + // Assuming frequency of 6400hz for IMU at 120 mu g / sqrt(Hz) + // standard deviation = 120 * sqrt(6400) = 9600 mu g = 0.0096 g + // = 0.0096 * 9.81 = 0.094176 m/s^2 + // variance = 0.094176^2 = 0.0089 m/s^2 + + let process_noise: Matrix2 = Matrix2::new( + 0.25 * pow(DELTA_T, 4.0), + 0.5 * pow(DELTA_T, 3.0), + 0.5 * pow(DELTA_T, 3.0), + pow(DELTA_T, 2.0) * 0.0089, + ); + + // We assume the stripe counter is accurate + // Optical flow expects standard deviation of 0.01% of the measured value + // Assuming top speed 10m/s, + // standard deviation = 0.01 * 10 = 0.1 m/s + // variance = 0.1^2 = 0.01 m/s^2 + + let measurement_noise: Matrix2 = Matrix2::new(0.01, 0.0, 0.0, 0.0); + + let kalman_filter = KalmanFilter::new( + initial_state, + initial_covariance, + transition_matrix, + control_matrix, + 
observation_matrix, + process_noise, + measurement_noise, + ); + + Localizer { + displacement: 0.0, + velocity: 0.0, + previous_velocity: 0.0, + acceleration: 0.0, + kalman_filter, + keyence_checker: KeyenceAgrees::new(), + keyence_val: 0.0, + optical_val: 0.0, + accelerometer_val: 0.0, + accelerometer_preprocessor: AccelerometerPreprocessor::new(), + } + } +} + +impl Default for Localizer { + fn default() -> Self { + Self::new() + } +} + +#[derive(Debug)] +pub enum PreprocessorError { + KeyenceUnacceptable, + AccelerometerUnnaceptable, +} + +impl Localizer { + /// Preprocesses the data from the sensors, checking for errors and outliers + pub fn preprocessor( + &mut self, + optical_data: Vec, + keyence_data: Vec, + accelerometer_data: RawAccelerometerData, + ) -> Result<(), PreprocessorError> { + let processed_optical_data = + process_optical_data(Vec::from_slice(&[optical_data.clone()]).unwrap()); + self.optical_val = processed_optical_data as f64; + + let keyence_status = self + .keyence_checker + .check_keyence_agrees(keyence_data.clone()); + + if keyence_status == SensorChecks::Unacceptable { + return Err(PreprocessorError::KeyenceUnacceptable); + } else { + self.keyence_val = (keyence_data[0] as f64) * STRIPE_WIDTH; + } + + let processed_accelerometer_data = self + .accelerometer_preprocessor + .process_data(accelerometer_data); + if processed_accelerometer_data.is_none() { + return Err(PreprocessorError::AccelerometerUnnaceptable); + } + + let processed_accelerometer_data = processed_accelerometer_data.unwrap(); + self.accelerometer_val = 0.0; + for i in 0..NUM_ACCELEROMETERS { + for _ in 0..NUM_AXIS { + self.accelerometer_val += processed_accelerometer_data[i] as f64; + } + } + self.accelerometer_val /= (NUM_ACCELEROMETERS * NUM_AXIS) as f64; + + Ok(()) + } + + /// Preprocesses the sensor data by calling the preprocessor and then runs the kalman filter + pub fn iteration( + &mut self, + optical_data: Vec, + keyence_data: Vec, + accelerometer_data: RawAccelerometerData, + ) -> Result<(), PreprocessorError> { + self.preprocessor( + optical_data.clone(), + keyence_data.clone(), + accelerometer_data.clone(), + )?; + + let control_input = Vector1::new(self.accelerometer_val); + + self.kalman_filter.predict(&control_input); + + let measurement = Vector2::new(self.keyence_val * STRIPE_WIDTH, self.optical_val); + + self.kalman_filter.update(&measurement); + + let state = self.kalman_filter.get_state(); + + self.displacement = state[0]; + self.velocity = state[1]; + self.acceleration = self.accelerometer_val; + self.previous_velocity = self.velocity; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_localizer_with_zeros() -> Result<(), PreprocessorError> { + let mut localizer = Localizer::default(); + + let optical_data: Vec = Vec::from_slice(&[0.0, 0.0]).unwrap(); + let raw_keyence_data: Vec = Vec::from_slice(&[0, 0]).unwrap(); + let raw_accelerometer_data: RawAccelerometerData = + RawAccelerometerData::from_slice(&[ + Vec::from_slice(&[0.0, 0.0, 0.0]).unwrap(), + Vec::from_slice(&[0.0, 0.0, 0.0]).unwrap(), + Vec::from_slice(&[0.0, 0.0, 0.0]).unwrap(), + Vec::from_slice(&[0.0, 0.0, 0.0]).unwrap(), + ]) + .unwrap(); + + localizer.iteration(optical_data, raw_keyence_data, raw_accelerometer_data)?; + + assert_eq!(localizer.displacement, 0.0); + assert_eq!(localizer.velocity, 0.0); + assert_eq!(localizer.acceleration, 0.0); + + Ok(()) + } +} diff --git a/lib/localisation/src/control/navigator.rs b/lib/localisation/src/control/navigator.rs deleted file mode 
100644 index 8b137891..00000000 --- a/lib/localisation/src/control/navigator.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/localisation/src/preprocessing/accelerometer.rs b/lib/localisation/src/preprocessing/accelerometer.rs index 30784f7f..f7ba3853 100644 --- a/lib/localisation/src/preprocessing/accelerometer.rs +++ b/lib/localisation/src/preprocessing/accelerometer.rs @@ -1,6 +1,7 @@ -use crate::types::{AccelerometerData, RawAccelerometerData, SensorChecks}; - -use super::super::types::{K_NUM_ACCELEROMETERS, K_NUM_ALLOWED_ACCELEROMETER_OUTLIERS, K_NUM_AXIS}; +use crate::types::{ + AccelerometerData, RawAccelerometerData, SensorChecks, NUM_ACCELEROMETERS, + NUM_ALLOWED_ACCELEROMETER_OUTLIERS, NUM_AXIS, +}; use heapless::Vec; /// Stores the quartiles of the data and the bounds for outliers @@ -38,9 +39,9 @@ pub struct AccelerometerPreprocessor { /// number of true values in reliable_accelerometers num_reliable_accelerometers: i32, /// true if accelerometer at index is reliable - reliable_accelerometers: [bool; K_NUM_ACCELEROMETERS], + reliable_accelerometers: [bool; NUM_ACCELEROMETERS], /// number of outliers detected for each accelerometer - num_outliers_per_accelerometer: [i32; K_NUM_ACCELEROMETERS], + num_outliers_per_accelerometer: [i32; NUM_ACCELEROMETERS], } impl AccelerometerPreprocessor { @@ -48,9 +49,9 @@ impl AccelerometerPreprocessor { /// By default, all accelerometers are deemed as reliable pub fn new() -> Self { Self { - num_reliable_accelerometers: K_NUM_ACCELEROMETERS as i32, - reliable_accelerometers: [true; K_NUM_ACCELEROMETERS], - num_outliers_per_accelerometer: [0; K_NUM_ACCELEROMETERS], + num_reliable_accelerometers: NUM_ACCELEROMETERS as i32, + reliable_accelerometers: [true; NUM_ACCELEROMETERS], + num_outliers_per_accelerometer: [0; NUM_ACCELEROMETERS], } } @@ -60,8 +61,8 @@ impl AccelerometerPreprocessor { /// are replaced with the median of the data pub fn handle_outliers( &mut self, - data: AccelerometerData, - ) -> Option> { + data: AccelerometerData, + ) -> Option> { let quartiles = self.calculate_quartiles(&data)?; let accelerometer_data = data @@ -90,12 +91,12 @@ impl AccelerometerPreprocessor { /// If more than one accelerometer is unreliable, None is returned pub fn calculate_quartiles( &self, - data: &AccelerometerData, + data: &AccelerometerData, ) -> Option { - if self.num_reliable_accelerometers == K_NUM_ACCELEROMETERS as i32 { + if self.num_reliable_accelerometers == NUM_ACCELEROMETERS as i32 { Some(self.get_quartiles(data)) - } else if self.num_reliable_accelerometers == (K_NUM_ACCELEROMETERS as i32 - 1) { - const SIZE: usize = K_NUM_ACCELEROMETERS - 1; + } else if self.num_reliable_accelerometers == (NUM_ACCELEROMETERS as i32 - 1) { + const SIZE: usize = NUM_ACCELEROMETERS - 1; let filtered_data: AccelerometerData = data .iter() .enumerate() @@ -115,9 +116,9 @@ impl AccelerometerPreprocessor { /// Unreliable data is deemed unacceptable and the function returns None pub fn process_data( &mut self, - data: RawAccelerometerData, - ) -> Option> { - let accelerometer_data: AccelerometerData = data + data: RawAccelerometerData, + ) -> Option> { + let accelerometer_data: AccelerometerData = data .iter() .map(|axis| axis.iter().fold(0.0, |acc, val| acc + val * val).sqrt()) .collect(); @@ -132,7 +133,7 @@ impl AccelerometerPreprocessor { } /// Sets accelerometers as unreliable if they have more than - /// K_NUM_ALLOWED_ACCELEROMETER_OUTLIERS outliers detected + /// NUM_ALLOWED_ACCELEROMETER_OUTLIERS outliers detected /// Deems the data unacceptable if more 
than 1 accelerometer is unreliable pub fn check_reliable(&mut self) -> SensorChecks { self.num_outliers_per_accelerometer @@ -140,14 +141,14 @@ impl AccelerometerPreprocessor { .enumerate() .for_each(|(i, val)| { if self.reliable_accelerometers[i] - && val >= &(K_NUM_ALLOWED_ACCELEROMETER_OUTLIERS as i32) + && val >= &(NUM_ALLOWED_ACCELEROMETER_OUTLIERS as i32) { self.reliable_accelerometers[i] = false; self.num_reliable_accelerometers -= 1; } }); - if self.num_reliable_accelerometers < K_NUM_ACCELEROMETERS as i32 - 1 { + if self.num_reliable_accelerometers < NUM_ACCELEROMETERS as i32 - 1 { return SensorChecks::Unacceptable; } @@ -177,7 +178,7 @@ impl AccelerometerPreprocessor { quartiles[0], quartiles[1], quartiles[2], - self.num_reliable_accelerometers < K_NUM_ACCELEROMETERS as i32, + self.num_reliable_accelerometers < NUM_ACCELEROMETERS as i32, ) } } @@ -193,14 +194,14 @@ mod tests { assert_eq!(preprocessor.num_reliable_accelerometers, 4); assert_eq!( preprocessor.reliable_accelerometers, - [true; K_NUM_ACCELEROMETERS] + [true; NUM_ACCELEROMETERS] ); assert_eq!( preprocessor.num_outliers_per_accelerometer, - [0; K_NUM_ACCELEROMETERS] + [0; NUM_ACCELEROMETERS] ); - let raw_data: RawAccelerometerData = + let raw_data: RawAccelerometerData = RawAccelerometerData::from_slice(&[ Vec::from_slice(&[1.0, 2.0, 3.0]).unwrap(), // sqrt(14) = 3.74 Vec::from_slice(&[4.0, 5.0, 6.0]).unwrap(), // sqrt(77) = 8.77 @@ -226,17 +227,17 @@ mod tests { assert_eq!(preprocessor.num_reliable_accelerometers, 4); assert_eq!( preprocessor.reliable_accelerometers, - [true; K_NUM_ACCELEROMETERS] + [true; NUM_ACCELEROMETERS] ); assert_eq!( preprocessor.num_outliers_per_accelerometer, - [0; K_NUM_ACCELEROMETERS] + [0; NUM_ACCELEROMETERS] ); preprocessor.reliable_accelerometers = [true, false, true, true]; preprocessor.num_reliable_accelerometers = 3; - let raw_data: RawAccelerometerData = + let raw_data: RawAccelerometerData = RawAccelerometerData::from_slice(&[ Vec::from_slice(&[1.0, 2.0, 3.0]).unwrap(), // sqrt(14) = 3.74 Vec::from_slice(&[4.0, 5.0, 6.0]).unwrap(), // sqrt(Median (3.74, 13.93, @@ -260,7 +261,7 @@ mod tests { pub fn test_get_quartiles() { let preprocessor = AccelerometerPreprocessor::new(); - let data: AccelerometerData = + let data: AccelerometerData = AccelerometerData::from_slice(&[1.0, 2.0, 3.0, 4.0]).unwrap(); let processed_data = preprocessor.get_quartiles(&data); @@ -273,7 +274,7 @@ mod tests { fn test_calculate_quartiles_max_reliable() { let preprocessor = AccelerometerPreprocessor::new(); - let data: AccelerometerData = + let data: AccelerometerData = AccelerometerData::from_slice(&[1.0, 2.0, 3.0, 4.0]).unwrap(); let processed_data = preprocessor.calculate_quartiles(&data); @@ -294,7 +295,7 @@ mod tests { preprocessor.reliable_accelerometers = [true, false, true, true]; preprocessor.num_reliable_accelerometers = 3; - let data: AccelerometerData = + let data: AccelerometerData = AccelerometerData::from_slice(&[1.0, 2.0, 3.0, 4.0]).unwrap(); let processed_data = preprocessor.calculate_quartiles(&data); @@ -315,13 +316,13 @@ mod tests { preprocessor.reliable_accelerometers = [true, false, true, true]; preprocessor.num_reliable_accelerometers = 3; - let data: AccelerometerData = + let data: AccelerometerData = AccelerometerData::from_slice(&[1.0, 2.0, 3.0, 10.0]).unwrap(); let processed_data = preprocessor.handle_outliers(data); assert!(processed_data.is_some()); - let processed_data: AccelerometerData = processed_data.unwrap(); + let processed_data: AccelerometerData = processed_data.unwrap(); 
assert_eq!(processed_data[0], 1.0); assert_eq!(processed_data[1], 3.0); // replace unreliable with median assert_eq!(processed_data[2], 3.0); @@ -334,7 +335,7 @@ mod tests { preprocessor.reliable_accelerometers = [true, false, false, true]; preprocessor.num_reliable_accelerometers = 2; - let data: AccelerometerData = + let data: AccelerometerData = AccelerometerData::from_slice(&[1.0, 2.0, 3.0, 10.0]).unwrap(); let processed_data = preprocessor.handle_outliers(data); diff --git a/lib/localisation/src/preprocessing/keyence.rs b/lib/localisation/src/preprocessing/keyence.rs index fd81cbb5..caafefe2 100644 --- a/lib/localisation/src/preprocessing/keyence.rs +++ b/lib/localisation/src/preprocessing/keyence.rs @@ -3,7 +3,7 @@ use heapless::Vec; #[derive(PartialEq, Debug)] pub enum SensorChecks { Acceptable, - Unnaceptable, + Unacceptable, } /// Checks if the two Keyence sensors are in agreement. @@ -25,9 +25,9 @@ impl KeyenceAgrees { } } - pub fn check_keyence_agrees(&mut self, keyence_data: Vec) -> SensorChecks { + pub fn check_keyence_agrees(&mut self, keyence_data: Vec) -> SensorChecks { if keyence_data[0] != keyence_data[1] && !self.previous_keyence_agreement { - return SensorChecks::Unnaceptable; + return SensorChecks::Unacceptable; } else { self.previous_keyence_agreement = keyence_data[0] == keyence_data[1]; } @@ -42,7 +42,7 @@ mod tests { #[test] fn test_acceptable_success() { - let keyence_data: Vec = Vec::from_slice(&[true, true]).unwrap(); + let keyence_data: Vec = Vec::from_slice(&[0, 1]).unwrap(); let mut keyence_agrees = KeyenceAgrees::new(); let desired_outcome = SensorChecks::Acceptable; let result = keyence_agrees.check_keyence_agrees(keyence_data); @@ -51,7 +51,7 @@ mod tests { #[test] fn test_acceptable_false_success() { - let keyence_data: Vec = Vec::from_slice(&[true, false]).unwrap(); + let keyence_data: Vec = Vec::from_slice(&[0, 1]).unwrap(); let mut keyence_agrees = KeyenceAgrees::new(); let desired_outcome = SensorChecks::Acceptable; let result = keyence_agrees.check_keyence_agrees(keyence_data); @@ -60,8 +60,8 @@ mod tests { #[test] fn test_acceptable_second_false_success() { - let first_keyence_data: Vec = Vec::from_slice(&[true, true]).unwrap(); - let second_keyence_data: Vec = Vec::from_slice(&[true, false]).unwrap(); + let first_keyence_data: Vec = Vec::from_slice(&[1, 1]).unwrap(); + let second_keyence_data: Vec = Vec::from_slice(&[1, 1]).unwrap(); let mut keyence_agrees = KeyenceAgrees::new(); let desired_outcome = SensorChecks::Acceptable; let initial_try = keyence_agrees.check_keyence_agrees(first_keyence_data); @@ -72,8 +72,8 @@ mod tests { #[test] fn test_acceptable_prev_false_success() { - let first_keyence_data: Vec = Vec::from_slice(&[true, false]).unwrap(); - let second_keyence_data: Vec = Vec::from_slice(&[true, true]).unwrap(); + let first_keyence_data: Vec = Vec::from_slice(&[1, 2]).unwrap(); + let second_keyence_data: Vec = Vec::from_slice(&[1, 1]).unwrap(); let mut keyence_agrees = KeyenceAgrees::new(); let desired_outcome = SensorChecks::Acceptable; let initial_try = keyence_agrees.check_keyence_agrees(first_keyence_data); @@ -84,11 +84,11 @@ mod tests { #[test] fn test_unnacceptable_prev_false_success() { - let first_keyence_data: Vec = Vec::from_slice(&[true, false]).unwrap(); - let second_keyence_data: Vec = Vec::from_slice(&[true, false]).unwrap(); + let first_keyence_data: Vec = Vec::from_slice(&[1, 2]).unwrap(); + let second_keyence_data: Vec = Vec::from_slice(&[2, 3]).unwrap(); let mut keyence_agrees = KeyenceAgrees::new(); let first_outcome 
= SensorChecks::Acceptable; - let second_outcome = SensorChecks::Unnaceptable; + let second_outcome = SensorChecks::Unacceptable; let initial_try = keyence_agrees.check_keyence_agrees(first_keyence_data); let result = keyence_agrees.check_keyence_agrees(second_keyence_data); assert_eq!(initial_try, first_outcome); diff --git a/lib/localisation/src/preprocessing/lib.rs b/lib/localisation/src/preprocessing/lib.rs deleted file mode 100644 index a7d02f83..00000000 --- a/lib/localisation/src/preprocessing/lib.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod accelerometer; diff --git a/lib/localisation/src/preprocessing/optical.rs b/lib/localisation/src/preprocessing/optical.rs index 2198f6ec..b237c660 100644 --- a/lib/localisation/src/preprocessing/optical.rs +++ b/lib/localisation/src/preprocessing/optical.rs @@ -1,60 +1,53 @@ +use crate::types::NUM_OPTICAL_SENSORS; use heapless::Vec; use libm::sqrtf; /// Processes the raw optical data to get the magnitude and added to the optical data for each sensor -pub fn process_data(raw_optical_data: Vec, 2>) -> Vec { - let mut optical_data: Vec = Vec::from_slice(&[0.0, 0.0]).unwrap(); +pub fn process_optical_data(raw_optical_data: Vec, NUM_OPTICAL_SENSORS>) -> f32 { + let mut total_magnitude: f32 = 0.0; - for i in 0..2 { + for sensor_data in raw_optical_data { let mut magnitude: f32 = 0.0; - for data in raw_optical_data[i].clone() { + for data in sensor_data { let data: f32 = data as f32; magnitude += data * data; } - optical_data[i] = sqrtf(magnitude); + total_magnitude += sqrtf(magnitude); } - optical_data + total_magnitude / NUM_OPTICAL_SENSORS as f32 } #[cfg(test)] mod tests { use super::*; + use crate::types::NUM_OPTICAL_SENSORS; #[test] fn test_correct_positive() { - let raw_optical_data: Vec, 2> = Vec::from_slice(&[ - Vec::from_slice(&[1.0, 1.0]).unwrap(), - Vec::from_slice(&[3.0, 4.0]).unwrap(), - ]) - .unwrap(); - let desired_outcome: Vec = Vec::from_slice(&[sqrtf(2.0), 5.0]).unwrap(); - let result = process_data(raw_optical_data); + let raw_optical_data: Vec, NUM_OPTICAL_SENSORS> = + Vec::from_slice(&[Vec::from_slice(&[1.0, 1.0]).unwrap()]).unwrap(); + let desired_outcome: f32 = sqrtf(2.0); + let result = process_optical_data(raw_optical_data); assert_eq!(result, desired_outcome); } #[test] fn test_correct_negative() { - let raw_optical_data: Vec, 2> = Vec::from_slice(&[ - Vec::from_slice(&[-4.0, -6.0]).unwrap(), - Vec::from_slice(&[-3.0, -1.0]).unwrap(), - ]) - .unwrap(); - let desired_outcome: Vec = Vec::from_slice(&[7.2111025, 3.1622777]).unwrap(); - let result = process_data(raw_optical_data); + let raw_optical_data: Vec, NUM_OPTICAL_SENSORS> = + Vec::from_slice(&[Vec::from_slice(&[-4.0, -6.0]).unwrap()]).unwrap(); + let desired_outcome: f32 = sqrtf(52.0); + let result = process_optical_data(raw_optical_data); assert_eq!(result, desired_outcome); } #[test] fn test_correct_zero() { - let raw_optical_data: Vec, 2> = Vec::from_slice(&[ - Vec::from_slice(&[0.0, 0.0]).unwrap(), - Vec::from_slice(&[0.0, 0.0]).unwrap(), - ]) - .unwrap(); - let desired_outcome: Vec = Vec::from_slice(&[0.0, 0.0]).unwrap(); - let result = process_data(raw_optical_data); + let raw_optical_data: Vec, NUM_OPTICAL_SENSORS> = + Vec::from_slice(&[Vec::from_slice(&[0.0, 0.0]).unwrap()]).unwrap(); + let desired_outcome: f32 = 0.0; + let result = process_optical_data(raw_optical_data); assert_eq!(result, desired_outcome); } } diff --git a/lib/localisation/src/types.rs b/lib/localisation/src/types.rs index 77042ef8..6a3446fc 100644 --- a/lib/localisation/src/types.rs +++ 
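// Illustrative sketch (not part of this diff): how KeyenceAgrees behaves across
// consecutive calls after the bool -> u32 change above: a single disagreement
// between the two stripe counters is tolerated, two in a row are not. The crate
// path `hyped_localisation` and the Vec<u32, 2> element type are assumptions.
use heapless::Vec;
use hyped_localisation::preprocessing::keyence::{KeyenceAgrees, SensorChecks};

fn example_keyence_tolerance() {
    let mut checker = KeyenceAgrees::new();
    let disagree: Vec<u32, 2> = Vec::from_slice(&[3, 4]).unwrap();
    // First disagreement: still acceptable, but remembered.
    assert_eq!(
        checker.check_keyence_agrees(disagree.clone()),
        SensorChecks::Acceptable
    );
    // Second disagreement in a row: flagged as unacceptable.
    assert_eq!(
        checker.check_keyence_agrees(disagree),
        SensorChecks::Unacceptable
    );
}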
b/lib/localisation/src/types.rs @@ -1,8 +1,9 @@ use heapless::Vec; -pub const K_NUM_ACCELEROMETERS: usize = 4; -pub const K_NUM_AXIS: usize = 3; -pub const K_NUM_ALLOWED_ACCELEROMETER_OUTLIERS: usize = 2; +pub const NUM_ACCELEROMETERS: usize = 4; +pub const NUM_AXIS: usize = 3; +pub const NUM_ALLOWED_ACCELEROMETER_OUTLIERS: usize = 2; +pub const NUM_OPTICAL_SENSORS: usize = 1; #[derive(PartialEq)] pub enum SensorChecks { diff --git a/telemetry/biome.json b/telemetry/biome.json index 53830897..96d90e25 100644 --- a/telemetry/biome.json +++ b/telemetry/biome.json @@ -12,7 +12,8 @@ }, "formatter": { "enabled": true, - "indentStyle": "tab" + "indentStyle": "tab", + "formatWithErrors": true }, "organizeImports": { "enabled": true @@ -26,6 +27,9 @@ "javascript": { "formatter": { "quoteStyle": "single" + }, + "parser": { + "unsafeParameterDecoratorsEnabled": true } } } diff --git a/telemetry/packages/constants/package.json b/telemetry/packages/constants/package.json index 21d9d2f2..c5f91279 100644 --- a/telemetry/packages/constants/package.json +++ b/telemetry/packages/constants/package.json @@ -15,8 +15,13 @@ "devDependencies": { "@hyped/telemetry-types": "workspace:*", "@hyped/tsconfig": "workspace:*", + "@types/node": "^22.12.0", "dts-cli": "^2.0.5", "tslib": "^2.5.3", "typescript": "^5.7.3" + }, + "dependencies": { + "yaml": "^2.7.0", + "zod": "^3.21.4" } } diff --git a/telemetry/packages/constants/src/index.ts b/telemetry/packages/constants/src/index.ts index 76ad210d..e0e672a4 100644 --- a/telemetry/packages/constants/src/index.ts +++ b/telemetry/packages/constants/src/index.ts @@ -1,5 +1,4 @@ -export { pods, POD_IDS } from './pods/pods'; -export type { PodId, Pods } from './pods/pods'; +export { pods, podIds } from './pods/pods'; export { ALL_POD_STATES, PASSIVE_STATES, diff --git a/telemetry/packages/constants/src/openmct/object-types/object-types.ts b/telemetry/packages/constants/src/openmct/object-types/object-types.ts index 287c3fe6..db81a653 100644 --- a/telemetry/packages/constants/src/openmct/object-types/object-types.ts +++ b/telemetry/packages/constants/src/openmct/object-types/object-types.ts @@ -1,16 +1,17 @@ import type { OpenMctObjectTypes } from '@hyped/telemetry-types'; +import type { telemetryTypes } from '../../pods/types'; -export const openMctObjectTypes: OpenMctObjectTypes = [ +export type OpenMctObjectTypeId = (typeof telemetryTypes)[number]; + +type StrictOpenMctObjectTypes = OpenMctObjectTypes & + { id: OpenMctObjectTypeId }[]; + +export const openMctObjectTypes: StrictOpenMctObjectTypes = [ { id: 'temperature', name: 'Temperature', icon: 'icon-telemetry', }, - { - id: 'thermistor', - name: 'Thermistor', - icon: 'icon-telemetry', - }, { id: 'acceleration', name: 'Acceleration', @@ -41,14 +42,29 @@ export const openMctObjectTypes: OpenMctObjectTypes = [ name: 'status', icon: 'icon-telemetry', }, + { + id: 'magnetism', + name: 'Magnetism', + icon: 'icon-telemetry', + }, { id: 'keyence', name: 'Keyence', icon: 'icon-telemetry', }, { - id: 'brake_feedback', - name: 'Brake Feedback', + id: 'resistance', + name: 'Resistance', + icon: 'icon-telemetry', + }, + { + id: 'levitation', + name: 'Levitation', + icon: 'icon-telemetry', + }, + { + id: 'binary-status', + name: 'Binary Status', icon: 'icon-telemetry', }, ]; diff --git a/telemetry/packages/constants/src/pods/common.ts b/telemetry/packages/constants/src/pods/common.ts deleted file mode 100644 index a0fe7177..00000000 --- a/telemetry/packages/constants/src/pods/common.ts +++ /dev/null @@ -1,82 +0,0 @@ -export const 
accelerometerCommon = { - format: 'float', - type: 'motion', - unit: 'm/s²', - limits: { - critical: { - low: -150, - high: 150, - }, - }, - rms_noise: 16.25 * 10 ** -3, // RMS rms_noise [mg] at ±15g range (~ ±150m/s^2) - sampling_time: 500, -} as const; - -// datasheet: https://www.st.com/en/mems-and-sensors/stts22h.html#st_description_sec-nav-tab -export const thermistorCommon = { - format: 'float', - type: 'temperature', - unit: '°C', - limits: { - critical: { - low: -40, - high: 125, - }, - warning: { - low: 20, - high: 100, - }, - }, - rms_noise: 0.05, // RMS rms_noise - sampling_time: 500, // test value. Datasheet specifies clock frequency range as (10 - 400 kHz) -} as const; - -export const pressureCommon = { - format: 'float', - type: 'pressure', - unit: 'bar', - rms_noise: 1 * 10 ** -3, // placeholder estimate of 1 mbar, to be confirmed with datasheet when chosen sensor confirmed - sampling_time: 500, -} as const; - -export const hallEffectCommon = { - format: 'float', - type: 'magnetism', - unit: 'A', - limits: { - critical: { - low: 0, - high: 500, - }, - }, - rms_noise: 0.5, // placeholder guesstimate, waiting on datasheet - sampling_time: 500, -} as const; - -export const keyenceCommon = { - format: 'integer', - type: 'keyence', - unit: 'number of stripes', - limits: { - critical: { - low: 0, - high: 16, - }, - }, - rms_noise: 0, - sampling_time: 500, -} as const; - -export const levitationHeightCommon = { - format: 'float', - type: 'levitation', - unit: 'mm', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 0, // placeholder - sampling_time: 500, // placeholder -} as const; diff --git a/telemetry/packages/constants/src/pods/pods.ts b/telemetry/packages/constants/src/pods/pods.ts index bc9f0b39..ca2e77e1 100644 --- a/telemetry/packages/constants/src/pods/pods.ts +++ b/telemetry/packages/constants/src/pods/pods.ts @@ -1,879 +1,81 @@ -import type { Pod } from '@hyped/telemetry-types'; -import { - accelerometerCommon, - hallEffectCommon, - keyenceCommon, - levitationHeightCommon, - pressureCommon, - thermistorCommon, -} from './common'; - -export const POD_IDS = ['pod_1', 'pod_2024'] as const; -export type PodId = (typeof POD_IDS)[number]; -export type Pods = Record; - -export const pods: Pods = { - pod_1: { - id: 'pod_1', - name: 'Pod Ness', - operationMode: 'ALL_SYSTEMS_ON', - measurements: { - // ************************************ ACCELEROMETERS ************************************ // - accelerometer_1: { - name: 'Accelerometer 1', - key: 'accelerometer_1', - ...accelerometerCommon, - }, - accelerometer_2: { - name: 'Accelerometer 2', - key: 'accelerometer_2', - ...accelerometerCommon, - }, - accelerometer_3: { - name: 'Accelerometer 3', - key: 'accelerometer_3', - ...accelerometerCommon, - }, - accelerometer_4: { - name: 'Accelerometer 4', - key: 'accelerometer_4', - ...accelerometerCommon, - }, - accelerometer_avg: { - name: 'Accelerometer Average', - key: 'accelerometer_avg', - ...accelerometerCommon, - }, - - // ************************************ NAVIGATION ************************************ // - displacement: { - name: 'Displacement', - key: 'displacement', - format: 'float', - type: 'motion', - unit: 'm', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 0, - sampling_time: accelerometerCommon.sampling_time, - }, - velocity: { - name: 'Velocity', - key: 'velocity', - format: 'float', - type: 'motion', - unit: 'm/s', - limits: { - critical: { - low: 0, - high: 50, - }, - }, - rms_noise: accelerometerCommon.rms_noise, - 
sampling_time: accelerometerCommon.sampling_time, - }, - acceleration: { - name: 'Acceleration', - key: 'acceleration', - format: 'float', - type: 'motion', - unit: 'm/s²', - limits: { - critical: { - low: 0, - high: 5, - }, - }, - rms_noise: accelerometerCommon.rms_noise, - sampling_time: accelerometerCommon.sampling_time, - }, - - // ************************************ PRESSURE ************************************ // - pressure_back_pull: { - name: 'Pressure – Back Pull', - key: 'pressure_back_pull', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_front_pull: { - name: 'Pressure – Front Pull', - key: 'pressure_front_pull', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_front_push: { - name: 'Pressure – Front Push', - key: 'pressure_front_push', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_back_push: { - name: 'Pressure – Back Push', - key: 'pressure_back_push', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_brakes_reservoir: { - name: 'Pressure – Brakes Reservoir', - key: 'pressure_brakes_reservoir', - ...pressureCommon, - limits: { - critical: { - low: 3, - high: 7.4, - }, - warning: { - low: 3.5, - high: 6.9, - }, - }, - }, - pressure_active_suspension_reservoir: { - name: 'Pressure – Active Suspension Reservoir', - key: 'pressure_active_suspension_reservoir', - ...pressureCommon, - limits: { - critical: { - low: 3, - high: 7.4, - }, - warning: { - low: 3.5, - high: 6.9, - }, - }, - }, - pressure_front_brake: { - name: 'Pressure – Front Brake', - key: 'pressure_front_brake', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 4.2, - }, - warning: { - low: -0.19, - high: 4, - }, - }, - }, - pressure_back_brake: { - name: 'Pressure – Back Brake', - key: 'pressure_back_brake', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 4.2, - }, - warning: { - low: -0.19, - high: 4, - }, - }, - }, - - // ************************************ THERMISTORS ************************************ // - thermistor_1: { - name: 'Thermistor 1', - key: 'thermistor_1', - ...thermistorCommon, - }, - thermistor_2: { - name: 'Thermistor 2', - key: 'thermistor_2', - ...thermistorCommon, - }, - thermistor_3: { - name: 'Thermistor 3', - key: 'thermistor_3', - ...thermistorCommon, - }, - thermistor_4: { - name: 'Thermistor 4', - key: 'thermistor_4', - ...thermistorCommon, - }, - thermistor_5: { - name: 'Thermistor 5', - key: 'thermistor_5', - ...thermistorCommon, - }, - thermistor_6: { - name: 'Thermistor 6', - key: 'thermistor_6', - ...thermistorCommon, - }, - thermistor_7: { - name: 'Thermistor 7', - key: 'thermistor_7', - ...thermistorCommon, - }, - thermistor_8: { - name: 'Thermistor 8', - key: 'thermistor_8', - ...thermistorCommon, - }, - thermistor_9: { - name: 'Thermistor 9', - key: 'thermistor_9', - ...thermistorCommon, - }, - thermistor_10: { - name: 'Thermistor 10', - key: 'thermistor_10', - ...thermistorCommon, - }, - thermistor_11: { - name: 'Thermistor 11', - key: 'thermistor_11', - ...thermistorCommon, - }, - thermistor_12: { - name: 'Thermistor 12', - key: 'thermistor_12', - ...thermistorCommon, - }, - // thermistor_13: { - // name: 'Thermistor 13', - // key: 'thermistor_13', - // 
...thermistorCommon, - // }, - // thermistor_14: { - // name: 'Thermistor 14', - // key: 'thermistor_14', - // ...thermistorCommon, - // }, - // thermistor_15: { - // name: 'Thermistor 15', - // key: 'thermistor_15', - // ...thermistorCommon, - // }, - // thermistor_16: { - // name: 'Thermistor 16', - // key: 'thermistor_16', - // ...thermistorCommon, - // }, - - // ************************************ HALL EFFECTS ************************************ // - hall_effect_1: { - name: 'Hall Effect 1', - key: 'hall_effect_1', - ...hallEffectCommon, - }, - hall_effect_2: { - name: 'Hall Effect 2', - key: 'hall_effect_2', - ...hallEffectCommon, - }, - - // ************************************ STATUS ************************************ // - brake_clamp_status: { - name: 'Brake Clamp Status', - key: 'brake_clamp_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'CLAMPED', - }, - { - value: 0, - string: 'UNCLAMPED', - }, - ], - }, - pod_raised_status: { - name: 'Pod Raised Status', - key: 'pod_raised_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'RAISED', - }, - { - value: 0, - string: 'LOWERED', - }, - ], - }, - - battery_status: { - name: 'Battery Status', - key: 'battery_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'HEALTHY', - }, - { - value: 0, - string: 'UNHEALTHY', - }, - ], - }, - - motor_controller_status: { - name: 'Motor Controller Status', - key: 'motor_controller_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'HEALTHY', - }, - { - value: 0, - string: 'UNHEALTHY', - }, - ], - }, - - high_power_status: { - name: 'High Power Status', - key: 'high_power_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'ACTIVE', - }, - { - value: 0, - string: 'OFF', - }, - ], - }, - - // ************************************ KEYENCE ************************************ // - keyence_1: { - name: 'Keyence 1', - key: 'keyence_1', - ...keyenceCommon, - }, - keyence_2: { - name: 'Keyence 2', - key: 'keyence_2', - ...keyenceCommon, - }, - - // ************************************ POWER ************************************ // - power_line_resistance: { - name: 'Power Line Resistance', - key: 'power_line_resistance', - format: 'integer', - type: 'resistance', - unit: 'kΩ', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 0.1, - sampling_time: 500, - }, - }, - }, - - pod_2024: { - id: 'pod_2024', - name: 'Poddington', - operationMode: 'LEVITATION_ONLY', - measurements: { - // ************************************ ACCELEROMETERS ************************************ // - accelerometer_1: { - name: 'Accelerometer 1', - key: 'accelerometer_1', - ...accelerometerCommon, - }, - accelerometer_2: { - name: 'Accelerometer 2', - key: 'accelerometer_2', - ...accelerometerCommon, - }, - accelerometer_3: { - name: 'Accelerometer 3', - key: 'accelerometer_3', - ...accelerometerCommon, - }, - accelerometer_4: { - name: 'Accelerometer 4', - key: 'accelerometer_4', - ...accelerometerCommon, - }, - accelerometer_avg: { - name: 'Accelerometer Average', - key: 'accelerometer_avg', - ...accelerometerCommon, - }, - - // ************************************ NAVIGATION ************************************ // - displacement: { - name: 'Displacement', - key: 'displacement', - format: 'float', - type: 'motion', - unit: 'm', - limits: 
{ - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 0, - sampling_time: accelerometerCommon.sampling_time, - }, - velocity: { - name: 'Velocity', - key: 'velocity', - format: 'float', - type: 'motion', - unit: 'm/s', - limits: { - critical: { - low: 0, - high: 50, - }, - }, - rms_noise: 0, - sampling_time: accelerometerCommon.sampling_time, - }, - acceleration: { - name: 'Acceleration', - key: 'acceleration', - format: 'float', - type: 'motion', - unit: 'm/s²', - limits: { - critical: { - low: 0, - high: 5, - }, - }, - rms_noise: accelerometerCommon.rms_noise, - sampling_time: accelerometerCommon.sampling_time, - }, - - // ************************************ PRESSURE ************************************ // - pressure_back_pull: { - name: 'Pressure – Back Pull', - key: 'pressure_back_pull', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_front_pull: { - name: 'Pressure – Front Pull', - key: 'pressure_front_pull', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_front_push: { - name: 'Pressure – Front Push', - key: 'pressure_front_push', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_back_push: { - name: 'Pressure – Back Push', - key: 'pressure_back_push', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 5.5, - }, - warning: { - low: -0.19, - high: 5.2, - }, - }, - }, - pressure_brakes_reservoir: { - name: 'Pressure – Brakes Reservoir', - key: 'pressure_brakes_reservoir', - ...pressureCommon, - limits: { - critical: { - low: 3, - high: 7.4, - }, - warning: { - low: 3.5, - high: 6.9, - }, - }, - }, - pressure_active_suspension_reservoir: { - name: 'Pressure – Active Suspension Reservoir', - key: 'pressure_active_suspension_reservoir', - ...pressureCommon, - limits: { - critical: { - low: 3, - high: 7.4, - }, - warning: { - low: 3.5, - high: 6.9, - }, - }, - }, - pressure_front_brake: { - name: 'Pressure – Front Brake', - key: 'pressure_front_brake', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 4.2, - }, - warning: { - low: -0.19, - high: 4, - }, - }, - }, - pressure_back_brake: { - name: 'Pressure – Back Brake', - key: 'pressure_back_brake', - ...pressureCommon, - limits: { - critical: { - low: -0.2, - high: 4.2, - }, - warning: { - low: -0.19, - high: 4, - }, - }, - }, - - // ************************************ THERMISTORS ************************************ // - thermistor_1: { - name: 'Thermistor 1', - key: 'thermistor_1', - ...thermistorCommon, - }, - thermistor_2: { - name: 'Thermistor 2', - key: 'thermistor_2', - ...thermistorCommon, - }, - thermistor_3: { - name: 'Thermistor 3', - key: 'thermistor_3', - ...thermistorCommon, - }, - thermistor_4: { - name: 'Thermistor 4', - key: 'thermistor_4', - ...thermistorCommon, - }, - thermistor_5: { - name: 'Thermistor 5', - key: 'thermistor_5', - ...thermistorCommon, - }, - thermistor_6: { - name: 'Thermistor 6', - key: 'thermistor_6', - ...thermistorCommon, - }, - thermistor_7: { - name: 'Thermistor 7', - key: 'thermistor_7', - ...thermistorCommon, - }, - thermistor_8: { - name: 'Thermistor 8', - key: 'thermistor_8', - ...thermistorCommon, - }, - thermistor_9: { - name: 'Thermistor 9', - key: 'thermistor_9', - ...thermistorCommon, - }, - thermistor_10: { - name: 'Thermistor 10', - key: 'thermistor_10', - 
...thermistorCommon, - }, - thermistor_11: { - name: 'Thermistor 11', - key: 'thermistor_11', - ...thermistorCommon, - }, - thermistor_12: { - name: 'Thermistor 12', - key: 'thermistor_12', - ...thermistorCommon, - }, - - // ************************************ HALL EFFECTS ************************************ // - hall_effect_1: { - name: 'Hall Effect 1', - key: 'hall_effect_1', - ...hallEffectCommon, - }, - hall_effect_2: { - name: 'Hall Effect 2', - key: 'hall_effect_2', - ...hallEffectCommon, - }, - - // ************************************ STATUS ************************************ // - brake_clamp_status: { - name: 'Brake Clamp Status', - key: 'brake_clamp_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'CLAMPED', - }, - { - value: 0, - string: 'UNCLAMPED', - }, - ], - }, - pod_raised_status: { - name: 'Pod Raised Status', - key: 'pod_raised_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'RAISED', - }, - { - value: 0, - string: 'LOWERED', - }, - ], - }, - - battery_status: { - name: 'Battery Status', - key: 'battery_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'HEALTHY', - }, - { - value: 0, - string: 'UNHEALTHY', - }, - ], - }, - - motor_controller_status: { - name: 'Motor Controller Status', - key: 'motor_controller_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'HEALTHY', - }, - { - value: 0, - string: 'UNHEALTHY', - }, - ], - }, - - high_power_status: { - name: 'High Power Status', - key: 'high_power_status', - format: 'enum', - type: 'status', - unit: 'state', - enumerations: [ - { - value: 1, - string: 'ACTIVE', - }, - { - value: 0, - string: 'OFF', - }, - ], - }, - - // ************************************ KEYENCE ************************************ // - keyence_1: { - name: 'Keyence 1', - key: 'keyence_1', - ...keyenceCommon, - }, - keyence_2: { - name: 'Keyence 2', - key: 'keyence_2', - ...keyenceCommon, - }, - - // ************************************ POWER ************************************ // - power_line_resistance: { - name: 'Power Line Resistance', - key: 'power_line_resistance', - format: 'integer', - type: 'resistance', - unit: 'kΩ', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 0.1, - sampling_time: 500, - }, - - // ************************************ LEVITATION ************************************ // - levitation_height_1: { - name: 'Levitation Height 1', - key: 'levitation_height_1', - ...levitationHeightCommon, - }, - levitation_height_2: { - name: 'Levitation Height 2', - key: 'levitation_height_2', - ...levitationHeightCommon, - }, - levitation_height_3: { - name: 'Levitation Height 3', - key: 'levitation_height_3', - ...levitationHeightCommon, - }, - levitation_height_4: { - name: 'Levitation Height 4', - key: 'levitation_height_4', - ...levitationHeightCommon, - }, - levitation_height_lateral_1: { - name: 'Levitation Height Lateral 1', - key: 'levitation_height_lateral_1', - format: 'float', - type: 'levitation', - unit: 'mm', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 2, // from Time-of-Flight datasheet - sampling_time: 500, - }, - levitation_height_lateral_2: { - name: 'Levitation Height 2', - key: 'levitation_height_lateral_2', - format: 'float', - type: 'levitation', - unit: 'mm', - limits: { - critical: { - low: 0, - high: 100, - }, - }, - rms_noise: 2, // from 
Time-of-Flight datasheet - sampling_time: 500, - }, - }, - }, +import * as fs from 'node:fs'; +import * as path from 'node:path'; +import { PodSchema } from '@hyped/telemetry-types'; +import * as YAML from 'yaml'; +import { z } from 'zod'; +import { telemetryTypes } from './types'; + +const CONFIG_FILE_NAME = 'pods.yaml'; +// Root of hyped repo +const CONFIG_PATH = path.join( + __dirname, + '..', + '..', + '..', + '..', + '..', + 'config', + CONFIG_FILE_NAME, +); + +const RawPodsSchema = z.object({ + pods: z.record( + z.object({ + label: z.string(), + measurements: z.record(z.object({}).passthrough()), + statuses: z.record(z.object({}).passthrough()), + }), + ), +}); + +// We also want to check the 'type' field of each measurement and status is one of the object types +// It would be unwise to add this directly to the schema (in the types package) because it would +// break the circular dependency between the types and constants packages. +type MeasurementType = (typeof telemetryTypes)[number]; +const validateType = ( + ctx: z.RefinementCtx, + items: Record, + type: 'measurements' | 'statuses', +) => { + for (const [id, item] of Object.entries(items)) { + const itemType = (item as { type: string }).type; + if (!telemetryTypes.includes(itemType as MeasurementType)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Invalid ${type.slice(0, -1)} type "${itemType}"`, + path: [type, id, 'type'], + }); + } + } }; +const ExtendedPodSchema = PodSchema.superRefine((pod, ctx) => { + validateType(ctx, pod.measurements, 'measurements'); + validateType(ctx, pod.statuses, 'statuses'); +}); + +const yamlContent = fs.readFileSync(CONFIG_PATH, 'utf8'); +const yamlData = RawPodsSchema.parse(YAML.parse(yamlContent)); + +export const pods = Object.fromEntries( + Object.entries(yamlData.pods).map(([podId, podData]) => [ + podId, + ExtendedPodSchema.parse({ + id: podId, + ...podData, + measurements: Object.fromEntries( + Object.entries(podData.measurements).map(([id, measurement]) => [ + id, + { id, ...measurement }, + ]), + ), + statuses: Object.fromEntries( + Object.entries(podData.statuses).map(([id, status]) => [ + id, + { id, ...status }, + ]), + ), + }), + ]), +); + +export const podIds = Object.keys(pods); diff --git a/telemetry/packages/constants/src/pods/types.ts b/telemetry/packages/constants/src/pods/types.ts new file mode 100644 index 00000000..dff7f4b6 --- /dev/null +++ b/telemetry/packages/constants/src/pods/types.ts @@ -0,0 +1,14 @@ +export const telemetryTypes = [ + 'temperature', + 'acceleration', + 'pressure', + 'hall_effect', + 'displacement', + 'velocity', + 'status', + 'magnetism', + 'keyence', + 'resistance', + 'levitation', + 'binary-status', +] as const; diff --git a/telemetry/packages/e2e-tests/.gitignore b/telemetry/packages/e2e-tests/.gitignore deleted file mode 100644 index 68c5d18f..00000000 --- a/telemetry/packages/e2e-tests/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -node_modules/ -/test-results/ -/playwright-report/ -/blob-report/ -/playwright/.cache/ diff --git a/telemetry/packages/e2e-tests/biome.json b/telemetry/packages/e2e-tests/biome.json deleted file mode 100644 index 784b6d85..00000000 --- a/telemetry/packages/e2e-tests/biome.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": ["../../biome.json"] -} diff --git a/telemetry/packages/e2e-tests/lib/mqtt.ts b/telemetry/packages/e2e-tests/lib/mqtt.ts deleted file mode 100644 index 17626784..00000000 --- a/telemetry/packages/e2e-tests/lib/mqtt.ts +++ /dev/null @@ -1,57 +0,0 @@ -import mqtt from 'mqtt'; - -export const 
client = mqtt.connect( - process.env.E2E_TEST_MQTT_BROKER || 'mqtt://localhost:1883', -); - -type MqttMessageValidation = (receivedTopic: string, message: Buffer) => void; - -/** - * Validates a message received on an MQTT topic. - * @param topic The topic the message will be send on - * @param validate A validation function which will be called with the received topic and message values - * @param timeout Time to wait (in ms) before giving up - */ -export async function validateMqttMessage( - trigger: () => void, - validate: MqttMessageValidation, - timeout = 1000, -): Promise { - const receivedMessages: { topic: string; message: Buffer }[] = []; - - return new Promise(async (resolve, reject) => { - const client = mqtt.connect( - process.env.E2E_TEST_MQTT_BROKER || 'mqtt://localhost:1883', - ); - - client.on('connect', async () => { - await client.subscribeAsync('#'); - - // Handle incoming messages - client.on('message', (receivedTopic, message) => { - receivedMessages.push({ topic: receivedTopic, message }); - }); - - trigger(); - - // Check that the message is in the received messages - const interval = setInterval(() => { - for (const receivedMessage of receivedMessages) { - try { - validate(receivedMessage.topic, receivedMessage.message); - clearInterval(interval); - client.end(); - resolve(); - } catch (e) { - // Ignore errors - } - } - }, 100); - }); - - // Timeout if the message is not received - setTimeout(() => { - reject(new Error(`Timeout waiting for message.`)); - }, timeout); - }); -} diff --git a/telemetry/packages/e2e-tests/package.json b/telemetry/packages/e2e-tests/package.json deleted file mode 100644 index c8b03b5a..00000000 --- a/telemetry/packages/e2e-tests/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@hyped/e2e-tests", - "version": "1.0.0", - "description": "End-to-end tests for HYPED Telemetry", - "main": "index.js", - "scripts": { - "e2e:test": "playwright test" - }, - "dependencies": { - "@hyped/telemetry-server": "workspace:*", - "@hyped/telemetry-ui": "workspace:*", - "mqtt": "^5.5.0" - }, - "devDependencies": { - "@hyped/tsconfig": "workspace:*", - "@playwright/test": "^1.42.1", - "@types/node": "^22.12.0" - } -} diff --git a/telemetry/packages/e2e-tests/playwright.config.ts b/telemetry/packages/e2e-tests/playwright.config.ts deleted file mode 100644 index 4ac81201..00000000 --- a/telemetry/packages/e2e-tests/playwright.config.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { defineConfig, devices } from '@playwright/test'; - -/** - * Read environment variables from file. - * https://github.com/motdotla/dotenv - */ -// require('dotenv').config(); - -/** - * See https://playwright.dev/docs/test-configuration. - */ -export default defineConfig({ - testDir: './tests', - /* Run tests in files in parallel */ - fullyParallel: true, - /* Fail the build on CI if you accidentally left test.only in the source code. */ - forbidOnly: !!process.env.CI, - /* Retry on CI only */ - retries: process.env.CI ? 2 : 0, - /* Opt out of parallel tests on CI. */ - workers: process.env.CI ? 1 : undefined, - /* Reporter to use. See https://playwright.dev/docs/test-reporters */ - reporter: 'html', - /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ - use: { - /* Base URL to use in actions like `await page.goto('/')`. */ - // baseURL: 'http://127.0.0.1:3000', - - /* Collect trace when retrying the failed test. 
See https://playwright.dev/docs/trace-viewer */ - trace: 'on-first-retry', - }, - - /* Configure projects for major browsers */ - projects: [ - { - name: 'chromium', - use: { ...devices['Desktop Chrome'] }, - }, - - { - name: 'firefox', - use: { ...devices['Desktop Firefox'] }, - }, - - { - name: 'webkit', - use: { ...devices['Desktop Safari'] }, - }, - ], - - /* Run your local dev server before starting the tests */ - webServer: { - command: 'cd ../../ && pnpm dev:test', // run from the root of the monorepo - url: 'http://localhost:5173', - reuseExistingServer: !process.env.CI, - }, -}); diff --git a/telemetry/packages/e2e-tests/tests/example.spec.ts b/telemetry/packages/e2e-tests/tests/example.spec.ts deleted file mode 100644 index 251c4d71..00000000 --- a/telemetry/packages/e2e-tests/tests/example.spec.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { expect, test } from '@playwright/test'; -import { client, validateMqttMessage } from '../lib/mqtt'; - -test('has title', async ({ page }) => { - await page.goto('http://localhost:5173'); - - await expect(page).toHaveTitle('HYPED24 | Telemetry'); -}); - -test('example mqtt test', async () => { - await validateMqttMessage( - // Pass in the function which will trigger the MQTT message to be sent. - // For example, this could be pushing a button on the GUI. - () => { - client.publish('hello', 'world'); - }, - // The validation function. Here you can validate that the topic and message body received is as expected. - (topic, message) => { - expect(topic).toBe('hello'); - expect(message.toString()).toBe('world'); - }, - ); -}); diff --git a/telemetry/packages/e2e-tests/tests/test-todo.md b/telemetry/packages/e2e-tests/tests/test-todo.md deleted file mode 100644 index 6b411d0a..00000000 --- a/telemetry/packages/e2e-tests/tests/test-todo.md +++ /dev/null @@ -1,101 +0,0 @@ -# End to End Testing Briefing - -Full list: [Issue #102](https://github.com/Hyp-ed/hyped-2024/issues/102) - -
    -
1. All inputs on the sidebar, including setting levitation height, are visible with correct labels and the correct MQTT state transition message is emitted (for the GO button, make sure that the enable toggle works) (see the sketch after this list)
   - Issues
     - Setting Levitation Height
     - Correct Labels Visible
     - MQTT State Transition Message Emitted (Enable Toggle should work for 'GO' button)
2. Switching between pods changes the selected pod for certain elements
   - Issues
     - Setting Levitation Height
     - Correct Labels Visible
     - MQTT State Transition Message Emitted (Enable Toggle should work for 'GO' button)
3. All view options are listed, visible and render the correct component
   - Issues
     - Setting Levitation Height
     - Correct Labels Visible
     - MQTT State Transition Message Emitted (Enable Toggle should work for 'GO' button)
4. State machine diagram renders with the expected nodes visible
   - Issues
5. Open MCT plots measurements on graphs correctly (historical test)
   - Issues
6. Open MCT plots live measurements from MQTT messages (realtime test)
   - Issues
7. Open MCT logs a fault when a measurement limit is exceeded (check warning and critical limits)
   - Issues
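To make item 1 concrete, a test along these lines could be written against the `validateMqttMessage` helper from `lib/mqtt.ts` (removed in this diff). This is only a sketch: the page URL, the 'Enable'/'GO' selectors and the `state_request` topic fragment are assumptions, not the real UI contract.

```ts
import { expect, test } from '@playwright/test';
import { validateMqttMessage } from '../lib/mqtt';

test('GO button emits a state transition once enabled', async ({ page }) => {
  await page.goto('http://localhost:5173');

  await validateMqttMessage(
    // Trigger: enable the GO button via its toggle, then press it (labels are assumed)
    async () => {
      await page.getByLabel('Enable').click();
      await page.getByRole('button', { name: 'GO' }).click();
    },
    // Validation: some state-transition topic should carry a non-empty payload (topic fragment is assumed)
    (topic, message) => {
      expect(topic).toContain('state_request');
      expect(message.toString()).not.toBe('');
    },
  );
});
```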
- -More to come... - - - - diff --git a/telemetry/packages/e2e-tests/tsconfig.json b/telemetry/packages/e2e-tests/tsconfig.json deleted file mode 100644 index 20fb9de9..00000000 --- a/telemetry/packages/e2e-tests/tsconfig.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "@hyped/tsconfig/base.json", - "compilerOptions": { - "jsx": "react" - } -} diff --git a/telemetry/packages/fake/README.md b/telemetry/packages/fake/README.md deleted file mode 100644 index f285482b..00000000 --- a/telemetry/packages/fake/README.md +++ /dev/null @@ -1,337 +0,0 @@ -# Telemetry - Fake Data Generation - -# Instructions of Use - -To begin the data simulation program, navigate to the `../server/fake/` directory. Run the command `> node main.js`. -Optional flags: - -
    -
- `--runtime [number]`: specifies the simulation time (in seconds) with the following number argument. If left undefined, the default runtime is 30s, with the option to exit the process by running ^C anytime.
- `--random`: sets the random flag to true, invoking the randomising data generation logic for the full simulation. Primarily for debugging and front-end testing purposes.
- `--specific`: allows the choice to only simulate specific sensors. The permissible sensor types currently include:
  - motion
  - pressure
  - temperature
  - keyence
  - resistance
  - magnetism
  - levitation

  Ensure that they are spelt correctly, as any typos will be rejected by the program and won't be simulated.
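For example, the flags can be combined as `node main.js --runtime 60 --random`, or restricted to a subset of sensors with `node main.js --specific temperature pressure levitation` (sensor names are lowercase); the runtime value and sensor choices here are only illustrative.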
### Purpose

To iteratively generate transient and physically reasonable series of data points for the readings generated by all numerical-based sensors on the pod. Currently this does not cover the enumerated measurements, i.e. on/off status readings. This is effectively a simulation of the expected sensor readings during a live run. Data will be uploaded live to the GUI through the MQTT server and assessed for error handling and other metrics.

All of the thermistors have the exact same property values. In actuality, their readings will differ as they are placed in different locations across the pod. However, the data structure does not hold this information, so they are equivalent and only one generic 'thermistor' object will be studied.

In contrast, the pressure gauges have descriptive names which allow one to infer their likely behaviour during operation (front/back push refers to gauge pressure upon acceleration, which would see an absolute increase in gauge pressure at both ends of the pod due to the greater stagnation pressure at the nose of the pod and the pressure wake field behind it). Similarly, the reservoir pressure gauges can reasonably be assumed not to vary significantly, but as temperature rises the pressure will increase slightly due to Amontons' law.

### Methodology

The program records and stores all values calculated by each sensor's methods defined in `sensors.ts` over a user-specified timeframe, with each sensor set to a specific sampling time interval. `main.ts` filters and constructs an array of relevant and unique sensors from the Pod Ness sensor object structure. A new interface called `SensorData` is created. It extends `RangeMeasurement`, adding the properties `currentValue` and `movingAvg`, and encapsulates the measurement object with the sensor's name as a `Record`, reflecting the object shape of the pod's sensor objects.

A Singleton class was created in its dedicated file, `data-manager.ts`. When the program is run, a single instance of this class is created. The instance holds the current set of data, and at each iteration it is updated before the current data is pushed into data storage arrays for all sensors. Once created, this instance and its data property are accessible from both the main file and the pod behaviours class file, which holds all the methods written to generate each sensor's next value.

In the main file, the program's main function, `generateDataSeries`, runs a loop iterating through the time period defined. For each sensor type, it calls the relevant static method in the `Behaviour` class. Some sensors can be categorised, e.g. the navigation sensors: the values for displacement, velocity and acceleration are interdependent. As the pod's velocity increases, its displacement and acceleration can be calculated, given the acceleration constraints provided by its critical limits nested object.

A logistic function was chosen as the appropriate function for pod velocity to follow. This allows us to minimise the time to reach maximum speed by adjusting the peak acceleration (dv/dt) to reach, but not exceed, 5 m/s² at the velocity-time inflection point (one possible parameterisation is given at the end of this section).

Similar functions have been or are in the process of being created for the rest of the sensors. Complexity varies, and some require a lot more guesswork.

`generateDataSeries` takes a boolean parameter `random`, set to false by default. If the user sets this to true, iteration will be completely random, selecting sensors' new values as a random number between their critical limits.
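As a reference, one way to write the logistic velocity profile described above is the following; the symbols v_max, k and t_0 are illustrative and not taken from the code:

$$
v(t) = \frac{v_{\max}}{1 + e^{-k\,(t - t_0)}},
\qquad
a(t) = \frac{dv}{dt} = k\,v(t)\left(1 - \frac{v(t)}{v_{\max}}\right),
\qquad
a_{\max} = a(t_0) = \frac{k\,v_{\max}}{4} \le 5~\mathrm{m/s^2}
$$

Choosing the growth rate k as close as possible to 4 · (5 m/s²) / v_max gives the fastest ramp that still respects the acceleration limit at the inflection point.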
      -

Project Structure

data-gen/
| - main.ts           // runs the program
| - index.ts          // handles imports and exports
| - src/
| | - sensors.ts      // self-contained file with classes describing sensor functionality
| | - sensorData.csv  // setup file defining sensor input params with read/write permissions
| - utils/
| | - data-manager.ts // Singleton class structure used to track, update, upload and store data in real-time
| | - config.ts       // sets up the program and all objects & variables used within
| | - helpers.ts      // contains generic functions used throughout the project
| - tests/            // yet to be implemented
| - tsconfig.json     // generic settings for ../server/ directory
| - README.md
### Types

New types are defined for this program (a rough sketch follows the list):

- `LiveMeasurement`: extends `RangeMeasurement`, adding the properties `currentValue` and `timestep`, allowing the adapted use of prewritten data structures.
- `SensorData`: `{ [x: string]: LiveMeasurement }`. This takes the sensor data and puts it back in the standard `Record<string, Measurement>` format.
- `StoredData`: `{ [key: string]: [(number | string)][] }`. This object has the sensors/measurements as keys, and at each sensor's timestep an array containing the time stamp and current value is pushed into the sensor's array. While data points will be uploaded in real time, they are stored for recording and analysis purposes.
- `InitialState`: `{ [key: string]: { dt: number; initialVal: number } }`. This interface defines the object which holds the user-specified properties. Again, the key is the sensor name. The user can read and write to a CSV file to specify any or all of the sensors' initial values and the time interval between their generated readings.
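A minimal sketch of these shapes, assuming the field names above and the `RangeMeasurement` type from `@hyped/telemetry-types`; the exact tuple layout of `StoredData` is a guess at the "[timestamp, value]" pairs described:

```ts
import type { RangeMeasurement } from '@hyped/telemetry-types';

// Extends a range measurement with the live-simulation fields described above
type LiveMeasurement = RangeMeasurement & {
  currentValue: number;
  timestep: number;
};

// Keyed by sensor name, mirroring the pod's Record<string, Measurement> shape
type SensorData = { [sensor: string]: LiveMeasurement };

// Per sensor: a growing list of [timestamp, value] pairs kept for later analysis
type StoredData = { [sensor: string]: [number, number | string][] };

// User-specified timestep and initial value per sensor, read from the CSV
type InitialState = { [sensor: string]: { dt: number; initialVal: number } };
```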
      - - - -### Config - -This setup stage imports nested `measurements` object from pods.ts data. The `rangeFilter` function filters this data into an array of only the measurement/sensor objects of type `RangeMeasurement`, and removes duplicate redundancy, then converts it back into an object of similar format to `measurements`. This makes the code readable and familiar. This new object is exported by default. - -The array manipulation removes those without a `limits` property (i.e. of type `EnumMeasurement`), and converts all duplicates into equivalent strings to facilitate filtering e.g. 'thermistor_1', 'thermistor_2' -> both become 'thermistor'. Upon the ultimate conversion back into an object (the code uses `Object.fromEntries( Object.entries(measurements) )`), only one 'thermistor' entry is retained due to the JS Object prototype's inherent unique key characteristics. - -Then the `readData` helper function is called. This function reads the CSV data, which consists of `[sensor/quantity, time interval, initial value]`. It returns a Promise, which resolves if the resulting object (of type `InitialState`) is not empty. The result is chained to a `then` block, which requires that `unqSensorObj[sensor].currentValue=response[sensor].initialVal`, ensuring the CSV data has the correct sensor names and number of rows. - - - - - -### Index - -Gathers and exports all exports from relevant project files for ease of access - - - -### Sensors - -Self-contained module containing classes for the different sensor/measurement categories (Navigation, Pressure etc.). Each subclass inherits from the parent Sensor class, reducing repetitive code. The amount of different classes is as small as possible, each combined into groups of similar functionality. - -Classes are instantiated once each in `main.ts`. The Sensor class is constructed with a single `LiveMeasurement` sensor object, and its properties are set as `protected readonly`, allowing sub-classes to access them but placing a data barrier from external functions to access or modify these values. - -The `_currentValue` variable is mutable, so that only a single instance per class is needed, updating its current value. - -Not all sensors/measurements require their own class instance. For instance, in the navigation category only velocity (or the accelerometer reading) needs to be recorded in the class instance. From this value and given a reasonable function of time, the other navigation variables can be calculated accordingly at each time step using basic kinematics and calculus of limits. Additionally, many other measurements have a dependency on pod velocity, generally increasing in proportion to the speed. Temperature is another measurement which dictates certain others, like reservoir pressure for example. - -Read more about sensors here. - - - -### Data Manager - - - -## Main - -Main functional file. After importing all necessary objects, classes and functions, it runs the `GenerateDataSeries` iterative function, updating the `dataControl` instance at each iteration. - -Imports the adapted `rangeSensors` object from `config.ts` as well as the Sensor class and its sub-classes from `sensors.ts`, which each consist of similar functionality shared by a category of sensors such as Navigation, Pressure etc. - -Runs the main loop with user-defined parameters, with the actual functionality and data management in the other files. - - - -## To-do list - -
        -
1. Complete `config.ts`
2. Read the sensor specs for more info to use to estimate functionality specifics, estimated noise reduction quality, etc. to make the data generation more reflective of reality. Add more sensor properties if appropriate.
3. Add noise and sensor type properties to existing pod data with any new changes from the sensor spec sheet
4. Add general functionality in Utilities
   - Write an async function for the user to read the sensor parameters CSV
   - Write an async function for the user to modify CSV sensor parameters
   - Write a function to optimise data generation complexity for any given set of user-defined sensor reading time steps
   - Broadcast data live to an animated GUI graph
   - Write a function to generate noise
   - Write an exponential moving average method with parameters alpha and window (amount of recent data points to average)
5. `sensors.ts`
   - Create logical functions for next data points for all sensor groups
   - Define sensor reading hierarchy
6. `dataControl.ts`
   - Create data storage functionality and object interface
   - Change data access methods to get and set
   - Upload data values to the server within the updateData method
   - Combine randomise and default generateData methods into one with conditional logic
   - Add functionality for cases of different timesteps for different sensors which may depend on each other
7. `main.ts`
   - Provide user freedom to modify parameters with CLI input:
     - Data generation type: random/logical
     - Sensor-specific time intervals at which readings are generated
     - Total runtime for the data generation loop
   - Instantiate sensor classes and finish loop functionality
     - Plan the logic conceptually
     - Program it into the loop
8. Create logical and modular file structure (separation of concerns)
9. Refactor:
   - Minimise code and amount of classes as much as possible
   - Review file structure and ensure it is logical, readable and non-repetitive
10. Remove comments
## Sensors Structure

Every single sensor is "reliant" upon velocity or temperature readings for fake data generation. Temperature is a function of velocity, thus every single sensor needs velocity's currentValue to update.

Some have dependencies on both, such as pressure, but as velocity and temperature are interdependent it is akin to just being dependent on velocity. I.e. if, say, brakes reservoir pressure = K * temp, and temp = M * velocity, then pressure = K * M * velocity.

Therefore every sensor needs velocity at that instant. So velocity must be calculated at the lowest timestep of any sensor, even if the accelerometer does not update as frequently. Otherwise, we could estimate the velocity value through linear interpolation.

In terms of classes: Sensor parents all. Navigation returns the values of displacement, velocity and acceleration from the accelerometer sensor. This fake data needs some reasonable function to follow, so a logistic curve for velocity was chosen. The derivative of this function is the acceleration. We can ensure it peaks at just below maxAcceleration. But even though the accelerometer is the sensor, if we have a related function for velocity then it's equivalent to having a velocity sensor instead. So it makes sense to use the velocity logistic curve as the governing variable. Acceleration will be its discrete derivative, (vel - prevVel) / dt. Displacement is simply disp = prevVel * currentTime + 1/2 * a * currentTime**2 (see the sketch below the class list).

Some sensors like the Hall effect sensors don't even care about temperature. They are solely simple functions of velocity.

So we calculate velocity at every timestep.

Classes & Subclasses

- Sensor
  - Navigation
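A minimal sketch of this velocity-governed update; all constants are assumptions, and the displacement here is integrated over each timestep rather than from total elapsed time:

```ts
const V_MAX = 50;    // m/s, assumed critical velocity limit
const GROWTH = 0.35; // logistic growth rate, tuned so peak dv/dt (GROWTH * V_MAX / 4) stays below 5 m/s²
const T_MID = 12;    // s, assumed inflection point of the logistic curve

// Governing variable: logistic velocity curve
const velocity = (t: number): number =>
  V_MAX / (1 + Math.exp(-GROWTH * (t - T_MID)));

// One simulation step: acceleration as the discrete derivative, displacement by per-step kinematics
function step(prev: { t: number; disp: number; vel: number }, dt: number) {
  const t = prev.t + dt;
  const vel = velocity(t);
  const acc = (vel - prev.vel) / dt;
  const disp = prev.disp + prev.vel * dt + 0.5 * acc * dt * dt;
  return { t, disp, vel, acc };
}
```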
      - -So let's say the user wants to get all variables. He sets the same timestep of 500ms for all of them.\ - -
      - -#### Features to be added - -
        -
- Prompt user to edit the time steps for each sensor's readings as desired (sketched after this list):
  1. Print all sensors along with their default time step
  2. > Edit timesteps? [y/n]
  3. If 'y', cycle through each sensor and ask: > [sensor name]: enter preferred timestep in ms (e.g. 500) or press Enter to move to the next sensor
  4. Update the initial conditions object with any altered timesteps
- Prompt user to edit initial conditions by uploading a CSV file or changing specific values through the command prompt
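One possible shape for the timestep-editing prompt described in the first bullet; the flow and defaults are assumptions, sketched with `node:readline`:

```ts
import * as readline from 'node:readline/promises';
import { stdin as input, stdout as output } from 'node:process';

// Cycle through each sensor and let the user override its default timestep (ms)
async function editTimesteps(defaults: Record<string, number>) {
  const rl = readline.createInterface({ input, output });
  const timesteps = { ...defaults };

  const edit = await rl.question('Edit timesteps? [y/n] ');
  if (edit.trim().toLowerCase() === 'y') {
    for (const [sensor, dt] of Object.entries(defaults)) {
      const answer = await rl.question(`${sensor} (default ${dt} ms, Enter to skip): `);
      if (answer.trim() !== '') timesteps[sensor] = Number(answer);
    }
  }

  rl.close();
  return timesteps;
}
```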
## Next Steps
            -
1. Code in the noise. Shouldn't be difficult, just use a function based on Math.random and weight the amplitude of the noise to some extent. Decide whether how 'noisy' the data is might depend on certain variables such as speed. I'm not sure, so ask GPT or research this.
2. Create a moving average function with a `window` parameter set to 5 as default. Look into an exponential moving average too. The average value will be used to determine whether a reading is out of bounds or it's just noise. (See the sketch after this list.)
3. Find out how the noise levels compare between different sensors, e.g. thermistors, pressure gauges, digital sensors, navigation etc.
4. Create a simple function for reservoir pressure; it will not vary by much, but will increase with temperature slightly. The reading will have some noise.
5. Write functions for the other pressures. Push = acceleration, so front pressure goes up and back goes down (both move further away from atmospheric; their absolute gauge pressure increases). And pull = deceleration, so the opposite. Also double check with David that you're interpreting the pressure variable terms correctly.
6. I am assuming that the accelerometer(s) are all supposed to measure the absolute pod acceleration with respect to a stationary observer, and that's how the navigation parameters are found. Except this assumption seems wrong, as the accelerometer has a range of -150 to 150 m/s² while acceleration can only go up to 5 m/s². Is this perhaps referring to the sensor's physical limits of its capability to read acceleration, while the pod itself is not built to exceed 5 m/s²? In other words, the accelerometer will be limited to the 0-5 range; it's just not "critical" for the sensor in terms of safety, it's critical for the pod's safety. Perhaps. But another uncertainty is that the pod's acceleration can be easily determined by its speed, which (I assumed) we are controlling. Update: wrong, we are not controlling speed. We just switch on the power and track it using the accelerometer. The sensor's operational range is ±150. Above 150 it won't read the acceleration accurately. The pod cannot exceed 5. We have one sensor to measure navigation quantities, and that's the accelerometer. It has an operating range of -150 to 150. This specific pod prototype, Pod Ness, has an operating acceleration range of 0 to 5 (presumably this means -5 to 5). We generate fake data for the accelerometer sensor and analyse it with the view of keeping pod acceleration below 5, and we also calculate other navigation quantities starting from acceleration.
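A rough sketch of the noise and averaging ideas in items 1 and 2; the uniform noise model and the alpha/window defaults are assumptions:

```ts
// Add zero-mean noise scaled by the sensor's RMS noise figure
function addNoise(value: number, rmsNoise: number): number {
  return value + (Math.random() * 2 - 1) * rmsNoise;
}

// Exponential moving average over the last `window` readings, newest weighted by `alpha`
function exponentialMovingAverage(readings: number[], alpha = 0.5, window = 5): number {
  const recent = readings.slice(-window);
  if (recent.length === 0) return 0;
  let avg = recent[0];
  for (const value of recent.slice(1)) {
    avg = alpha * value + (1 - alpha) * avg;
  }
  return avg;
}
```

A reading would then be flagged only when this average strays outside the limits, rather than on a single noisy sample.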
Thursday notes from discussion with David + discussion with Damen about the public app, React and Three.js:

1. Separate all sensor data into its own file. This sensor data structure will be like the one in pods, but reduced to the data-gen relevant sensors, and with added/removed properties. Outside of this file, no other functional part of the program will be able to modify or access the file. The data file will be imported. Its properties are constant (so no 'currentVal' or 'movingAvg'). They are properties inherent to the sensor, like with the pod object. A new property David would like is a sampling rate property. This will define the delta T for that sensor's data generation, giving us a lot more freedom and making the code run a lot quicker: right now there's only one global delta T variable, so if we needed say 0.05s, all variables would be measured twenty times per second, which would be unnecessary and slow. Of course, the value of this property will be changed and modified by our team, but not during runtime. This is a fixed object which exports its data for the data generation function to run. Another object or array will store the transient values, which we will also send live to the server as they are calculated so a graph can be animated in real time.
2. One issue I foresee is that if different sensors have different delta Ts, it will make the main loop more complicated. Say the thermistor takes a reading every 0.2s and the accelerometer every 0.5s. We'd run the loop every 0.2s while checking if the time is also a multiple of 0.5s (and all the other sensors' times). To mitigate this slightly, I will add functionality to the `specific` parameter, so we can view a select few variables in one run, or all of them if we want to.
3. Different sensors will have a new property which defines their time interval (dt) and perhaps their degree of noise (as higher quality sensors would have less noise due to better electronic circuits)
4. Separate the sensor file into a new file with the sensor object and its properties relevant to the data generation
5. The fake data generation program will be placed into its own directory
6. It's only for internal use, but there are restrictions/rules we need to follow from the EHW committee
7. Add a method to the data manager class to upload the data at each step of the iteration to the server/MQTT so we can view it live (graph animation)
8. We don't have complete or current data on all the sensors we're using and we'll need to ask the electronics team to fill in a spreadsheet with the data for range limits (critical, warning and expected/nominal levels)
9. Run the logical data gen methods past the electronics/other team to see if they agree it makes sense
          - - diff --git a/telemetry/packages/fake/biome.json b/telemetry/packages/fake/biome.json deleted file mode 100644 index 784b6d85..00000000 --- a/telemetry/packages/fake/biome.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": ["../../biome.json"] -} diff --git a/telemetry/packages/fake/package.json b/telemetry/packages/fake/package.json deleted file mode 100644 index 853d898e..00000000 --- a/telemetry/packages/fake/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "@hyped/telemetry-fake", - "private": true, - "version": "0.0.1", - "scripts": { - "dev:test": "node dist/index.js", - "build": "dts build", - "build:fake": "dts build", - "lint": "biome lint .", - "lint:fix": "biome check --write ." - }, - "dependencies": { - "@hyped/telemetry-constants": "workspace:*", - "mqtt": "^5.3.6", - "tslib": "^2.5.3" - }, - "devDependencies": { - "@hyped/telemetry-types": "workspace:*", - "@hyped/tsconfig": "workspace:*", - "dts-cli": "^2.0.5", - "typescript": "^5.7.3" - } -} diff --git a/telemetry/packages/fake/src/base.ts b/telemetry/packages/fake/src/base.ts deleted file mode 100644 index 0a4de4ee..00000000 --- a/telemetry/packages/fake/src/base.ts +++ /dev/null @@ -1,67 +0,0 @@ -import type { Limits } from '@hyped/telemetry-types'; -import type { LiveReading, Readings } from './types'; -import { Utilities } from './utils'; - -export abstract class Sensor { - // Define static objects, updated each timestep // - // Records the actual time each sensor should be sampled next - // This object refers to sensors' sampling times to monitor the next time for each sensors' reading (in real time) - public static nextSamplingTimes: Record; - // Records whether each sensor has been sampled at the current time with a boolean flag for each - public static isSampled: Record = {}; - // Stores most recent sensor readings for all sensors, accessible by all sensors - // Null is used to indicate that the sensor has not been sampled at the current time - public static lastReadings: Record = {}; - - // Sensor properties - readonly type: string; // sensor type (same as the name of the object in sensorData) - readonly format: 'float' | 'integer'; // for random ternary logic (keyence is integer, rest are float) - readonly limits: Limits; - readonly rms_noise: number; - readonly delta_t: number; - - // Variable sensor data - protected time: number; // current time in seconds - - // Extract relevant properties from sensor data entries - constructor({ - type, - format, - limits, - rms_noise, - sampling_time, - readings, - }: LiveReading) { - Object.assign(this, { - type, - format, - limits, - rms_noise, - }); - this.delta_t = sampling_time / 1000; // convert ms to s - this.time = 0; - // Add initial sensor values to global readings object - Sensor.lastReadings[this.type] = readings; - } - - /** - * Main data gen method shared by all sensors - * Returns the Readings object, filtered into the values to be published with MQTT - * For motion, only acceleration, velocitity and displacement are uploaded - * Accelerometers are used to estimate readings, then these values are - * propagated to the three variables above - * @param t time in seconds - */ - abstract getData(t: number): Readings; - - getRandomData(readings: Readings): Readings { - for (const unit in readings) { - readings[unit] = Utilities.getRandomValue( - readings[unit], - this.rms_noise, - this.format, - ); - } - return readings; - } -} diff --git a/telemetry/packages/fake/src/config.ts b/telemetry/packages/fake/src/config.ts deleted file mode 100644 
index 94c84f77..00000000 --- a/telemetry/packages/fake/src/config.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { pods } from '@hyped/telemetry-constants'; -import type { Pod, RangeMeasurement } from '@hyped/telemetry-types'; -import type { LiveReading, SensorData } from './types'; - -type podID = keyof typeof pods; - -/** - * Extracts and categorises relevant sensor data - */ -const filterMeasurements = (id: podID) => { - const pod = Object.values(pods).find((pod: Pod) => pod.id === id) as Pod; - const filteredData = {} as Record; - // - for (const [key, meas] of Object.entries(pod.measurements)) { - if (meas.format !== 'enum') { - filteredData[key] = meas; - filteredData[key].name = key.replace(/_[^_]*\d$/, ''); - } - } - return filteredData; -}; - -export const measurements = filterMeasurements('pod_2024'); - -/** - * Gets an arbitrary initial value for each reading - * Testing functionTo be replaced with user defined params fetched from GUI - * @param data a key - value item from the measurements object - * @returns initial value for a given sensor/measurement - */ -const getInitialValue = (data: RangeMeasurement): number => { - // Define initial conditions - const initialVals: Record = { - accelerometer: 0, - acceleration: 0, - displacement: 0, - // velocity: measurements.velocity.limits.critical.high * 0.1, // initial velocity > 0 for continuity of logistic function - velocity: 0.3, // m/s (this aligns closely with logistic curve y-intercept) - pressure: data.name.endsWith('reservoir') ? 5 : 1, - thermistor: 25, - keyence: 0, - hall_effect: 0, - levitation_height: 0, - power_line_resistance: 10, - }; - - // Set initial value based on sensor types defined above - if (Object.prototype.hasOwnProperty.call(initialVals, data.name)) { - return initialVals[data.name]; - } - if (data.name.startsWith('pressure')) { - // Pressure gauges are subdivided into push, pull, brake and reservoir with different initial values - return initialVals.pressure; - } - // If the sensor is not recognised, return a random value within the critical limits - const { low, high } = data.limits.critical; - return Math.floor(Math.random() * (high - low)) + low; -}; - -/** - * Create new object to store existing and additional sensor parameters - * Groups sensors by data source by replacing sensor type with group's type - */ -export const sensorData: SensorData = Object.fromEntries( - Object.values(measurements) - .reduce( - // biome-ignore lint/suspicious/noExplicitAny: - (acc, sensor): any => { - if (!acc.seen) acc.seen = new Set(); - // Check if the sensor key has already been processed - if (!acc.seen.has(sensor.type)) { - acc.seen.add(sensor.type); - // Add one key value pair for each sensor type - // Each type holds all data on its constituent sensors - acc.entries.push([sensor.type, sensor]); - } - return acc; - }, - { seen: new Set(), entries: [] as [string, RangeMeasurement][] }, - ) - .entries // Set new readings property to the sensors' initial conditions - .map(([name, data]: [string, RangeMeasurement]) => [ - name, - { - ...data, - // Create object with a key-value pair for each measurement of a given sensor type - readings: Object.fromEntries( - Object.keys(measurements) - .filter( - (name) => - !name.endsWith('avg') && measurements[name].type === data.type, - ) - .map((el) => [el, getInitialValue(measurements[el])]), - ), - } as LiveReading, - ]), -); - -// Parameter storing distance of track, once finish point is reached program will end -export const trackLength = 
measurements.displacement.limits.critical.high; diff --git a/telemetry/packages/fake/src/index.ts b/telemetry/packages/fake/src/index.ts deleted file mode 100644 index 15461d7f..00000000 --- a/telemetry/packages/fake/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { sensorData } from './config'; -/** - * Main file which initialises the generation of the data series and uploads to GUI in real time. - * @param runTime (CLI) simulation time in ms (not real time, based on sensor timesteps) - * @param random (CLI) option to simulate random data - later to be replaced with a config object - * which allows user to randomise select sensor readings. Default is false - * @param specific (CLI) an array of specific sensor readings to simulate. Default is false - * i.e. simulate all sensors - */ -import { SensorManager } from './sensorManager'; - -const args = process.argv.slice(2); -const shouldRandomise = !!args.includes('--random'); - -// Filter for user-defined specific sensors, otherwise simulate all -Object.keys(sensorData).filter((sensor) => args.includes(sensor)); - -// Ensure input sensor options are valid and format them appropriately -const sensorsToRun = args.includes('--specific') - ? args - .slice(args.indexOf('--specific') + 1) - .map((s: string) => s.toLowerCase()) - .filter((s: string) => - Object.prototype.hasOwnProperty.call(sensorData, s), - ) - : Object.keys(sensorData); - -// Instantiate sensor manager -const sensorMgmt = new SensorManager(sensorsToRun); - -// Simulates sensor readings and uploads to the server -sensorMgmt.generateData(shouldRandomise); diff --git a/telemetry/packages/fake/src/sensorManager.ts b/telemetry/packages/fake/src/sensorManager.ts deleted file mode 100644 index 5790c757..00000000 --- a/telemetry/packages/fake/src/sensorManager.ts +++ /dev/null @@ -1,143 +0,0 @@ -import MQTT from 'mqtt'; -import { Sensor } from './base'; -import { sensorData, trackLength } from './config'; -import { type SensorInstance, sensors } from './sensors/index'; -import type { Readings } from './types'; -import { Utilities as utils } from './utils'; - -export class SensorManager { - // Create array to store sensor instances - private sensors: SensorInstance<(typeof sensors)[keyof typeof sensors]>[] = - []; - // Record the sampling intervals for each sensor - private samplingTimes: Record = {}; - // Global clock for simulation runtime - private globalTime = 0; - // Mqtt client - private client: MQTT.MqttClient; - - /** - * The sensors form a hierarchical dependency tree - * At the top level is Motion, which relies only on time - * All other sensor data relies on motion readings, either - * directly or as a grandchild of the motion class - * Key = sensor - * Value = parent class - */ - private dependencies: Record = { - motion: null, - keyence: 'motion', - temperature: 'motion', - pressure: 'temperature', - resistance: 'temperature', - magnetism: 'motion', - levitation: 'magnetism', - }; - - /** - * Sensor manager singleton class - * Controls and connects all sensor classes - * @param sensorsToRun user-defined array of sensor names to be run in the current simulation - */ - constructor(private sensorsToRun: string[]) { - // Create sensor instances - this.instantiateSensors(this.sensorsToRun); - // Record fixed sampling time periods - for (const s of this.sensorsToRun) { - this.samplingTimes[s] = sensorData[s].sampling_time; - } - // Initialize MQTT connection - this.client = MQTT.connect('MQTT://mosquitto:1883'); - } - - /** - * Runs transient data generation - * Updates global 
variables each iteration - * End program once runTime has been reached - * @param random boolean input set by user, allows for completely random data - */ - public generateData(random = false): void { - // Calculate base sampling interval using lowest common divisor of all sensors' sampling periods - const interval = utils.gcd(Object.values(this.samplingTimes)); - - const simulationInterval = setInterval(() => { - // Reset all 'sampled' flags to false - this.resetSampledState(); - for (const sensor of this.sensors) { - // Generate data if current time corresponds to sensor's sampling time - if ((this.globalTime / 1000) % sensor.delta_t === 0) { - // Get the sensors' output data - const readings: Readings = !random - ? sensor.getData(this.globalTime / 1000) // convert time to seconds for calculations - : sensor.getRandomData(Sensor.lastReadings[sensor.type]); - // Store latest readings and set sensors' sampled state to true - Sensor.lastReadings[sensor.type] = readings; - Sensor.isSampled[sensor.type] = true; - - // Publish sensor readings under the topic of - // each and for each measurement key to the data broker - for (const [measurement, value] of Object.entries(readings)) { - this.publishData(measurement, value.toString()); - } - } - } - - // Implement exit condition - if (Sensor.lastReadings.motion.displacement >= trackLength) { - clearInterval(simulationInterval); - this.generateData(random); - } - - this.globalTime += interval; - }, interval); - } - - /** - * Instantiate sensors and their required superclasses and store instances in array - */ - private instantiateSensors(sensorsToRun: string[]): void { - // Record sensors names to be added to instances array - const activeSensors: Set = new Set(); - // Function to activate all sensors required - const getActiveSensors = (name: string): void => { - if (this.dependencies[name] == null) { - activeSensors.add(name); - } else { - getActiveSensors(this.dependencies[name] as string); - activeSensors.add(name); - } - return; - }; - // Populate set - for (const s of sensorsToRun) { - getActiveSensors(s); - } - // Define sensor instances - // Correct sorting is automatic as recursion forces all parent class sensors to be - // added before their inheriting classes - for (const s of Array.from(activeSensors)) { - this.sensors.push(new sensors[s](sensorData[s])); - } - } - - /** - * Reset all sensors' isSampled flags to false on each iteration - */ - private resetSampledState(): void { - for (const sensor of Object.keys(Sensor.isSampled)) { - Sensor.isSampled[sensor] = false; - } - } - - /** - * Uploads data through MQTT broker to the frontend - * The properties of the sensors' readings objects are the keys which are appended to the topic path, i.e. 
...measurements/[key] - * So simply append the key and publish the value as the payload - * Subscribed clients extract values using payload[measurementKey] - */ - private publishData(measurement: string, reading: string): void { - this.client.publish(`hyped/pod_2024/measurement/${measurement}`, reading, { - qos: 1, - }); - } -} diff --git a/telemetry/packages/fake/src/sensors/index.ts b/telemetry/packages/fake/src/sensors/index.ts deleted file mode 100644 index bf3d41e5..00000000 --- a/telemetry/packages/fake/src/sensors/index.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { Keyence } from './keyence'; -import { Levitation } from './levitation'; -import { Magnetism } from './magnetism'; -// Individual sensor classes -import { Motion } from './motion'; -import { Pressure } from './pressure'; -import { Resistance } from './resistance'; -import { Temperature } from './temperature'; - -type SensorType = - | typeof Motion - | typeof Keyence - | typeof Pressure - | typeof Temperature - | typeof Resistance - | typeof Magnetism - | typeof Levitation; - -// Instance type for sensor classes -// biome-ignore lint/suspicious/noExplicitAny: -export type SensorInstance any> = - InstanceType; - -// Export object containing all sensor classes -export const sensors = { - motion: Motion, - keyence: Keyence, - temperature: Temperature, - resistance: Resistance, - pressure: Pressure, - magnetism: Magnetism, - levitation: Levitation, -} as Record; diff --git a/telemetry/packages/fake/src/sensors/keyence.ts b/telemetry/packages/fake/src/sensors/keyence.ts deleted file mode 100644 index 57e22ff5..00000000 --- a/telemetry/packages/fake/src/sensors/keyence.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { Sensor } from '../base'; -import { trackLength } from '../config'; -import type { LiveReading, Readings } from '../types'; -import { Motion } from './motion'; - -/** - * Integer value in range [0, 16], which directly corresponds to the track distance. - * Live data takes the form of a staircase with varying step width - * Instead of sensor noise there is measurement tolerance - */ -export class Keyence extends Motion { - private podLength = 2.5; - - getData(t: number): Readings { - // Keyence sensors are evenly distributed along the pod - // Displacement is measured at the nose of the pod - const sensorRegion = - this.podLength / (Object.keys(Sensor.lastReadings.keyence).length - 1); - const noPoles = this.limits.critical.high; - - if (!Sensor.isSampled.motion) { - this.displacement = super.getData(t).displacement; - Sensor.isSampled.motion = true; - } else { - this.displacement = Sensor.lastReadings.motion.displacement; - } - - this.displacement += this.addTolerance(); - - return Object.fromEntries( - Object.keys(Sensor.lastReadings.keyence).map((key, i) => { - const relDisp = - this.displacement - sensorRegion * i >= 0 - ? this.displacement - sensorRegion * i - : 0; // assert value is positive - return [key, Math.floor(relDisp * (noPoles / trackLength))]; - }), - ); - } - - /** - * Keyence sensor has single-digit millimetre tolerance - */ - addTolerance() { - return Math.random() * 0.01 * (Math.random() >= 0.5 ? 
1 : -1); - } -} diff --git a/telemetry/packages/fake/src/sensors/levitation.ts b/telemetry/packages/fake/src/sensors/levitation.ts deleted file mode 100644 index d3ce3e82..00000000 --- a/telemetry/packages/fake/src/sensors/levitation.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { Sensor } from '../base'; -import type { LiveReading, Readings } from '../types'; -import { Utilities } from '../utils'; -import { Magnetism } from './magnetism'; - -export class Levitation extends Magnetism { - private timeActive: number; // dynamic time variable - private timeOffset: number; // time at which lev. is activated - private isActive = false; - - private prevVals: number[]; // store recent values to identify reaching steady state - private setpoint = 50; // mm - private inSteadyState = true; - private sse = 0.02; // steady state error (±1% of setpoint) - - private readonly logRiseParams = { - t_0: 1.9, // inflection point - growth: 2.5, // growth rate - t_f: 2.225, // time when logistic function switches to sinusoidal exp. decay - l_peak: 60, // peak amplitude - }; - private readonly oscParams = { - freq: 4, // rad/s - phase: 3.7, // phase angle (rad) - decay: 0.2, // decay rate - amp: this.logRiseParams.l_peak - this.setpoint, // oscillation amplitude - }; - private readonly logFallParams = { - t_0: 2.5, // land on track within 5s - growth: 1.5, // decline smoother than rise - }; - - constructor(data: LiveReading) { - super(data); - // 10 values ensures minimal error for small sampling times - // and sufficient reaction time for large sampling times - this.prevVals = Array(10).fill(0); - } - - // Reset time at rising and falling stages - initiate(t: number) { - this.timeOffset = t; - this.timeActive = 0; - this.isActive = !this.isActive; - this.inSteadyState = false; - } - - getData(t: number): Readings { - // Start relative timekeeping when EM powers on - if (this.isFieldOn() && !this.isActive) this.initiate(t); - - // Keep at zero while on track - // ToF sensor noise negated/ignored as value is fixed and known - if (!this.isActive && this.inSteadyState) - return Sensor.lastReadings.levitation; - - // Code below reachable after EM field first turns on - - // Update levitating time - this.timeActive = t - this.timeOffset; - - // Rising stage - if (this.isActive) { - if (this.timeActive <= this.logRiseParams.t_f) { - this.prevVals.push( - Utilities.logistic( - this.timeActive, - this.logRiseParams.l_peak, - this.logRiseParams.growth, - this.logRiseParams.t_0, - ), - ); - this.prevVals.shift(); - } - - // Oscillation to steady state - else if ( - !this.inSteadyState && - this.timeActive > this.logRiseParams.t_f - ) { - this.prevVals.push( - this.setpoint + - Utilities.oscillateDecay( - this.timeActive - this.logRiseParams.t_f, - this.oscParams.freq, - this.oscParams.phase, - this.oscParams.decay, - this.oscParams.amp, - ), - ); - this.prevVals.shift(); - - this.inSteadyState = this.prevVals.every( - (l) => Math.abs(this.setpoint - l) < (this.sse / 2) * this.setpoint, - ); - } - - // If at steady state, fix value to avoid unneeded computation - else { - this.prevVals.push(this.setpoint); - this.prevVals.shift(); - } - - // Monitor EM field during steady state - if (this.isFieldOn()) return this.updateReadings(this.prevVals); - // Reset time for decline stage - this.initiate(t); - } - - // Code below reachable once pod decline begins - - // Once pod slows down, begin gradual decline in levitation - if (!this.inSteadyState) { - this.prevVals.push( - this.setpoint * - (1 - - Utilities.logistic( - 
this.timeActive, - 1, - this.logFallParams.growth, - this.logFallParams.t_0, - )), - ); - - this.inSteadyState = this.prevVals.every( - (l) => Math.abs(l) < (this.sse / 2) * this.setpoint, - ); - - return this.updateReadings(this.prevVals); - } - // Once touched down, reset sensor readings to zero - return this.updateReadings([], true); - } - - private updateReadings(prevVals: number[], onTrack = false): Readings { - return Object.fromEntries( - Object.keys(Sensor.lastReadings.levitation).map((key) => { - return [ - key, - Utilities.round2DP( - prevVals.slice(-1)[0] + - // Add noise if levitating, otherwise value is fixed so noise is negated - (onTrack ? 0 : Utilities.gaussianRandom(this.rms_noise)), - ), - ]; - }), - ); - } -} diff --git a/telemetry/packages/fake/src/sensors/magnetism.ts b/telemetry/packages/fake/src/sensors/magnetism.ts deleted file mode 100644 index 732aad83..00000000 --- a/telemetry/packages/fake/src/sensors/magnetism.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Sensor } from '../base'; -import type { LiveReading, Readings } from '../types'; -import { Utilities as utils } from '../utils'; -import { Motion } from './motion'; - -export class Magnetism extends Motion { - protected magSetpoint = 250; // A - - getData(t: number): Readings { - if (!Sensor.isSampled.motion) { - this.velocity = super.getData(t).velocity; - Sensor.isSampled.motion = true; - } else { - this.velocity = Sensor.lastReadings.motion.velocity; - } - - return Object.fromEntries( - Object.keys(Sensor.lastReadings.magnetism).map((key) => { - // binary on or off - instantaneous step change - return [ - key, - (this.velocity >= this.liftoffSpeed ? this.magSetpoint : 0) + - utils.gaussianRandom(this.rms_noise), - ]; - }), - ); - } - - /** - * For subclasses to check if EM field is powered on - * EM field only switched on when corresponding velocity is detected - * Hence use of last known velocity value instead of generating a new one - */ - protected isFieldOn(): boolean { - this.velocity = Sensor.lastReadings.motion.velocity; - return this.velocity >= this.liftoffSpeed; - } -} diff --git a/telemetry/packages/fake/src/sensors/motion.ts b/telemetry/packages/fake/src/sensors/motion.ts deleted file mode 100644 index 70bd81d7..00000000 --- a/telemetry/packages/fake/src/sensors/motion.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { Sensor } from '../base'; -import { measurements } from '../config'; -import type { LiveReading, Readings } from '../types'; -import { Utilities } from '../utils'; - -export class Motion extends Sensor { - protected displacement: number; - protected velocity: number; - protected acceleration: number; - // Velocity threshold at which levitation is activated - protected liftoffSpeed = 5; - - private logParams = { - growth: 0.4, - // Ensures acceleration peaks at its limiting operating value - t_0: 12.5, - // Max vel. 
set to 95% of upper limit giving a small margin for noise fluctuations - stState: 0.95 * measurements.velocity.limits.critical.high, - }; - - constructor(accelerometer: LiveReading) { - super(accelerometer); - const { displacement, velocity, acceleration } = Sensor.lastReadings.motion; - Object.assign(this, { displacement, velocity, acceleration }); - } - - getData(t: number): Readings { - const velocityEstimate = Utilities.logistic( - t, - this.logParams.stState, - this.logParams.growth, - this.logParams.t_0, - ); - - // Use estimate to calculate accelerometer reading - let accelerometerReading = - (velocityEstimate - this.velocity) / this.delta_t; - // Assert reading is not above critical limit - accelerometerReading = - accelerometerReading >= this.limits.critical.high - ? this.limits.critical.high - : accelerometerReading; - accelerometerReading += Utilities.gaussianRandom(this.rms_noise); - - // Use trapezoidal integration to find velocity, displacement - const avgAcceleration = (accelerometerReading + this.acceleration) / 2; - this.velocity += avgAcceleration * this.delta_t; - this.displacement += this.velocity * this.delta_t; - this.acceleration = accelerometerReading; - - return { - acceleration: Utilities.round2DP(this.acceleration), - velocity: Utilities.round2DP(this.velocity), - displacement: Utilities.round2DP(this.displacement), - }; - } -} diff --git a/telemetry/packages/fake/src/sensors/pressure.ts b/telemetry/packages/fake/src/sensors/pressure.ts deleted file mode 100644 index 1c9d5703..00000000 --- a/telemetry/packages/fake/src/sensors/pressure.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { Sensor } from '../base'; -import type { LiveReading, Readings } from '../types'; -import { Utilities as utils } from '../utils'; -import { Temperature } from './temperature'; - -export class Pressure extends Temperature { - private pResBrakes0: number; - private pResSusp0: number; - - private airProps = { - rho: 1.225, // air density (kg/m3) - atm: 101325, // atmospheric pressure (Pa) - }; - private coefficients = { - lossFactor: 0.05, // 5% internal pressure losses - stagnation: 0.5, - wake: 0.25, - brakingFactor: 0.05, // to calculate pressure increase due to braking force - }; - - constructor(data: LiveReading) { - super(data); - // this.prevVals = Array(10).fill(0); - this.pResBrakes0 = data.readings.pressure_brakes_reservoir; - this.pResSusp0 = data.readings.pressure_active_suspension_reservoir; - } - - getData(): Readings { - const newData = { ...Sensor.lastReadings.pressure }; - // Pneumatic pressure gauges - newData.pressure_front_pull = this.bernoulli('stagnation'); - newData.pressure_front_push = - (1 - this.coefficients.lossFactor) * newData.pressure_front_pull; - newData.pressure_back_pull = this.bernoulli('wake'); - newData.pressure_back_push = - (1 - this.coefficients.lossFactor) * newData.pressure_back_pull; - - // Reservoir pressure - newData.pressure_brakes_reservoir = this.idealGasLaw('brakes'); - newData.pressure_active_suspension_reservoir = - this.idealGasLaw('suspension'); - - // Brake pressure - newData.pressure_front_brake = this.brakePressure(); - newData.pressure_back_brake = newData.pressure_front_brake; - - return Object.fromEntries( - Object.entries(newData).map(([key, value]) => { - return [ - key, - utils.round2DP( - (value + utils.gaussianRandom(this.rms_noise)) * 10 ** -5, // convert back to bar - ), - ]; - }), - ); - } - - private bernoulli(loc: 'stagnation' | 'wake'): number { - const { rho, atm } = this.airProps; - return atm + this.coefficients[loc] * 
(rho * this.velocity ** 2); - } - - private idealGasLaw(loc: 'brakes' | 'suspension'): number { - this.temp += - this.acceleration < 0 && loc === 'brakes' - ? Math.abs(this.acceleration) * this.coefficients.brakingFactor - : 0; - return loc === 'brakes' - ? this.pResBrakes0 * (this.cToK(this.temp) / this.cToK(this.temp0)) - : this.pResSusp0 * (this.cToK(this.temp) / this.cToK(this.temp0)); - } - - private brakePressure(): number { - const { atm } = this.airProps; - const p = Sensor.lastReadings.pressure.pressure_front_brake; - if (this.acceleration > 0 && p > atm) { - return p - 100; // arbitrary value for pressure drop - } - if (this.acceleration > 0) { - return atm; - } - this.temp += - Math.abs(this.acceleration) * 5 * this.coefficients.brakingFactor; - return atm * (this.cToK(this.temp) / this.cToK(this.temp0)); - } - - private cToK(temp: number): number { - return temp + 273.15; - } -} diff --git a/telemetry/packages/fake/src/sensors/resistance.ts b/telemetry/packages/fake/src/sensors/resistance.ts deleted file mode 100644 index 94cd0570..00000000 --- a/telemetry/packages/fake/src/sensors/resistance.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { Sensor } from '../base'; -import type { LiveReading, Readings } from '../types'; -import { Utilities as utils } from '../utils'; -import { Temperature } from './temperature'; - -export class Resistance extends Temperature { - private alpha = 5 * 10 ** -3; // Temperature coefficient of resistance (steel) - private r0: number; // Initial value - - constructor(data: LiveReading) { - super(data); - // Set reference value and convert to ohms for higher precision output values - this.r0 = Sensor.lastReadings.resistance.power_line_resistance * 10 ** 3; - } - - /** - * Resistance can be assumed constant, seeing little variation with temperature - * change. - * This data verifies the power line's continual safety by checking resistance - * is as expected during operation. 
- */ - getData(): Readings { - if (!Sensor.isSampled.temperature) { - this.temp = utils.average(Object.values(super.getData())); - Sensor.isSampled.temperature = true; - } - - const readings = Object.keys(Sensor.lastReadings.resistance).map((key) => { - // R = R0 * (1 + α(T - T0)) - const r = this.r0 * (1 + this.alpha * (this.temp - this.temp0)); - return [ - key, - utils.round2DP((r + utils.gaussianRandom(this.rms_noise)) * 0.001), - ]; - }); - - return Object.fromEntries(readings); - } -} diff --git a/telemetry/packages/fake/src/sensors/temperature.ts b/telemetry/packages/fake/src/sensors/temperature.ts deleted file mode 100644 index b74c3baa..00000000 --- a/telemetry/packages/fake/src/sensors/temperature.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { Sensor } from '../base'; -import type { LiveReading, Readings } from '../types'; -import { Utilities } from '../utils'; -import { Motion } from './motion'; - -export class Temperature extends Motion { - protected temp: number; - protected temp0: number; - - // Arbitrary coefficients for estimating temperature changes - private params = { - drag: 0.1, - friction: 0.3, - heatGen: 0.5, - }; - - constructor(data: LiveReading) { - super(data); - // Initial temp used for reference by subclass(es) - this.temp0 = Utilities.average(Object.values(data.readings)); - this.temp = this.temp0; - } - - getData(): Readings { - this.temp += // Air drag and internal heat generation - this.velocity ** 3 * this.params.drag + - this.velocity * this.params.heatGen; - this.temp += // On the track, temperature increases with work done - this.velocity < this.liftoffSpeed - ? this.displacement ** 2 * this.params.friction - : this.displacement ** 2 * - (this.liftoffSpeed / this.velocity) * - this.displacement * - this.params.friction; - - return Object.fromEntries( - Object.keys(Sensor.lastReadings.temperature).map((key) => { - return [ - key, - Utilities.round2DP( - this.temp + Utilities.gaussianRandom(this.rms_noise), - ), - ]; - }), - ); - } -} diff --git a/telemetry/packages/fake/src/types.ts b/telemetry/packages/fake/src/types.ts deleted file mode 100644 index 03f50f36..00000000 --- a/telemetry/packages/fake/src/types.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { RangeMeasurement } from '@hyped/telemetry-types'; -/** - * Unique variable readinfgs each sensor provides - * E.g. accelerometers generate values for acceleration, displacement and velocity - */ -export type LiveReading = RangeMeasurement & { readings: Readings }; - -export type SensorData = Record; - -/** - * Sensor property containing values for each of its measured quantities - */ -export type Readings = { - [measurement: string]: number; -}; - -/** - * Return type for sensor class instantiation - */ -export type BaseSensor = { - getData: (t: number) => Readings; - getRandomData: (prevValue: number, readings: Readings) => Readings; -}; diff --git a/telemetry/packages/fake/src/utils.ts b/telemetry/packages/fake/src/utils.ts deleted file mode 100644 index 9c39249a..00000000 --- a/telemetry/packages/fake/src/utils.ts +++ /dev/null @@ -1,119 +0,0 @@ -export class Utilities { - /** - * Greatest common divisor - */ - public static gcd(nums: number[]): number { - const sortedNums = nums.sort((a, b) => b - a); - return sortedNums.reduce((acc, c) => { - return c === 0 ? 
acc : Utilities.gcd([c, acc % c]); - }); - } - - /** - * Simple floating point rounding method - * @param num - * @returns - */ - public static round2DP(num: number): number { - return Number.parseFloat(num.toFixed(2)); - } - - /** - * Generates random noise value from a Gaussian distribution - * @param mean self-explanatory - * @param std_dev sensor's RMS noise value, used as the standard deviation - * @returns random number defined by the normal distribution of stdDev = RMS noise - */ - public static gaussianRandom(std_dev: number, mean = 0): number { - // Using the Box-Muller transform to generate random values from a normal distribution - const u1 = Math.random(); - const u2 = Math.random(); - const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2); - - return Number.parseFloat((z * std_dev + mean).toFixed(2)); - } - - /** - * Generates random value from random distribution defined by provided range - * 99% probability of random value falling within critical limits - * This results in a z-score of 2.576 for confidence level of 99% - */ - public static getRandomValue( - prevValue: number, - rms_noise: number, - format: 'float' | 'integer', - ): number { - return format === 'float' - ? Number.parseFloat( - Utilities.gaussianRandom(rms_noise, prevValue).toFixed(2), - ) - : Number.parseInt( - Utilities.gaussianRandom(rms_noise, prevValue).toFixed(2), - ); - } - - /** - * Simple arithmetic mean - * @param values numerical sample - * @returns mean value - */ - public static average(values: number[]): number { - return values.reduce((acc, c) => acc + c) / values.length; - } - - /** - * Logistic function used as an analytical basis for dynamic variables which change over time - * @param t - current time - * @param peak - asymptotic maximum value - * @param k - exponential growth factor - * @param t0 - time of curve inflection (df²/dt² = 0) - * @returns f(t) - current reading according to idealised model - */ - public static logistic( - t: number, - peak: number, - k: number, // exponential growth rate factor - t0: number, // time at which second derivative reaches a stationary point - ): number { - return Number.parseFloat((peak / (1 + Math.exp(-k * (t - t0)))).toFixed(2)); - } - - /** - * Sinusoidal damped oscillation - * @param t - current time - * @param freq - angular frequency - * @param phase - phase shift (angle) - * @param decay - exponential decay factor - * @param amp - oscillation peak amplitude - * @returns f(t) - */ - public static oscillateDecay( - t: number, - freq: number, - phase: number, - decay: number, - amp: number, - ): number { - return Number.parseFloat( - (amp * Math.exp(-decay * t) * Math.cos(freq * t + phase)).toFixed(2), - ); - } - - /** - * Gets the exponential average of a recent set of values - * @param vals previous values (and chosen length of array) - * @param alpha weighting factor - * @returns exponentially weighted average - */ - public expMovingAvg(vals: number[], alpha: number): number | undefined { - if (alpha <= 0 || alpha > 1 || !vals.length) { - return; - } - let sum = 0; - vals.forEach((v, i) => { - const weight = alpha ** (vals.length - 1 - i); - sum += v * weight; - }); - return sum / (1 - alpha ** vals.length); - } -} diff --git a/telemetry/packages/fake/tsconfig.json b/telemetry/packages/fake/tsconfig.json deleted file mode 100644 index e362eff5..00000000 --- a/telemetry/packages/fake/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "@hyped/tsconfig/base.json", - "compilerOptions": { - "outDir": "./dist", - "lib": 
["esnext"], - "importHelpers": true, - "sourceMap": true, - "rootDir": "./src", - "strictPropertyInitialization": false - }, - "include": ["src", "src/config.ts"], - "exclude": ["node_modules"] -} diff --git a/telemetry/packages/public-app/app/cards.tsx b/telemetry/packages/public-app/app/cards.tsx index 973d20a0..8650a892 100644 --- a/telemetry/packages/public-app/app/cards.tsx +++ b/telemetry/packages/public-app/app/cards.tsx @@ -1,12 +1,12 @@ -import { DisplacementChart } from "@/components/displacement-chart"; -import { LaunchTime } from "@/components/launch-time"; -import LevitationHeight from "@/components/levitation-height"; -import { SocialIcons } from "@/components/social-icons"; -import ThemeSwitch from "@/components/theme-switch"; -import { VelocityGraph } from "@/components/velocity-graph"; -import { Card, Grid, Text, Title } from "@tremor/react"; -import Image from "next/image"; -import { useState } from "react"; +import { DisplacementChart } from '@/components/displacement-chart'; +import { LaunchTime } from '@/components/launch-time'; +import LevitationHeight from '@/components/levitation-height'; +import { SocialIcons } from '@/components/social-icons'; +import ThemeSwitch from '@/components/theme-switch'; +import { VelocityGraph } from '@/components/velocity-graph'; +import { Card, Grid, Text, Title } from '@tremor/react'; +import Image from 'next/image'; +import { useState } from 'react'; /** * The cards that are displayed on the dashboard. @@ -20,7 +20,7 @@ const CARDS = { type CardType = keyof typeof CARDS; export default function Cards() { - const [selected, setSelected] = useState("VELOCITY"); + const [selected, setSelected] = useState('VELOCITY'); const selectedCardComponent = CARDS[selected]; const otherCards = (Object.keys(CARDS) as CardType[]).filter( @@ -63,7 +63,7 @@ export default function Cards() { */ const HypedImage = () => { const common = { - alt: "HYPED Logo, with a red E resembling 3 stacked hyperloop pods", + alt: 'HYPED Logo, with a red E resembling 3 stacked hyperloop pods', width: 200, height: 50, }; diff --git a/telemetry/packages/server/src/app.module.ts b/telemetry/packages/server/src/app.module.ts index aafa3874..481f1219 100644 --- a/telemetry/packages/server/src/app.module.ts +++ b/telemetry/packages/server/src/app.module.ts @@ -5,13 +5,13 @@ import { PodControlsModule } from './modules/controls/PodControls.module'; import { InfluxModule } from './modules/influx/Influx.module'; import { LiveLogsGateway } from './modules/live-logs/LiveLogs.gateway'; import { LoggerModule } from './modules/logger/Logger.module'; -import { MeasurementModule } from './modules/measurement/Measurement.module'; import { MqttClientModule } from './modules/mqtt/client/MqttClientModule'; import { MqttIngestionModule } from './modules/mqtt/ingestion/MqttIngestion.module'; import { OpenMCTModule } from './modules/openmct/OpenMCT.module'; import { FaultModule } from './modules/openmct/faults/Fault.module'; import { PublicDataModule } from './modules/public-data/PublicData.module'; import { RemoteLogsModule } from './modules/remote-logs/RemoteLogs.module'; +import { TelemetryModule } from './modules/telemetry/Telemetry.module'; import { WarningsModule } from './modules/warnings/Warnings.module'; @Module({ @@ -21,7 +21,7 @@ import { WarningsModule } from './modules/warnings/Warnings.module'; InfluxModule, MqttIngestionModule, OpenMCTModule, - MeasurementModule, + TelemetryModule, FaultModule, PodControlsModule, WarningsModule, diff --git 
a/telemetry/packages/server/src/modules/measurement/MeasurementReading.types.ts b/telemetry/packages/server/src/modules/measurement/MeasurementReading.types.ts deleted file mode 100644 index 2a6f24a7..00000000 --- a/telemetry/packages/server/src/modules/measurement/MeasurementReading.types.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { zodEnumFromObjKeys } from '@/modules/common/utils/zodEnumFromObjKeys'; -import { pods } from '@hyped/telemetry-constants'; -import { z } from 'zod'; - -export const MeasurementReadingSchema = z - .object({ - podId: zodEnumFromObjKeys(pods), - measurementKey: z.string(), - timestamp: z.string(), // to handle nanoseconds timestamp - value: z.number(), - }) - // Validate measurement exists and enum value is valid (if applicable) - .refine( - ({ podId, measurementKey, value }) => { - const measurement = pods[podId].measurements[measurementKey]; - - if (!measurement) { - return false; - } - - // Validate enum values - if (measurement.format === 'enum') { - const enumValue = measurement.enumerations.find( - (e) => e.value === value, - ); - - if (!enumValue) { - return false; - } - } - - // Validate integers and floats - if ( - (measurement.format === 'float' && Number.isNaN(value)) || - (measurement.format === 'integer' && !Number.isInteger(value)) - ) { - return false; - } - - return true; - }, - { - message: - 'Invalid measurement reading - measurement does not exist or invalid enum value', - }, - ); - -export type MeasurementReading = z.infer; diff --git a/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.module.ts b/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.module.ts index 9a063b4e..96c1328a 100644 --- a/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.module.ts +++ b/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.module.ts @@ -1,10 +1,10 @@ import { StateModule } from '@/modules/state/State.module'; +import { TelemetryModule } from '@/modules/telemetry/Telemetry.module'; import { Module } from '@nestjs/common'; -import { MeasurementModule } from 'src/modules/measurement/Measurement.module'; import { MqttIngestionService } from './MqttIngestion.service'; @Module({ - imports: [MeasurementModule, StateModule], + imports: [TelemetryModule, StateModule], providers: [MqttIngestionService], }) export class MqttIngestionModule {} diff --git a/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.service.ts b/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.service.ts index ff1429c3..b82606a7 100644 --- a/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.service.ts +++ b/telemetry/packages/server/src/modules/mqtt/ingestion/MqttIngestion.service.ts @@ -1,10 +1,6 @@ -import type { MeasurementService } from '@/modules/measurement/Measurement.service'; import type { StateService } from '@/modules/state/State.service'; -import { - POD_IDS, - type PodId, - type PodStateType, -} from '@hyped/telemetry-constants'; +import type { MeasurementService } from '@/modules/telemetry/Measurement.service'; +import { type PodStateType, podIds } from '@hyped/telemetry-constants'; import { currentTime } from '@influxdata/influxdb-client'; import { Injectable } from '@nestjs/common'; import { Params, Payload, Subscribe } from 'nest-mqtt'; @@ -71,8 +67,8 @@ export class MqttIngestionService { } } - private validatePodId(podId: string): asserts podId is PodId { - if (!POD_IDS.includes(podId as PodId)) { + private validatePodId(podId: string) { + if 
(!podIds.includes(podId)) { throw new MqttIngestionError('Invalid pod ID'); } } diff --git a/telemetry/packages/server/src/modules/openmct/data/realtime/RealtimeTelemetryData.gateway.ts b/telemetry/packages/server/src/modules/openmct/data/realtime/RealtimeTelemetryData.gateway.ts index 3b24349c..cb12e423 100644 --- a/telemetry/packages/server/src/modules/openmct/data/realtime/RealtimeTelemetryData.gateway.ts +++ b/telemetry/packages/server/src/modules/openmct/data/realtime/RealtimeTelemetryData.gateway.ts @@ -1,5 +1,5 @@ import { Logger } from '@/modules/logger/Logger.decorator'; -import type { MeasurementReading } from '@/modules/measurement/MeasurementReading.types'; +import type { MeasurementReading } from '@/modules/telemetry/MeasurementReading.types'; import { socket as socketConstants } from '@hyped/telemetry-constants'; import type { LoggerService } from '@nestjs/common'; import { diff --git a/telemetry/packages/server/src/modules/openmct/dictionary/Dictionary.service.ts b/telemetry/packages/server/src/modules/openmct/dictionary/Dictionary.service.ts index 6d3f438d..262bcfe9 100644 --- a/telemetry/packages/server/src/modules/openmct/dictionary/Dictionary.service.ts +++ b/telemetry/packages/server/src/modules/openmct/dictionary/Dictionary.service.ts @@ -1,20 +1,20 @@ -import { POD_IDS, type PodId, pods } from '@hyped/telemetry-constants'; +import { podIds, pods } from '@hyped/telemetry-constants'; import type { OpenMctDictionary, OpenMctPod } from '@hyped/telemetry-types'; import { Injectable } from '@nestjs/common'; -import { mapMeasurementToOpenMct } from './utils/mapMeasurementToOpenMct'; +import { mapMeasurementToOpenMct } from './utils/map-to-openmct'; @Injectable() export class DictionaryService { getDictionary(): OpenMctDictionary { const dictionary: OpenMctDictionary = {}; - for (const podId of POD_IDS) { + for (const podId of podIds) { dictionary[podId] = this.getPod(podId); } return dictionary; } getPodIds() { - return POD_IDS; + return podIds; } getPod(podId: string): OpenMctPod { @@ -26,7 +26,7 @@ export class DictionaryService { ); return { - name: pod.name, + name: pod.label, id: pod.id, measurements, }; @@ -44,8 +44,8 @@ export class DictionaryService { return mapMeasurementToOpenMct(measurement); } - private validatePodId(podId: string): asserts podId is PodId { - if (!POD_IDS.includes(podId as PodId)) { + private validatePodId(podId: string) { + if (!podIds.includes(podId)) { throw new Error(`Pod ${podId} not found`); } } diff --git a/telemetry/packages/server/src/modules/openmct/dictionary/utils/mapMeasurementToOpenMct.ts b/telemetry/packages/server/src/modules/openmct/dictionary/utils/map-to-openmct.ts similarity index 58% rename from telemetry/packages/server/src/modules/openmct/dictionary/utils/mapMeasurementToOpenMct.ts rename to telemetry/packages/server/src/modules/openmct/dictionary/utils/map-to-openmct.ts index f4c04915..e307979e 100644 --- a/telemetry/packages/server/src/modules/openmct/dictionary/utils/mapMeasurementToOpenMct.ts +++ b/telemetry/packages/server/src/modules/openmct/dictionary/utils/map-to-openmct.ts @@ -4,23 +4,18 @@ export function mapMeasurementToOpenMct( measurement: Measurement, ): OpenMctMeasurement { return { - name: measurement.name, - key: measurement.key, + name: measurement.label, + key: measurement.id, type: measurement.type, values: [ { key: 'value', - name: measurement.name, + name: measurement.label, unit: measurement.unit, format: measurement.format, - ...('limits' in measurement && { - min: measurement.limits?.critical.low, - 
max: measurement.limits?.critical.high, - limits: measurement.limits, - }), - ...('enumerations' in measurement && { - enumerations: measurement.enumerations, - }), + min: measurement.limits?.critical.low, + max: measurement.limits?.critical.high, + limits: measurement.limits, hints: { range: 1, }, diff --git a/telemetry/packages/server/src/modules/openmct/faults/Fault.service.ts b/telemetry/packages/server/src/modules/openmct/faults/Fault.service.ts index 0ab4da5c..7620a050 100644 --- a/telemetry/packages/server/src/modules/openmct/faults/Fault.service.ts +++ b/telemetry/packages/server/src/modules/openmct/faults/Fault.service.ts @@ -1,19 +1,22 @@ import type { InfluxService } from '@/modules/influx/Influx.service'; import { Logger } from '@/modules/logger/Logger.decorator'; -import type { MeasurementReading } from '@/modules/measurement/MeasurementReading.types'; +import type { MeasurementReading } from '@/modules/telemetry/MeasurementReading.types'; import type { FaultLevel } from '@hyped/telemetry-constants'; -import type { OpenMctFault, Unpacked } from '@hyped/telemetry-types'; +import type { + Measurement, + OpenMctFault, + Unpacked, +} from '@hyped/telemetry-types'; import type { HistoricalFaults } from '@hyped/telemetry-types/dist/openmct/openmct-fault.types'; -import type { RangeMeasurement } from '@hyped/telemetry-types/dist/pods/pods.types'; import { Point } from '@influxdata/influxdb-client'; import { Injectable, type LoggerService } from '@nestjs/common'; import type { HistoricalFaultDataService } from './data/historical/HistoricalFaultData.service'; import type { RealtimeFaultDataGateway } from './data/realtime/RealtimeFaultData.gateway'; -import { convertToOpenMctFault } from './utils/convertToOpenMctFault'; +import { convertToOpenMctFault } from './utils/convert-to-openmct-fault'; export type Fault = { level: FaultLevel; - measurement: RangeMeasurement; + measurement: Measurement; tripReading: MeasurementReading; }; @@ -37,7 +40,7 @@ export class FaultService { const possibleExistingFaults = await this.historicalService.getHistoricalFaults({ podId: tripReading.podId, - measurementKey: measurement.key, + measurementKey: measurement.id, }); // If there's an existing fault, update it instead of creating a new one @@ -203,7 +206,7 @@ export class FaultService { .timestamp(tripReading.timestamp) .tag('faultId', openMctFault.fault.id) .tag('podId', tripReading.podId) - .tag('measurementKey', measurement.key) + .tag('measurementKey', measurement.id) // is influx the right choice? 
probably not - but we're already using it for telemetry .stringField('fault', JSON.stringify(openMctFault)); diff --git a/telemetry/packages/server/src/modules/openmct/faults/utils/convertToOpenMctFault.ts b/telemetry/packages/server/src/modules/openmct/faults/utils/convert-to-openmct-fault.ts similarity index 88% rename from telemetry/packages/server/src/modules/openmct/faults/utils/convertToOpenMctFault.ts rename to telemetry/packages/server/src/modules/openmct/faults/utils/convert-to-openmct-fault.ts index 6dd54e17..69ce206b 100644 --- a/telemetry/packages/server/src/modules/openmct/faults/utils/convertToOpenMctFault.ts +++ b/telemetry/packages/server/src/modules/openmct/faults/utils/convert-to-openmct-fault.ts @@ -10,13 +10,13 @@ import type { Fault } from '../Fault.service'; export function convertToOpenMctFault(fault: Fault): OpenMctFault { const { measurement, tripReading, level } = fault; - const namespace = `/${tripReading.podId}/${measurement.key}`; + const namespace = `/${tripReading.podId}/${measurement.id}`; return { type: 'global-alarm-status', fault: { id: `${namespace}-${nanoid()}`, - name: `${measurement.name} is out of range`, + name: `${measurement.label} is out of range`, namespace, seqNum: 0, severity: level, diff --git a/telemetry/packages/server/src/modules/public-data/PublicData.controller.ts b/telemetry/packages/server/src/modules/public-data/PublicData.controller.ts index 666fbbf0..6c140401 100644 --- a/telemetry/packages/server/src/modules/public-data/PublicData.controller.ts +++ b/telemetry/packages/server/src/modules/public-data/PublicData.controller.ts @@ -1,5 +1,5 @@ import type { HistoricalTelemetryDataService } from '@/modules/openmct/data/historical/HistoricalTelemetryData.service'; -import { POD_IDS, type PodId } from '@hyped/telemetry-constants'; +import { podIds } from '@hyped/telemetry-constants'; import type { LevitationHeightResponse, RawLevitationHeight, @@ -168,7 +168,7 @@ export class PublicDataController { } private validatePodId(podId: string) { - if (!POD_IDS.includes(podId as PodId)) { + if (!podIds.includes(podId)) { throw new HttpException('Invalid pod ID', 400); } } diff --git a/telemetry/packages/server/src/modules/measurement/Measurement.service.ts b/telemetry/packages/server/src/modules/telemetry/Measurement.service.ts similarity index 77% rename from telemetry/packages/server/src/modules/measurement/Measurement.service.ts rename to telemetry/packages/server/src/modules/telemetry/Measurement.service.ts index ed7a4e3d..2c2f1a0a 100644 --- a/telemetry/packages/server/src/modules/measurement/Measurement.service.ts +++ b/telemetry/packages/server/src/modules/telemetry/Measurement.service.ts @@ -10,7 +10,7 @@ import { MeasurementReadingSchema, } from './MeasurementReading.types'; import { MeasurementReadingValidationError } from './errors/MeasurementReadingValidationError'; -import { doesMeasurementBreachLimits } from './utils/doesMeasurementBreachLimits'; +import { doesMeasurementBreachLimits } from './utils/limit-breach-checker'; @Injectable() export class MeasurementService { @@ -42,19 +42,18 @@ export class MeasurementService { }); // Then check if it breaches limits - if (measurement.format === 'float' || measurement.format === 'integer') { - const breachLevel = doesMeasurementBreachLimits(measurement, reading); - if (breachLevel) { - this.logger.debug( - `Measurement breached limits {${props.podId}/${props.measurementKey}}: ${breachLevel} with value ${props.value}`, - MeasurementService.name, - ); - await 
this.faultService.addLimitBreachFault({ - level: breachLevel, - measurement, - tripReading: reading, - }); - } + const breachLevel = doesMeasurementBreachLimits(measurement, value); + + if (breachLevel) { + this.logger.debug( + `Measurement breached limits {${props.podId}/${props.measurementKey}}: ${breachLevel} with value ${props.value}`, + MeasurementService.name, + ); + await this.faultService.addLimitBreachFault({ + level: breachLevel, + measurement, + tripReading: reading, + }); } // Then save it to the database @@ -62,7 +61,6 @@ export class MeasurementService { .timestamp(timestamp) .tag('podId', podId) .tag('measurementKey', measurementKey) - .tag('format', measurement.format) .floatField('value', value); try { @@ -90,9 +88,17 @@ export class MeasurementService { const { podId, measurementKey } = result.data; + const possibleMeasurement = pods?.[podId]?.measurements?.[measurementKey]; + + if (!possibleMeasurement) { + throw new MeasurementReadingValidationError( + `Measurement ${measurementKey} not found for pod ${podId}`, + ); + } + return { reading: result.data, - measurement: pods[podId].measurements[measurementKey], + measurement: possibleMeasurement, }; } } diff --git a/telemetry/packages/server/src/modules/telemetry/MeasurementReading.types.ts b/telemetry/packages/server/src/modules/telemetry/MeasurementReading.types.ts new file mode 100644 index 00000000..f3e3add8 --- /dev/null +++ b/telemetry/packages/server/src/modules/telemetry/MeasurementReading.types.ts @@ -0,0 +1,12 @@ +import { zodEnumFromObjKeys } from '@/modules/common/utils/zodEnumFromObjKeys'; +import { pods } from '@hyped/telemetry-constants'; +import { z } from 'zod'; + +export const MeasurementReadingSchema = z.object({ + podId: zodEnumFromObjKeys(pods), + measurementKey: z.string(), + timestamp: z.string(), // to handle nanoseconds timestamp + value: z.number(), +}); + +export type MeasurementReading = z.infer; diff --git a/telemetry/packages/server/src/modules/telemetry/Status.service.ts b/telemetry/packages/server/src/modules/telemetry/Status.service.ts new file mode 100644 index 00000000..8974e466 --- /dev/null +++ b/telemetry/packages/server/src/modules/telemetry/Status.service.ts @@ -0,0 +1,92 @@ +import type { InfluxService } from '@/modules/influx/Influx.service'; +import { Logger } from '@/modules/logger/Logger.decorator'; +import type { RealtimeTelemetryDataGateway } from '@/modules/openmct/data/realtime/RealtimeTelemetryData.gateway'; +import type { FaultService } from '@/modules/openmct/faults/Fault.service'; +import { pods } from '@hyped/telemetry-constants'; +import { Point } from '@influxdata/influxdb-client'; +import { Injectable, type LoggerService } from '@nestjs/common'; +import { type StatusEntry, StatusEntrySchema } from './StatusEntry.types'; +import { StatusEntryValidationError } from './errors/StatusEntryValidationError'; + +@Injectable() +export class StatusService { + constructor( + @Logger() + private readonly logger: LoggerService, + private influxService: InfluxService, + private realtimeDataGateway: RealtimeTelemetryDataGateway, + ) {} + + // This function _is_ ordered in importance + public async addStatusEntry(props: StatusEntry) { + const validatedStatusEntry = this.validateStatusEntry(props); + + if (!validatedStatusEntry) { + throw new StatusEntryValidationError('Invalid status entry'); + } + + const { + status, + entry: { podId, value, timestamp }, + } = validatedStatusEntry; + + // First, get the data to the client ASAP + this.realtimeDataGateway.sendMeasurementReading({ + 
podId, + measurementKey: status.id, // temp + value, + timestamp, + }); + + // Then save it to the database + const point = new Point('measurement') // temp until we switch to postgres + .timestamp(timestamp) + .tag('podId', podId) + .tag('measurementKey', status.id) + .floatField('value', value); + + try { + this.influxService.telemetryWrite.writePoint(point); + + this.logger.debug( + `Added status {${props.podId}/${props.statusId}}: ${props.value}`, + StatusService.name, + ); + } catch (e: unknown) { + this.logger.error( + `Failed to add status {${props.podId}/${props.statusId}}: ${props.value}`, + e, + StatusService.name, + ); + } + } + + private validateStatusEntry(props: StatusEntry) { + const result = StatusEntrySchema.safeParse(props); + + if (!result.success) { + throw new StatusEntryValidationError(result.error.message); + } + + const { podId, statusId, value } = result.data; + + const possibleStatus = pods?.[podId]?.statuses?.[statusId]; + + if (!possibleStatus) { + throw new StatusEntryValidationError( + `Status ${statusId} not found for pod ${podId}`, + ); + } + + if (!possibleStatus.values.map((v) => v.value).includes(value)) { + throw new StatusEntryValidationError( + `Status '${statusId}' value '${value}' is not valid`, + ); + } + + return { + status: possibleStatus, + entry: result.data, + }; + } +} diff --git a/telemetry/packages/server/src/modules/telemetry/StatusEntry.types.ts b/telemetry/packages/server/src/modules/telemetry/StatusEntry.types.ts new file mode 100644 index 00000000..4fdc40ea --- /dev/null +++ b/telemetry/packages/server/src/modules/telemetry/StatusEntry.types.ts @@ -0,0 +1,12 @@ +import { zodEnumFromObjKeys } from '@/modules/common/utils/zodEnumFromObjKeys'; +import { pods } from '@hyped/telemetry-constants'; +import { z } from 'zod'; + +export const StatusEntrySchema = z.object({ + podId: zodEnumFromObjKeys(pods), + statusId: z.string(), + timestamp: z.string(), + value: z.number(), +}); + +export type StatusEntry = z.infer; diff --git a/telemetry/packages/server/src/modules/measurement/Measurement.module.ts b/telemetry/packages/server/src/modules/telemetry/Telemetry.module.ts similarity index 92% rename from telemetry/packages/server/src/modules/measurement/Measurement.module.ts rename to telemetry/packages/server/src/modules/telemetry/Telemetry.module.ts index d41823ed..8af448fc 100644 --- a/telemetry/packages/server/src/modules/measurement/Measurement.module.ts +++ b/telemetry/packages/server/src/modules/telemetry/Telemetry.module.ts @@ -9,4 +9,4 @@ import { MeasurementService } from './Measurement.service'; providers: [MeasurementService], exports: [MeasurementService], }) -export class MeasurementModule {} +export class TelemetryModule {} diff --git a/telemetry/packages/server/src/modules/measurement/errors/MeasurementReadingValidationError.ts b/telemetry/packages/server/src/modules/telemetry/errors/MeasurementReadingValidationError.ts similarity index 100% rename from telemetry/packages/server/src/modules/measurement/errors/MeasurementReadingValidationError.ts rename to telemetry/packages/server/src/modules/telemetry/errors/MeasurementReadingValidationError.ts diff --git a/telemetry/packages/server/src/modules/telemetry/errors/StatusEntryValidationError.ts b/telemetry/packages/server/src/modules/telemetry/errors/StatusEntryValidationError.ts new file mode 100644 index 00000000..4e103fb8 --- /dev/null +++ b/telemetry/packages/server/src/modules/telemetry/errors/StatusEntryValidationError.ts @@ -0,0 +1,6 @@ +export class StatusEntryValidationError 
extends Error { + constructor(message: string) { + super(message); + this.name = 'StatusEntryValidationError'; + } +} diff --git a/telemetry/packages/server/src/modules/measurement/utils/doesMeasurementBreachLimits.ts b/telemetry/packages/server/src/modules/telemetry/utils/limit-breach-checker.ts similarity index 76% rename from telemetry/packages/server/src/modules/measurement/utils/doesMeasurementBreachLimits.ts rename to telemetry/packages/server/src/modules/telemetry/utils/limit-breach-checker.ts index f6a5092e..ebc000bf 100644 --- a/telemetry/packages/server/src/modules/measurement/utils/doesMeasurementBreachLimits.ts +++ b/telemetry/packages/server/src/modules/telemetry/utils/limit-breach-checker.ts @@ -1,15 +1,13 @@ import type { FaultLevel } from '@hyped/telemetry-constants'; -import type { RangeMeasurement } from '@hyped/telemetry-types/dist/pods/pods.types'; +import type { Measurement } from '@hyped/telemetry-types'; import type { MeasurementReading } from '../MeasurementReading.types'; export type DoesMeasurementBreachLimitsReturn = false | FaultLevel; export function doesMeasurementBreachLimits( - measurement: RangeMeasurement, - reading: MeasurementReading, + measurement: Measurement, + value: MeasurementReading['value'], ): DoesMeasurementBreachLimitsReturn { - const { value } = reading; - const { low, high } = measurement.limits.critical; if (value < low || value > high) { return 'CRITICAL'; diff --git a/telemetry/packages/types/package.json b/telemetry/packages/types/package.json index 2cf81abe..645ff8c5 100644 --- a/telemetry/packages/types/package.json +++ b/telemetry/packages/types/package.json @@ -11,6 +11,7 @@ }, "dependencies": { "@hyped/tsconfig": "workspace:*", - "typescript": "^5.7.3" + "typescript": "^5.7.3", + "zod": "^3.21.4" } } diff --git a/telemetry/packages/types/src/index.ts b/telemetry/packages/types/src/index.ts index dcc5f34d..83037e6a 100644 --- a/telemetry/packages/types/src/index.ts +++ b/telemetry/packages/types/src/index.ts @@ -1,9 +1,9 @@ +export { PodSchema } from './pods/pods'; export type { - Measurement, - RangeMeasurement, - Limits, Pod, -} from './pods/pods.types'; + Measurement, + Status, +} from './pods/pods'; export type { OpenMctDictionary, OpenMctPod, diff --git a/telemetry/packages/types/src/openmct/openmct-dictionary.types.ts b/telemetry/packages/types/src/openmct/openmct-dictionary.types.ts index f54c2f58..3bcb5580 100644 --- a/telemetry/packages/types/src/openmct/openmct-dictionary.types.ts +++ b/telemetry/packages/types/src/openmct/openmct-dictionary.types.ts @@ -1,4 +1,4 @@ -import type { Limits } from '../pods/pods.types'; +import type { MeasurementLimits } from '../pods/pods'; /** * Type of an Open MCT measurement. 
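For orientation, a minimal usage sketch of the renamed mapping helper shown earlier in this diff (map-to-openmct): with the flattened Measurement type, every measurement now carries numeric limits, so the OpenMCT value entry can always expose min, max and limits without first narrowing on format. This sketch is not part of the changeset; the measurement literal and the relative import path are illustrative assumptions.

import type { Measurement } from '@hyped/telemetry-types';
import { mapMeasurementToOpenMct } from './utils/map-to-openmct';

// Hypothetical measurement, shaped like the new zod-backed Measurement type.
const exampleSensor: Measurement = {
  id: 'example_sensor',
  label: 'Example Sensor',
  type: 'levitation',
  unit: 'mm',
  format: 'float',
  limits: { critical: { low: 0, high: 100 } },
};

const openMct = mapMeasurementToOpenMct(exampleSensor);
// openMct.key === 'example_sensor', openMct.name === 'Example Sensor'
// openMct.values[0].min === 0, openMct.values[0].max === 100
// openMct.values[0].limits is the measurement's limits object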
@@ -14,7 +14,7 @@ export type OpenMctMeasurement = { format: string; min?: number; max?: number; - limits?: Limits; + limits?: MeasurementLimits; enumerations?: { value: number; string: string; diff --git a/telemetry/packages/types/src/pods/pods.ts b/telemetry/packages/types/src/pods/pods.ts new file mode 100644 index 00000000..3cc66875 --- /dev/null +++ b/telemetry/packages/types/src/pods/pods.ts @@ -0,0 +1,50 @@ +import { z } from 'zod'; + +export const LimitSchema = z.object({ + low: z.number(), + high: z.number(), +}); + +export type Limit = z.infer; + +export const MeasurementLimitsSchema = z.object({ + critical: LimitSchema, + warning: LimitSchema.optional(), +}); + +export type MeasurementLimits = z.infer; + +export const MeasurementSchema = z.object({ + id: z.string(), + label: z.string(), + unit: z.string(), + type: z.string(), + format: z.enum(['float', 'integer']), + limits: MeasurementLimitsSchema, +}); + +export type Measurement = z.infer; + +export const StatusSchema = z.object({ + id: z.string(), + label: z.string(), + format: z.literal('enum'), + values: z.array( + z.object({ + value: z.number(), + label: z.string(), + }), + ), +}); + +export type Status = z.infer; + +export const PodSchema = z.object({ + id: z.string(), + label: z.string(), + mode: z.enum(['ALL_SYSTEMS_ON', 'LEVITATION_ONLY', 'LIM_ONLY']), + measurements: z.record(z.string(), MeasurementSchema), + statuses: z.record(z.string(), StatusSchema), +}); + +export type Pod = z.infer; diff --git a/telemetry/packages/types/src/pods/pods.types.ts b/telemetry/packages/types/src/pods/pods.types.ts deleted file mode 100644 index 361bdc98..00000000 --- a/telemetry/packages/types/src/pods/pods.types.ts +++ /dev/null @@ -1,49 +0,0 @@ -// common properties shared by all response variables -export type BaseMeasurement = { - name: string; - key: string; - unit: string; - type: string; -}; - -// range limits not to be exceeded -// some give warnings when reaching range limits -export type Limits = { - warning?: { - low: number; - high: number; - }; - critical: { - low: number; - high: number; - }; -}; - -// For numerical sensor readings described by operational range sampling parameters -export type RangeMeasurement = BaseMeasurement & { - format: 'float' | 'integer'; - limits: Limits; - rms_noise: number; - sampling_time: number; -}; - -// For discrete status measurements with enumerated states -export type EnumMeasurement = BaseMeasurement & { - format: 'enum'; - enumerations: { - value: number; - string: string; - }[]; -}; - -// export type Measurement as union -export type Measurement = RangeMeasurement | EnumMeasurement; - -// create Pod type -export type Pod = { - name: string; - id: string; - measurements: Record; - // Not ideal given this is defined in the constants package but will do until TOML is done - operationMode: 'ALL_SYSTEMS_ON' | 'LEVITATION_ONLY' | 'LIM_ONLY'; -}; diff --git a/telemetry/pnpm-lock.yaml b/telemetry/pnpm-lock.yaml index 2337453f..8c4608f8 100644 --- a/telemetry/pnpm-lock.yaml +++ b/telemetry/pnpm-lock.yaml @@ -36,6 +36,13 @@ importers: version: 5.7.3 packages/constants: + dependencies: + yaml: + specifier: ^2.7.0 + version: 2.7.0 + zod: + specifier: ^3.21.4 + version: 3.21.4 devDependencies: '@hyped/telemetry-types': specifier: workspace:* @@ -43,6 +50,9 @@ importers: '@hyped/tsconfig': specifier: workspace:* version: link:../tsconfig + '@types/node': + specifier: ^22.12.0 + version: 22.12.0 dts-cli: specifier: ^2.0.5 version: 
2.0.5(@babel/plugin-syntax-flow@7.26.0(@babel/core@7.26.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.26.0))(@jest/transform@29.7.0)(@jest/types@29.6.3)(@types/babel__core@7.20.1)(@types/node@22.12.0) @@ -298,6 +308,9 @@ importers: typescript: specifier: ^5.7.3 version: 5.7.3 + zod: + specifier: ^3.21.4 + version: 3.21.4 packages/ui: dependencies: @@ -7098,6 +7111,11 @@ packages: resolution: {integrity: sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==} engines: {node: '>= 14'} + yaml@2.7.0: + resolution: {integrity: sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==} + engines: {node: '>= 14'} + hasBin: true + yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} @@ -15124,6 +15142,8 @@ snapshots: yaml@2.3.1: {} + yaml@2.7.0: {} + yargs-parser@21.1.1: {} yargs@17.7.2:
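The lockfile changes above add yaml and zod to the constants package, which suggests the pod definitions now come from a YAML config file validated against the new PodSchema rather than from hand-written TypeScript. The constants source is not part of this excerpt, so the file path, the top-level wrapper shape, and the id handling in the sketch below are assumptions, not the actual loader.

import { readFileSync } from 'node:fs';
import { parse } from 'yaml';
import { z } from 'zod';
import { PodSchema } from '@hyped/telemetry-types';

// Assumed top-level shape: a `pods` map keyed by pod id.
const PodsFileSchema = z.object({ pods: z.record(z.string(), PodSchema) });

// Assumed file location. The real loader presumably injects the `id` fields
// on pods, measurements and statuses from the YAML keys before validating;
// that step is omitted here.
export const pods = PodsFileSchema.parse(
  parse(readFileSync('config/pods.yaml', 'utf8')),
).pods;

export const podIds = Object.keys(pods);

Downstream consumers then only need membership checks, e.g. podIds.includes(podId), which matches the simplified validatePodId helpers in the ingestion service, dictionary service, and public-data controller changes earlier in this diff.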