5 Commits

Author SHA1 Message Date
340df9a5d4 Refactor Mqtt Sync client (#5)
Co-authored-by: Felipe Diniello <felipediniello@pm.me>
Reviewed-on: #5
2023-08-01 20:07:43 +02:00
c529335eac Antennas basic endpoints (#4)
Co-authored-by: Felipe Diniello <felipediniello@pm.me>
Reviewed-on: #4
2023-07-23 18:53:49 +02:00
990e8955e4 Fix left overs after models refactor (#3)
Co-authored-by: Felipe Diniello <felipediniello@pm.me>
Reviewed-on: #3
2023-06-19 18:37:49 +02:00
962b90e1b8 Refactor Models for InfluxDB (#1)
Update to InfluxDB 2.0 and update all interfaces to work with it.

MAC has been deprecated, since current `influxdb2` doesn't support non built-in types for read/write into the DB.

Co-authored-by: Felipe Diniello <felipediniello@pm.me>
Reviewed-on: #1
2023-06-18 18:43:15 +02:00
a5976252e8 Create workspace from previous implementation files 2023-06-12 23:21:33 +02:00
34 changed files with 1667 additions and 15 deletions

8
.env Normal file
View File

@@ -0,0 +1,8 @@
DATABASE_URL=postgres://kairo:AJzYhFltZXRiGQ@localhost/kairoXYZ_db
INFLUX_HOST=http://localhost:8086
INFLUX_BUCKET=db0
INFLUX_ORG=kario
INFLUX_TOKEN=82GAOBcdQoPnFNp_aew3DPffg44ihr4-lxs2BMGQ7RJ6nZyqSAFerX-WaHgLC47hTI23LgOauEfyTU_FKT0SpQ==
MQTT_BROKER=tcp://localhost:1883

2
.gitignore vendored
View File

@@ -1,2 +1,4 @@
target
Cargo.lock
data
*.code-workspace

22
Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[workspace]
members = [
# Common/shared source code used across all the services:
"kairo-common",
# The intended backend application to expose a REST API:
"kairo-core",
# The intended frontend application for GUI navigation:
"kairo-nav",
# Tools for testing or simulating other components:
"simulation-tools",
# The service doing the calculations:
"xyz-engine"
]
[workspace.dependencies]
tokio = { version = "1.28.1", features = ["rt-multi-thread", "macros"] }
dotenv = "0.15.0"
chrono = "0.4.24"
paho-mqtt = "0.12.1"
serde = "1.0.162"
serde_json = { version = "1.0.95" }
diesel = { version = "2.1.0", features = ["postgres", "extras"] }

34
docker-compose.yml Normal file
View File

@@ -0,0 +1,34 @@
version: "3"
services:
postgres:
image: "postgres:latest"
container_name: "postgres"
ports:
- 5432:5432
environment:
- POSTGRES_USER=kairo
- POSTGRES_PASSWORD=AJzYhFltZXRiGQ
- POSTGRES_DB=kairoXYZ_db
# volumes:
# - ./data/postgres:/var/lib/postgresql/data/
mqtt_broker:
image: "eclipse-mosquitto:latest"
container_name: "mosquitto"
network_mode: host
influx:
container_name: "influxdb"
tmpfs:
- /var/lib/influxdb
ports:
- 8086:8086
image: "influxdb:latest"
environment:
- DOCKER_INFLUXDB_INIT_MODE=setup
- DOCKER_INFLUXDB_INIT_USERNAME=user
- DOCKER_INFLUXDB_INIT_PASSWORD=Lkj9s2iAnd7Gxg
- DOCKER_INFLUXDB_INIT_ORG=kario
- DOCKER_INFLUXDB_INIT_BUCKET=db0
- DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=82GAOBcdQoPnFNp_aew3DPffg44ihr4-lxs2BMGQ7RJ6nZyqSAFerX-WaHgLC47hTI23LgOauEfyTU_FKT0SpQ==

kairo-common/Cargo.toml
View File

@@ -3,6 +3,22 @@ name = "kairo-common"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # [[test]]
# name = "all"
# path = "test/all.rs"
[dependencies] [dependencies]
paho-mqtt = { workspace = true }
tokio = { workspace = true }
dotenv = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
diesel = { workspace = true }
influxdb2 = "0.4.2"
influxdb2-structmap = "0.2"
influxdb2-derive = "0.1.1"
futures = "0.3.28"
num-traits = "0.2"

9
kairo-common/diesel.toml Normal file
View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId"]
[migrations_directory]
dir = "migrations"

6
kairo-common/migrations/00000000000000_diesel_initial_setup/down.sql Normal file
View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

36
kairo-common/migrations/00000000000000_diesel_initial_setup/up.sql Normal file
View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
DROP TABLE antennas;

View File

@@ -0,0 +1,10 @@
-- Your SQL goes here
CREATE TABLE antennas (
id VARCHAR(17) PRIMARY KEY,
tssi DOUBLE PRECISION NOT NULL,
pos_x DOUBLE PRECISION NOT NULL,
pos_y DOUBLE PRECISION NOT NULL,
pos_z DOUBLE PRECISION NOT NULL,
comment TEXT
);

123
kairo-common/src/influx.rs Normal file
View File

@@ -0,0 +1,123 @@
use std::cell::RefCell;
#[derive(Debug, Clone)]
pub struct Client {
client: influxdb2::Client,
// We should have two buckets, one temporary and one permanent, but for now we use just one
bucket: String,
}
pub enum Bucket {
Tmp,
Perm,
}
thread_local! {
static INFLUX_CLIENT: RefCell<Client> = Client::new();
}
impl Client {
fn new() -> RefCell<Client> {
let host = dotenv::var("INFLUX_HOST").unwrap_or_else(|_| {
println! {"INFLUX_HOST not found in .env file, using default: http://localhost:8086"};
"http://localhost:8086".to_string()
});
let bucket = dotenv::var("INFLUX_BUCKET").expect("INFLUX_BUCKET not defined in .env file");
let org = dotenv::var("INFLUX_ORG").expect("INFLUX_ORG not defined in .env file");
let token = dotenv::var("INFLUX_TOKEN").expect("INFLUX_TOKEN not defined in .env file");
RefCell::new(Client {
client: influxdb2::Client::new(host, org, token),
bucket,
})
}
pub fn get() -> Client {
INFLUX_CLIENT.with(|rc| rc.borrow().clone())
}
pub async fn write(
&self,
_bucket: Bucket,
body: impl futures::Stream<Item = impl influxdb2::models::WriteDataPoint>
+ Send
+ Sync
+ 'static,
) -> Result<(), influxdb2::RequestError> {
// TODO: use _bucket to choose from internal list
self.client.write(self.bucket.as_str(), body).await
}
pub async fn query<T>(
&self,
_bucket: Bucket,
q: String,
) -> Result<Vec<T>, influxdb2::RequestError>
where
T: influxdb2_structmap::FromMap,
{
// TODO: use _bucket to choose from internal list
let from_bucket = format!("from(bucket: \"{}\")", self.bucket);
let query = from_bucket + &q;
let query = influxdb2::models::Query::new(query);
self.client.query::<T>(Some(query)).await
}
}
#[cfg(test)]
mod test {
use crate::influx::{Bucket, Client};
#[tokio::test]
async fn test_new_get_cli() {
let health = Client::get().client.health().await;
assert!(health.is_ok())
}
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
#[derive(Default, Debug, PartialEq, FromDataPoint, WriteDataPoint)]
#[measurement = "stock_prices"]
struct StockPrice {
#[influxdb(tag)]
ticker: String,
#[influxdb(field)]
value: f64,
#[influxdb(timestamp)]
time: i64,
}
#[tokio::test]
async fn test_write_then_query() {
let time = chrono::Utc::now().timestamp_nanos();
let w = StockPrice {
ticker: "ASDF".into(),
value: 150.5,
time,
};
let res = Client::get()
.write(Bucket::Perm, futures::stream::iter([w]))
.await;
assert!(res.is_ok());
let query = format!(
"
|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"stock_prices\")
|> filter(fn: (r) => r[\"ticker\"] == \"ASDF\")
|> sort(columns: [\"time\"], desc: true)
"
);
let r = Client::get()
.query::<StockPrice>(Bucket::Perm, query)
.await
.unwrap();
assert!(r.len() > 0);
assert_eq!(r[0].ticker, "ASDF");
assert_eq!(r[0].value, 150.5);
}
}
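
The two TODOs above both point at the same missing piece: mapping Bucket::Tmp and Bucket::Perm to separate bucket names. A minimal sketch of that mapping, assuming hypothetical `tmp_bucket`/`perm_bucket` fields (neither exists in this diff; today both variants resolve to the single `bucket` field):

// Hypothetical sketch only — not part of this diff.
pub enum Bucket {
    Tmp,
    Perm,
}

pub struct Client {
    tmp_bucket: String,  // e.g. read from a hypothetical INFLUX_BUCKET_TMP variable
    perm_bucket: String, // e.g. read from INFLUX_BUCKET
}

impl Client {
    fn bucket_name(&self, bucket: Bucket) -> &str {
        match bucket {
            Bucket::Tmp => &self.tmp_bucket,
            Bucket::Perm => &self.perm_bucket,
        }
    }
}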

kairo-common/src/lib.rs
View File

@@ -1,14 +1,40 @@
#![allow(clippy::upper_case_acronyms)]
#![allow(mixed_script_confusables)]
#![allow(non_upper_case_globals)]
#![allow(confusable_idents)]
pub mod influx;
pub mod postgres;
// random functions for mqtt
pub mod mqtt;
pub mod unit_conversion;
// Commonly used types across the services
mod types {
pub mod mac; // deprecated for the time being.
pub mod point;
}
pub type Point = types::point::Point;
pub type MAC = types::mac::MAC;
// DB models: for SQL with Diesel and InfluxDB and influxdb-derive
pub mod schema;
mod models {
pub mod antenna;
pub mod beacon_measure;
pub mod dynamic_device_status;
pub mod known_position;
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct DeviceReport {
pub data: Vec<crate::models::beacon_measure::BeaconMeasure>,
}
}
pub type DeviceReport = models::DeviceReport;
pub type Antenna = models::antenna::Antenna;
pub type KnownPosition = models::known_position::KnownPosition;
pub type DynamicDeviceStatus = models::dynamic_device_status::DynamicDeviceStatus;
pub type BeaconMeasure = models::beacon_measure::BeaconMeasure;
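
DeviceReport is the JSON payload the simulator publishes and xyz-engine parses (both appear later in this diff). A quick round-trip sketch; the IDs and RSSI value are illustrative:

use kairo_common::{BeaconMeasure, DeviceReport};

fn main() {
    // The simulator publishes RSSI in dBm here; xyz-engine converts to W before writing.
    let report = DeviceReport {
        data: vec![BeaconMeasure::new("60:f2:62:01:a9:28", "e6:ad:0b:2e:d7:11", -54.0)],
    };
    let json = serde_json::to_string(&report).unwrap();
    let back: DeviceReport = serde_json::from_str(&json).unwrap();
    assert_eq!(back.data[0].beacon_id, "e6:ad:0b:2e:d7:11");
}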

96
kairo-common/src/models/antenna.rs Normal file
View File

@@ -0,0 +1,96 @@
use diesel::prelude::*;
use std::f64::consts::PI;
use crate::{unit_conversion::UnitsConversion, Point};
#[derive(
Debug,
Clone,
Queryable,
Selectable,
Insertable,
AsChangeset,
serde::Serialize,
serde::Deserialize,
)]
#[diesel(check_for_backend(diesel::pg::Pg))]
#[diesel(table_name = crate::schema::antennas)]
pub struct Antenna {
pub id: String,
pub tssi: f64,
pub pos_x: f64,
pub pos_y: f64,
pub pos_z: f64,
pub comment: Option<String>,
}
impl Antenna {
const C: f64 = 2.99e8;
const F: f64 = 2.4e9;
const λ: f64 = Self::C / Self::F;
pub fn new(id: &str, tssi: f64, coord: Point) -> Antenna {
Antenna {
id: id.into(),
comment: None,
pos_x: coord.x,
pos_y: coord.y,
pos_z: 0.0,
tssi,
}
}
pub fn coord(&self) -> Point {
Point::new(self.pos_x, self.pos_y)
}
pub fn get_rssi(&self, distance: f64) -> f64 {
#[allow(non_snake_case)]
// Free Space Path Loss
let FSPL = (((distance * 4.0 * PI) / Self::λ).powi(2)).to_dB();
self.tssi - FSPL
}
#[allow(non_snake_case)]
pub fn get_distance_with_dBm(&self, rssi_dBm: f64) -> f64 {
let loss = self.tssi.dBm_to_W() / rssi_dBm.dBm_to_W();
let distance = (loss.sqrt() * Self::λ) / (4.0 * PI);
distance.abs()
}
#[allow(non_snake_case)]
pub fn get_distance_with_W(&self, rssi_W: f64) -> f64 {
let loss = self.tssi.dBm_to_W() / rssi_W;
let distance = (loss.sqrt() * Self::λ) / (4.0 * PI);
distance.abs()
}
}
#[test]
fn test() {
let tssi = 0.0; // dBm
let a = Antenna::new("AB:CD:EF:12:34:56", tssi, Point { x: 0.0, y: 0.0 });
// Known Attenuation values for 2.4GHz
// 5 meter = 54.02 dB = 3.96e-9 W
// 10 meter = 60.04 dB = 9.91e-10 W
// 20 meter = 66.06 dB = 2.48e-10 W
print!("Testing Antenna::get_rssi()");
assert!(f64::abs(-54.02 - a.get_rssi(5.0)) < 0.1);
assert!(f64::abs(-60.04 - a.get_rssi(10.0)) < 0.1);
assert!(f64::abs(-66.06 - a.get_rssi(20.0)) < 0.1);
println!(" ... ok");
print!("Testing Antenna::get_distance_with_dBm()");
assert!(f64::abs(5.0 - a.get_distance_with_dBm(-54.02)) < 0.5);
assert!(f64::abs(10.0 - a.get_distance_with_dBm(-60.04)) < 0.5);
assert!(f64::abs(20.0 - a.get_distance_with_dBm(-66.06)) < 0.5);
println!(" ... ok");
print!("Testing Antenna::get_distance_with_W()");
assert!(f64::abs(5.0 - a.get_distance_with_W(3.98e-9)) < 0.5);
assert!(f64::abs(10.0 - a.get_distance_with_W(9.91e-10)) < 0.5);
assert!(f64::abs(20.0 - a.get_distance_with_W(2.48e-10)) < 0.5);
println!(" ... ok");
}
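
For reference, the free-space path loss model behind get_rssi and its inversion in the two get_distance_with_* methods, with λ = C/F as defined in the impl:

\mathrm{FSPL} = \left(\frac{4\pi d}{\lambda}\right)^{2}
\;\Rightarrow\;
\mathrm{FSPL_{dB}} = 20\log_{10}\frac{4\pi d}{\lambda},
\qquad
\mathrm{RSSI_{dBm}} = \mathrm{TSSI_{dBm}} - \mathrm{FSPL_{dB}},
\qquad
d = \frac{\lambda}{4\pi}\sqrt{\frac{P_{tx}}{P_{rx}}}

Worked check: with λ ≈ 0.1246 m and d = 10 m, FSPL_dB = 20 log10(4π · 10 / 0.1246) ≈ 60.07 dB, consistent with the −60.04 dBm the test expects within its 0.1 dB tolerance.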

68
kairo-common/src/models/beacon_measure.rs Normal file
View File

@@ -0,0 +1,68 @@
use serde::{Deserialize, Serialize};
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
#[derive(
Debug, Default, PartialEq, Clone, Serialize, Deserialize, FromDataPoint, WriteDataPoint,
)]
#[measurement = "beacon_measures"]
pub struct BeaconMeasure {
#[influxdb(tag)]
pub device_id: String,
#[influxdb(tag)]
pub beacon_id: String,
pub rssi: f64,
#[influxdb(timestamp)]
pub time: i64,
}
impl BeaconMeasure {
#[allow(non_snake_case)]
pub fn new(device_id: &str, beacon_id: &str, rssi_W: f64) -> BeaconMeasure {
BeaconMeasure {
device_id: device_id.into(),
beacon_id: beacon_id.to_owned(),
rssi: rssi_W,
time: chrono::Utc::now().timestamp_nanos(),
}
}
}
#[cfg(test)]
mod test {
use crate::influx::{Bucket, Client};
use crate::BeaconMeasure;
#[tokio::test]
async fn influx_test() {
let device_id = String::from("AB:CD:EF:01:23:45");
let beacon_id = String::from("01:23:45:AB:CD:EF");
let rssi_w = 0.001;
let bm = BeaconMeasure::new(&device_id, &beacon_id, rssi_w);
let res = Client::get()
.write(Bucket::Tmp, futures::stream::iter([bm]))
.await;
assert!(res.is_ok());
let query = format!(
"
|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"beacon_measures\")
|> filter(fn: (r) => r[\"beacon_id\"] == \"{}\" )
",
beacon_id
);
let r = Client::get()
.query::<BeaconMeasure>(Bucket::Tmp, query)
.await
.unwrap();
assert!(r.len() > 0);
assert_eq!(r[0].beacon_id, beacon_id);
assert_eq!(r[0].device_id, device_id);
assert_eq!(r[0].rssi, rssi_w);
}
}

10
kairo-common/src/models/dynamic_device_status.rs Normal file
View File

@@ -0,0 +1,10 @@
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DynamicDeviceStatus {
id: String,
pos_x: f64,
pos_y: f64,
pos_z: f64,
speed_x: f64,
speed_y: f64,
pub last_seen: chrono::DateTime<chrono::Utc>,
}

31
kairo-common/src/models/known_position.rs Normal file
View File

@@ -0,0 +1,31 @@
use serde::{Deserialize, Serialize};
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
use crate::Point;
#[derive(
Debug, Default, PartialEq, Clone, Serialize, Deserialize, FromDataPoint, WriteDataPoint,
)]
#[measurement = "known_positions"]
pub struct KnownPosition {
#[influxdb(tag)]
pub id: String,
pub x: f64,
pub y: f64,
pub z: f64,
#[influxdb(timestamp)]
pub time: i64,
}
impl KnownPosition {
pub fn new(device_id: &str, pos: Point) -> KnownPosition {
KnownPosition {
id: device_id.into(),
time: chrono::Utc::now().timestamp_nanos(),
x: pos.x,
y: pos.y,
z: 0.0,
}
}
}

112
kairo-common/src/mqtt.rs Normal file
View File

@@ -0,0 +1,112 @@
pub mod for_async {
use mqtt::{AsyncClient, Message};
use paho_mqtt as mqtt;
use std::{process, time::Duration};
pub async fn get_mqtt_cli_and_stream(
) -> (mqtt::AsyncClient, mqtt::AsyncReceiver<Option<Message>>) {
let host = dotenv::var("MQTT_BROKER").unwrap_or_else(|_| {
println! {"MQTT_BROKER not found in .evn file, using default: tcp://localhost:1883"};
"tcp://localhost:1883".to_string()
});
// Create the client. Use an ID for a persistent session.
// A real system should try harder to use a unique ID.
let mqtt_options = mqtt::CreateOptionsBuilder::new()
.server_uri(host)
.client_id("mmRTLS_async_subscribe")
.finalize();
// Create the client connection
let mut client = AsyncClient::new(mqtt_options).unwrap_or_else(|e| {
println!("Error creating the client: {:?}", e);
process::exit(1);
});
// Get message stream before connecting.
let stream = client.get_stream(25);
// Define the set of options for the connection
let conn_opts = mqtt::ConnectOptionsBuilder::new()
.keep_alive_interval(Duration::from_secs(30))
.clean_session(false)
.finalize();
// Make the connection to the broker
println!("Connecting to the MQTT server...");
client.connect(conn_opts).await.unwrap_or_else(|e| {
println!("Error connecting to the broker: {:?}", e);
process::exit(1);
});
(client, stream)
}
pub async fn mqtt_cli_reconnect(client: &mqtt::AsyncClient) {
println!("Lost connection. Attempting reconnect.");
while let Err(err) = client.reconnect().await {
println!("Error reconnecting: {}", err);
tokio::time::sleep(Duration::from_millis(1000)).await;
}
}
pub async fn mqtt_subscribe(client: &mqtt::AsyncClient, topic: &str) {
client
.subscribe(topic, 1)
.await
.expect("Unable to subscribe");
}
}
pub mod for_sync {
use paho_mqtt as mqtt;
use std::{process, time::Duration};
pub struct MqttClient {
cli: mqtt::Client,
}
impl MqttClient {
pub fn new(client_id: Option<&str>) -> MqttClient {
let host = dotenv::var("MQTT_BROKER").unwrap_or_else(|_| {
println! {"MQTT_BROKER not found in .evn file, using default: tcp://localhost:1883"};
"tcp://localhost:1883".to_string()
});
let mut cli = if let Some(client_id) = client_id {
mqtt::Client::new((host, String::from(client_id))).unwrap_or_else(|e| {
println!("Error creating the client: {:?}", e);
process::exit(1);
})
} else {
mqtt::Client::new(host).unwrap_or_else(|e| {
println!("Error creating the client: {:?}", e);
process::exit(1);
})
};
// Use 5sec timeouts for sync calls.
cli.set_timeout(Duration::from_secs(5));
// Connect and wait for it to complete or fail
if let Err(e) = cli.connect(None) {
println!("Unable to connect: {:?}", e);
process::exit(1);
}
MqttClient { cli }
}
pub fn publish(&self, topic: &str, payload: Option<&str>) -> Result<(), paho_mqtt::Error> {
let msg = if let Some(payload) = payload {
mqtt::MessageBuilder::new()
.topic(topic)
.qos(0)
.payload(payload)
.finalize()
} else {
mqtt::MessageBuilder::new().topic(topic).qos(0).finalize()
};
self.cli.publish(msg)
}
}
}
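
A minimal publisher sketch using the sync wrapper above; the client ID, topic, and payload are illustrative, and it assumes a reachable broker configured via MQTT_BROKER:

use kairo_common::mqtt::for_sync::MqttClient;

fn main() {
    // Reads MQTT_BROKER from .env, falling back to tcp://localhost:1883.
    let client = MqttClient::new(Some("example_publisher"));
    client
        .publish("device/60:f2:62:01:a9:28/report", Some("{\"data\":[]}"))
        .expect("publish failed");
}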

25
kairo-common/src/postgres.rs Normal file
View File

@@ -0,0 +1,25 @@
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
pub type DbConn = diesel::pg::PgConnection;
pub type DbPool = Pool<ConnectionManager<PgConnection>>;
pub struct DbPooledConn(pub PooledConnection<ConnectionManager<PgConnection>>);
pub fn establish_connection() -> DbConn {
let database_url = dotenv::var("DATABASE_URL").expect("DATABASE_URL must be set");
PgConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}
pub fn init_pool() -> DbPool {
let database_url = dotenv::var("DATABASE_URL").expect("DATABASE_URL must be set");
let manager = ConnectionManager::<PgConnection>::new(database_url);
DbPool::new(manager).expect("Error connecting to DB")
}
impl std::ops::Deref for DbPooledConn {
type Target = PgConnection;
fn deref(&self) -> &Self::Target {
&self.0
}
}
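
The Deref impl lets DbPooledConn be handed to code expecting a plain PgConnection reference; a short sketch, assuming DATABASE_URL is set:

use kairo_common::postgres::{init_pool, DbPooledConn};

fn main() {
    let pool = init_pool();
    let conn = DbPooledConn(pool.get().expect("pool exhausted"));
    // Deref coercion: &DbPooledConn becomes &PgConnection.
    let _conn_ref: &diesel::PgConnection = &conn;
}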

13
kairo-common/src/schema.rs Normal file
View File

@@ -0,0 +1,13 @@
// @generated automatically by Diesel CLI.
diesel::table! {
antennas (id) {
#[max_length = 17]
id -> Varchar,
tssi -> Float8,
pos_x -> Float8,
pos_y -> Float8,
pos_z -> Float8,
comment -> Nullable<Text>,
}
}

113
kairo-common/src/types/mac.rs Normal file
View File

@@ -0,0 +1,113 @@
#[derive(Default, Clone, Copy, Hash, PartialEq, Eq)]
pub struct MAC {
s: [u8; 17],
}
impl MAC {
pub fn new(s: &str) -> MAC {
std::str::FromStr::from_str(s).unwrap()
}
pub fn as_str(&self) -> &str {
let a = std::str::from_utf8(&self.s);
a.unwrap()
}
}
////////////////////////////////////////////////////
// Standard implementations:
//
impl std::str::FromStr for MAC {
type Err = std::string::ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut m = MAC::default();
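// Note: copy_from_slice panics unless `s` is exactly 17 bytes long.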
m.s.copy_from_slice(s.as_bytes());
Ok(m)
}
}
impl std::fmt::Display for MAC {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(&self.s))
}
}
impl std::fmt::Debug for MAC {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(&self.s))
}
}
////////////////////////////////////////////////////
// Influx implementations:
//
#[cfg(influxdb)]
impl From<MAC> for influxdb::Type {
fn from(val: MAC) -> Self {
influxdb::Type::Text(val.to_string())
}
}
impl influxdb2::writable::KeyWritable for MAC {
fn encode_key(&self) -> String {
format!("{}", self)
}
}
impl influxdb2::writable::ValueWritable for MAC {
fn encode_value(&self) -> String {
format!("{}", self)
}
}
////////////////////////////////////////////////////
// Serde implementations:
//
impl<'de> serde::Deserialize<'de> for MAC {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct MACVisitor {
len: usize,
}
impl<'de> serde::de::Visitor<'de> for MACVisitor {
type Value = MAC;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "a string of exactly {} bytes", self.len)
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
if s.len() == self.len {
Ok(MAC::new(s))
} else {
Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Str(s),
&self,
))
}
}
}
let visitor = MACVisitor { len: 17 };
deserializer.deserialize_str(visitor)
}
}
impl serde::Serialize for MAC {
fn serialize<S>(
&self,
serializer: S,
) -> Result<<S as serde::Serializer>::Ok, <S as serde::Serializer>::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.as_str())
}
}
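
As the first commit message notes, this type is deprecated because the current influxdb2 derives only read/write built-in types; the models in this diff therefore keep MAC addresses as plain String tags. A sketch of the constraint, mirroring the derive usage elsewhere in this diff:

use influxdb2_derive::WriteDataPoint;

// Works: String tags, f64 fields and i64 timestamps are all built-in types.
#[derive(Default, WriteDataPoint)]
#[measurement = "example"]
struct Measure {
    #[influxdb(tag)]
    device_id: String, // a MAC address kept as text
    #[influxdb(field)]
    rssi: f64,
    #[influxdb(timestamp)]
    time: i64,
}

// Does not compile with the current crate: a custom tag type.
// #[influxdb(tag)]
// device_id: MAC,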

255
kairo-common/src/types/point.rs Normal file
View File

@@ -0,0 +1,255 @@
use std::{
fmt::{Display, Formatter},
ops,
};
#[derive(Debug, Clone, Copy, Default, PartialEq)]
pub struct Point {
pub x: f64,
pub y: f64,
}
impl Point {
pub fn new(x: f64, y: f64) -> Point {
Point { x, y }
}
pub fn zero() -> Point {
Point { x: 0.0, y: 0.0 }
}
pub fn is_valid(&self) -> bool {
!self.x.is_nan() && !self.y.is_nan()
}
pub fn module(&self) -> f64 {
f64::sqrt(self.x * self.x + self.y * self.y)
}
pub fn phase(&self) -> f64 {
f64::atan2(self.y, self.x)
}
pub fn distance(a: &Point, b: &Point) -> f64 {
(a - b).module()
}
pub fn distance_to(&self, other: &Point) -> f64 {
(self - other).module()
}
pub fn as_versor(&self) -> Option<Point> {
if self.x == 0.0 && self.y == 0.0 {
None
} else {
Some(self / self.module())
}
}
pub fn rotate_by(&mut self, α: f64) {
let m = self.module();
let (sin, cos) = f64::sin_cos(self.phase() + α);
self.x = m * cos;
self.y = m * sin;
}
}
impl Display for Point {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(f, "({:.2},{:.2})", &self.x, &self.y)
}
}
impl ops::Add<Point> for Point {
type Output = Point;
fn add(self, rhs: Point) -> Point {
Point {
x: self.x + rhs.x,
y: self.y + rhs.y,
}
}
}
impl ops::Add<&Point> for &Point {
type Output = Point;
fn add(self, rhs: &Point) -> Point {
Point {
x: self.x + rhs.x,
y: self.y + rhs.y,
}
}
}
impl ops::AddAssign<&Point> for Point {
fn add_assign(&mut self, rhs: &Point) {
*self = Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
};
}
}
impl ops::AddAssign<Point> for Point {
fn add_assign(&mut self, rhs: Point) {
*self = Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
};
}
}
impl ops::SubAssign<&Point> for Point {
fn sub_assign(&mut self, rhs: &Point) {
*self = Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
};
}
}
impl ops::SubAssign<Point> for Point {
fn sub_assign(&mut self, rhs: Point) {
*self = Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
};
}
}
impl ops::Sub<Point> for Point {
type Output = Point;
fn sub(self, rhs: Point) -> Point {
Point {
x: self.x - rhs.x,
y: self.y - rhs.y,
}
}
}
impl ops::Sub<&Point> for &Point {
type Output = Point;
fn sub(self, rhs: &Point) -> Point {
Point {
x: self.x - rhs.x,
y: self.y - rhs.y,
}
}
}
impl ops::Mul<f64> for Point {
type Output = Point;
fn mul(self, rhs: f64) -> Point {
Point {
x: self.x * rhs,
y: self.y * rhs,
}
}
}
impl ops::MulAssign<f64> for Point {
fn mul_assign(&mut self, rhs: f64) {
*self = Point {
x: self.x * rhs,
y: self.y * rhs,
}
}
}
impl ops::Mul<f64> for &Point {
type Output = Point;
fn mul(self, rhs: f64) -> Point {
Point {
x: self.x * rhs,
y: self.y * rhs,
}
}
}
impl ops::Div<f64> for Point {
type Output = Point;
fn div(self, rhs: f64) -> Point {
Point {
x: self.x / rhs,
y: self.y / rhs,
}
}
}
impl ops::DivAssign<f64> for Point {
fn div_assign(&mut self, rhs: f64) {
*self = Point {
x: self.x / rhs,
y: self.y / rhs,
}
}
}
impl ops::Div<f64> for &Point {
type Output = Point;
fn div(self, rhs: f64) -> Point {
Point {
x: self.x / rhs,
y: self.y / rhs,
}
}
}
#[test]
fn test() {
use std::f64::consts::{FRAC_1_SQRT_2, FRAC_PI_2, FRAC_PI_4, SQRT_2};
// New
let p = Point::new(0.0, 0.0);
print!("Testing Point::new()");
assert_eq!(p, Point { x: 0.0, y: 0.0 });
assert_ne!(p, Point { x: -1.0, y: 1.0 });
println!(" ... ok");
// is_valid
let n = Point::new(std::f64::NAN, std::f64::NAN);
let nn = Point::new(std::f64::NAN, 0.0);
print!("Testing Point::is_valid()");
assert_eq!(p.is_valid(), true);
assert_eq!(n.is_valid(), false);
assert_eq!(nn.is_valid(), false);
println!(" ... ok");
// module
let p = Point::new(1.0, 1.0);
let r = Point::new(2.0, 0.0);
print!("Testing Point::module()");
assert!(f64::abs(p.module() - SQRT_2) < 1e-10);
assert!(f64::abs(r.module() - 2.0) < 1e-10);
println!(" ... ok");
// phase
let p = Point::new(1.0, 1.0);
let r = Point::new(2.0, 0.0);
let q = Point::new(2.0, -2.0);
print!("Testing Point::phase()");
assert!(f64::abs(p.phase() - FRAC_PI_4) < 1e-6);
assert!(f64::abs(r.phase() - 0.0) < 1e-6);
assert!(f64::abs(q.phase() + FRAC_PI_4) < 1e-6);
println!(" ... ok");
//distance
let z = Point::zero();
let p = Point::new(1.0, 0.0);
let q = Point::new(1.0, 1.0);
print!("Testing Point::distance() and distance_to()");
assert_eq!(z.distance_to(&p), 1.0);
assert_eq!(Point::distance(&z, &p), 1.0);
assert!(f64::abs(Point::distance(&z, &q) - SQRT_2) < 1e-10);
println!(" ... ok");
//versor
print!("Testing Point::as_versor()");
assert_eq!(z.as_versor(), None);
assert_eq!(p, p.as_versor().unwrap());
let q_ver = q.as_versor().unwrap();
assert!(f64::abs(q_ver.x - FRAC_1_SQRT_2) < 1e-10);
assert!(f64::abs(q_ver.y - FRAC_1_SQRT_2) < 1e-10);
println!(" ... ok");
//rotate_by
let mut p = Point::new(1.0, 0.0);
print!("Testing Point::rotate_by()");
p.rotate_by(FRAC_PI_2);
assert!(f64::abs(p.x - 0.0) < 1e-10);
assert!(f64::abs(p.y - 1.0) < 1e-10);
p.rotate_by(-FRAC_PI_4);
assert!(f64::abs(p.x - FRAC_1_SQRT_2) < 1e-10);
assert!(f64::abs(p.y - FRAC_1_SQRT_2) < 1e-10);
println!(" ... ok");
}
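
For reference, rotate_by works in polar form (magnitude m, phase φ), which is equivalent to multiplying by the standard rotation matrix:

m = \sqrt{x^2 + y^2},\quad \varphi = \operatorname{atan2}(y, x),\qquad
\begin{pmatrix} x' \\ y' \end{pmatrix} = m \begin{pmatrix} \cos(\varphi + \alpha) \\ \sin(\varphi + \alpha) \end{pmatrix} = \begin{pmatrix} \cos\alpha & -\sin\alpha \\ \sin\alpha & \cos\alpha \end{pmatrix} \begin{pmatrix} x \\ y \end{pmatrix}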

60
kairo-common/src/unit_conversion.rs Normal file
View File

@@ -0,0 +1,60 @@
pub trait UnitsConversion {
#[allow(non_snake_case)]
fn dBm_to_W(&self) -> f64;
#[allow(non_snake_case)]
fn W_to_dBm(&self) -> f64;
#[allow(non_snake_case, clippy::wrong_self_convention)]
fn from_dB(&self) -> f64;
#[allow(non_snake_case)]
fn to_dB(&self) -> f64;
}
impl UnitsConversion for f64 {
fn dBm_to_W(&self) -> f64 {
10.0_f64.powf((self - 30.0) / 10.0)
}
fn W_to_dBm(&self) -> f64 {
30.0 + 10.0 * f64::log10(*self)
}
fn from_dB(&self) -> f64 {
10.0_f64.powf((*self) / 10.0)
}
fn to_dB(&self) -> f64 {
10.0 * f64::log10(*self)
}
}
#[test]
fn test_unit_conversion() {
print!("Testing conversion from W to dBm");
assert_eq!(1.0_f64.W_to_dBm(), 30.0);
assert_eq!(0.001_f64.W_to_dBm(), 0.0);
assert!(f64::abs(2.0_f64.W_to_dBm() - 33.0) < 0.1);
assert!(f64::abs(0.002_f64.W_to_dBm() - 3.0) < 0.1);
println!(" ... ok");
print!("Testing conversion from dBm to W");
assert_eq!(1.0, 30.0_f64.dBm_to_W());
assert_eq!(0.001, 0.0_f64.dBm_to_W());
assert!(f64::abs(2.0 - 33.0_f64.dBm_to_W()) < 0.1);
assert!(f64::abs(0.002 - 3.0_f64.dBm_to_W()) < 0.1);
println!(" ... ok");
print!("Testing conversion from dB to scalar");
assert_eq!(1.0, 0.0_f64.from_dB());
assert_eq!(10.0, 10.0_f64.from_dB());
assert_eq!(100.0, 20.0_f64.from_dB());
assert!(f64::abs(2.0 - 3.0_f64.from_dB()) < 0.1);
assert!(f64::abs(20.0 - 13_f64.from_dB()) < 0.1);
assert!(f64::abs(200.0 - 23_f64.from_dB()) < 0.5);
println!(" ... ok");
print!("Testing conversion from scalar to dB");
assert_eq!(1.0_f64.to_dB(), 0.0);
assert_eq!(10.0_f64.to_dB(), 10.0);
assert_eq!(100.0_f64.to_dB(), 20.0);
assert!(f64::abs(2.0_f64.to_dB() - 3.0) < 0.1);
assert!(f64::abs(20.0_f64.to_dB() - 13.0) < 0.1);
assert!(f64::abs(200.0_f64.to_dB() - 23.0) < 0.5);
println!(" ... ok");
}
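
The conversions above in formula form; the 0.1-tolerance assertions rely on the usual engineering shortcut log10(2) ≈ 0.301, so doubling a power adds about 3 dB (e.g. 2 W ≈ 33 dBm):

P_{\mathrm{dBm}} = 10\log_{10}\frac{P_{\mathrm{W}}}{1\,\mathrm{mW}} = 30 + 10\log_{10} P_{\mathrm{W}},
\qquad
P_{\mathrm{W}} = 10^{(P_{\mathrm{dBm}} - 30)/10}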

kairo-core/Cargo.toml
View File

@@ -6,3 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
diesel = { workspace = true }
rocket = { version = "0.5.0-rc.3", features = ["json"] }
kairo-common = { path = "../kairo-common" }

77
kairo-core/src/antennas.rs Normal file
View File

@@ -0,0 +1,77 @@
use diesel::prelude::*;
use rocket::{http::Status, serde::json::Json, State};
use kairo_common::{postgres, schema::antennas, Antenna};
#[rocket::get("/id/<id>")]
pub fn get_by_id(db_pool: &State<postgres::DbPool>, id: String) -> Option<Json<Antenna>> {
let mut db = db_pool.get().unwrap();
let res = antennas::table
.select(antennas::all_columns)
.find(id)
.get_result::<Antenna>(&mut db);
match res {
Ok(v) => Some(rocket::serde::json::Json(v)),
_ => None,
}
}
#[rocket::get("/")]
pub fn get_list(db_pool: &State<postgres::DbPool>) -> Json<Vec<Antenna>> {
let mut db = db_pool.get().unwrap();
let res = antennas::table
.select(antennas::all_columns)
.load::<Antenna>(&mut db);
match res {
Ok(v) => rocket::serde::json::Json(v),
_ => rocket::serde::json::Json(vec![]),
}
}
#[rocket::post("/new", format = "json", data = "<antenna>")]
pub fn new(db_pool: &State<postgres::DbPool>, antenna: Json<Antenna>) -> Status {
let mut db = db_pool.get().unwrap();
let res = diesel::insert_into(antennas::table)
.values(antenna.0)
.execute(&mut db);
match res {
Ok(_) => Status::Ok,
_ => Status::NotAcceptable,
}
}
#[rocket::patch("/update", format = "json", data = "<antenna>")]
pub fn update(db_pool: &State<postgres::DbPool>, antenna: Json<Antenna>) -> Status {
let mut db = db_pool.get().unwrap();
let res = diesel::update(antennas::table)
.filter(antennas::id.eq(antenna.id.clone()))
.set(antenna.0)
.execute(&mut db);
match res {
Ok(0) => Status::NotModified,
Ok(1) => Status::Ok,
_ => Status::BadRequest,
}
}
#[rocket::delete("/delete/<id>")]
pub fn delete(db_pool: &State<postgres::DbPool>, id: String) -> Status {
let mut db = db_pool.get().unwrap();
let res = diesel::delete(antennas::table)
.filter(antennas::id.eq(id))
.execute(&mut db);
match res {
Ok(1) => Status::Ok,
_ => Status::BadRequest,
}
}

kairo-core/src/main.rs
View File

@@ -1,3 +1,31 @@
#[macro_use]
extern crate rocket;
mod antennas;
use std::path::{Path, PathBuf};
use rocket::fs::NamedFile;
use rocket::response::status::NotFound;
use kairo_common::postgres;
#[get("/<file..>")]
async fn serve_file(file: PathBuf) -> Result<NamedFile, NotFound<String>> {
let path = Path::new("static/").join(file);
NamedFile::open(&path)
.await
.map_err(|e| NotFound(e.to_string()))
}
#[launch]
fn rocket() -> _ {
rocket::build()
.manage(postgres::init_pool())
.mount("/static", routes![serve_file])
.mount("/antennas/", routes![antennas::get_by_id])
.mount("/antennas/", routes![antennas::get_list])
.mount("/antennas/", routes![antennas::delete])
.mount("/antennas/", routes![antennas::update])
.mount("/antennas/", routes![antennas::new])
}

22
simulation-tools/Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[package]
name = "simulation-tools"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bin]]
name = "nav_dev"
path = "src/nav_dev/main.rs"
[dependencies]
paho-mqtt = { workspace = true }
tokio = { workspace = true }
dotenv = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
kairo-common = { path = "../kairo-common" }
rand = "0.8.5"
rand_distr = "0.4.3"

57
simulation-tools/src/nav_dev/error_report.rs Normal file
View File

@@ -0,0 +1,57 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
use tokio::time;
use kairo_common::Point;
use crate::Config;
#[derive(Debug, Serialize)]
pub struct Error {
error: f64,
speed: f64,
time: DateTime<Utc>,
}
#[allow(dead_code)]
pub async fn thread(config: Config) {
let period = time::Duration::from_millis(500);
let mut position = Point::new(config.radius, 0.0);
let mut speed = position;
position.rotate_by(f64::to_radians(config.angle_step));
speed -= position;
let _speed = speed.module();
loop {
let start = time::Instant::now();
// let real = KnownPosition::get_last_for("real", 1).await;
// let calc = KnownPosition::get_last_for(config.id.as_str(), 1).await;
// if real.is_ok() && calc.is_ok() {
// let real = real.unwrap();
// let calc = calc.unwrap();
// if real.is_some() && calc.is_some() {
// let real = real.unwrap();
// let calc = calc.unwrap();
// #[allow(non_snake_case)]
// let Δx = real.x - calc.x;
// #[allow(non_snake_case)]
// let Δy = real.y - calc.y;
// let error = Error {
// speed,
// error: f64::sqrt(Δx.powi(2) + Δy.powi(2)),
// time: chrono::Utc::now(),
// };
// let table_name = format!("error_{}", config.id.as_str());
// get_influx_cli()
// .query(error.into_query(table_name.as_str()))
// .await
// .unwrap();
// }
time::sleep(period.saturating_sub(start.elapsed())).await;
// }
}
}

107
simulation-tools/src/nav_dev/main.rs Normal file
View File

@@ -0,0 +1,107 @@
use rand_distr::{Distribution, Normal};
use std::{thread, time};
mod error_report;
use kairo_common::mqtt::for_sync::MqttClient;
use kairo_common::{Antenna, BeaconMeasure, DeviceReport, Point};
#[derive(Clone)]
pub struct Config {
period_ms: u64,
radius: f64,
noise_level: f64,
angle_step: f64,
id: String,
real: bool,
}
#[tokio::main]
async fn main() {
let config = parse_cli();
let period = time::Duration::from_millis(config.period_ms);
let noise_gen = Normal::new(0.0, config.noise_level).unwrap();
// if config.real {
// let config = config.clone();
// tokio::spawn(async move {
// error_report::thread(config).await;
// });
// }
let client = MqttClient::new(None);
let mut position = Point::new(config.radius, 0.0);
let antenna = vec![
Antenna::new("e6:ad:0b:2e:d7:11", 30.0, Point::new(15.0, 15.0)),
Antenna::new("c2:b5:f5:cc:e6:88", 30.0, Point::new(15.0, -15.0)),
Antenna::new("e6:2e:e6:88:f5:cc", 30.0, Point::new(-15.0, 15.0)),
Antenna::new("c2:ad:0b:b5:11:d7", 30.0, Point::new(-15.0, -15.0)),
];
let topic = format!("device/{}/report", config.id);
loop {
let start = time::Instant::now();
let mut report = DeviceReport { data: vec![] };
for ant in antenna.iter() {
let d = ant.coord().distance_to(&position);
let rssi = ant.get_rssi(d);
let noise: f64 = noise_gen.sample(&mut rand::thread_rng());
report
.data
.push(BeaconMeasure::new(&config.id, &ant.id, rssi + noise));
}
let payload = serde_json::to_string(&report).unwrap_or_else(|_| "".to_string());
client
.publish(topic.as_str(), Some(payload.as_str()))
.expect("Pub error");
// if config.real {
// let _r = KnownPosition::new(position).write_for("real").await;
// }
position.rotate_by(f64::to_radians(config.angle_step));
thread::sleep(period.saturating_sub(start.elapsed()));
}
}
fn parse_cli() -> Config {
use std::env;
let mut config = Config {
period_ms: 1000,
radius: 12.0,
noise_level: 0.0,
angle_step: 3.6,
id: "60:f2:62:01:a9:28".to_string(),
real: true,
};
let args = env::args().collect::<Vec<String>>();
for (i, arg) in args.iter().enumerate() {
match arg.as_str() {
"--noise" | "--noise-level" | "-n" => {
config.noise_level = args[i + 1].parse::<f64>().unwrap();
}
"--rad" | "--radious" | "-r" => {
config.radius = args[i + 1].parse::<f64>().unwrap();
}
"--period" | "-p" => {
config.period_ms = args[i + 1].parse::<u64>().unwrap();
}
"--angle" | "--step" => {
config.angle_step = args[i + 1].parse::<f64>().unwrap();
}
"--id" => {
config.id = args[i + 1].clone();
config.real = false;
}
_ => {}
}
}
config
}

xyz-engine/Cargo.toml
View File

@@ -3,6 +3,18 @@ name = "xyz-engine"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [[test]]
name = "all"
path = "test/all.rs"
[dependencies] [dependencies]
paho-mqtt = { workspace = true }
tokio = { workspace = true }
dotenv = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
kairo-common = { path = "../kairo-common" }
itertools = "0.10.3"
futures = "0.3"

36
xyz-engine/src/handler.rs Normal file
View File

@@ -0,0 +1,36 @@
pub mod device {
use kairo_common::{
influx::{self, Bucket},
unit_conversion::UnitsConversion,
BeaconMeasure, DeviceReport, MAC,
};
use crate::position_solver::solve_for;
pub async fn report(device_id: &str, payload: &str) {
if let Ok(device_report) = serde_json::from_str::<DeviceReport>(payload) {
// split the report into individual measures
let measures = device_report
.data
.iter()
.map(|f| BeaconMeasure::new(device_id, &f.beacon_id, f.rssi.dBm_to_W()))
// collect them into a vector of write points
.collect::<Vec<BeaconMeasure>>();
let more_than_three = measures.len() >= 3;
let result = influx::Client::get()
.write(Bucket::Tmp, futures::stream::iter(measures))
.await;
// If we stored at least three valid measures, it's worth solving for the position
if result.is_ok() && more_than_three {
let device_id = MAC::new(device_id);
tokio::spawn(async move {
let _r = solve_for(device_id).await;
});
}
} else {
println!("Unable to parse: {}", payload);
}
}
}

xyz-engine/src/main.rs
View File

@@ -1,3 +1,35 @@
use futures::stream::StreamExt;
mod handler;
mod position_solver;
use kairo_common::mqtt::for_async::{get_mqtt_cli_and_stream, mqtt_cli_reconnect, mqtt_subscribe};
#[tokio::main]
async fn main() {
let (mqtt_cli, mut stream) = get_mqtt_cli_and_stream().await;
let topic = "device/+/report";
println!("Subscribing to topic: {:?}", topic);
mqtt_subscribe(&mqtt_cli, topic).await;
while let Some(msg) = stream.next().await {
if let Some(msg) = msg {
// split the topic first
let topic: Vec<&str> = msg.topic().splitn(3, '/').collect();
match topic[0] {
"device" => match topic[2] {
"report" => handler::device::report(topic[1], &msg.payload_str()).await,
_ => println!("Unhandled topic for device: {}", topic[2]),
},
_ => println!("Unhandled topic: {}", msg.topic()),
}
} else {
// A "None" means we were disconnected. Try to reconnect...
mqtt_cli_reconnect(&mqtt_cli).await;
mqtt_subscribe(&mqtt_cli, topic).await;
}
}
}
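
The splitn(3, '/') routing above splits a report topic into exactly three parts, keeping the ':'-separated MAC intact; an illustrative check:

fn main() {
    let parts: Vec<&str> = "device/60:f2:62:01:a9:28/report".splitn(3, '/').collect();
    assert_eq!(parts, ["device", "60:f2:62:01:a9:28", "report"]);
}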

198
xyz-engine/src/position_solver.rs Normal file
View File

@@ -0,0 +1,198 @@
use itertools::Itertools;
use std::collections::HashMap;
use kairo_common::{
influx::{self, Bucket},
Antenna, BeaconMeasure, KnownPosition, Point, MAC,
};
struct KnownDistance {
point: Point,
dist: f64,
}
pub async fn solve_for(device_id: MAC) -> Result<Point, ()> {
let antennas = anntennas_hashmap();
// let measure = BeaconMeasure::get_for(device_id.as_str()).await.unwrap();
let query = format!(
"
|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"beacon_measures\")
|> filter(fn: (r) => r[\"device_id\"] == \"{}\" )
",
device_id
);
let measure = influx::Client::get()
.query::<BeaconMeasure>(Bucket::Tmp, query)
.await
.unwrap();
let known_distance = measure
.iter()
.filter_map(|m| {
if let Some(a) = antennas.get(&m.beacon_id) {
let kd = KnownDistance {
point: a.coord(),
dist: a.get_distance_with_W(m.rssi),
};
Some(kd)
} else {
None
}
})
.collect::<Vec<KnownDistance>>();
let mut posible_positions = known_distance
.iter()
.permutations(3)
.filter_map(|per| trilat(per[0], per[1], per[2]))
.collect::<Vec<KnownDistance>>();
print!("Old len(): {} \t", posible_positions.len());
let query = format!(
"|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"known_positions\")
|> filter(fn: (r) => r[\"device_id\"] == \"{}\" )
|> last()
",
device_id
);
if let Ok(last_position) = influx::Client::get()
.query::<KnownPosition>(Bucket::Perm, query)
.await
{
if !last_position.is_empty() {
let last_position = Point::new(last_position[0].x, last_position[0].y);
posible_positions.retain(|p| last_position.distance_to(&p.point) < 3.0);
}
}
println!("New len(): {}", posible_positions.len());
let mut pos = Point::new(0.0, 0.0);
let mut divisor = 0.0;
for p in posible_positions.iter() {
pos.x += p.point.x / p.dist;
pos.y += p.point.y / p.dist;
divisor += 1.0 / p.dist;
}
pos /= divisor;
println!("Pos: {}", pos);
let known_pos = KnownPosition::new(device_id.as_str(), pos);
let _r = influx::Client::get()
.write(Bucket::Perm, futures::stream::iter([known_pos]))
.await;
Ok(pos)
}
fn trilat(a: &KnownDistance, b: &KnownDistance, c: &KnownDistance) -> Option<KnownDistance> {
#![allow(non_snake_case)]
let points = vec![a.point, b.point, c.point];
for &p in points.iter() {
if !p.is_valid() {
return None;
}
}
// We have two triangles that share a side,
// Da and Db are both a hypotenuse,
// h is the shared side
// D is the lineal sum of both coaxial sides.
// P
// /|\
// / | \
// Da/ |h \Db
// / | \
// / d1 | d2 \
// *-----------*
// A B => D = BA
let D = (b.point - a.point).module();
let d1 = (D.powi(2) + a.dist.powi(2) - b.dist.powi(2)) / (2.0 * D);
let h = f64::sqrt(a.dist.powi(2) - d1.powi(2));
if h.is_nan() {
return None;
}
// With points A and B we can find the position P, but there are two
// possible solutions, so we build a rhombus with both possible P:
let D_ver = (b.point - a.point).as_versor().unwrap();
let mut upper = D_ver * a.dist;
let mut downer = D_ver * a.dist;
// we need to rotate that direction by alpha and -alpha, where tan(alpha) = h / d1
let alpha = f64::atan2(h, d1);
upper.rotate_by(alpha);
downer.rotate_by(-alpha);
// Now we have two vectors of length |Da| that point from A to the two possible positions
let P = [a.point + upper, a.point + downer];
// Now we need to see which of P[0] or P[1] is at distance Dc from point C.
// Since the measured values (Da, Db and Dc) contain a lot of error and noise,
// they won't match exactly, so we pick the point whose distance to point C is closest to Dc.
let dist_to_C = [P[0].distance_to(&c.point), P[1].distance_to(&c.point)];
let error = [
f64::abs(dist_to_C[0] - c.dist),
f64::abs(dist_to_C[1] - c.dist),
];
if error[0] < error[1] {
Some(KnownDistance {
point: P[0],
dist: error[0],
})
} else {
Some(KnownDistance {
point: P[1],
dist: error[1],
})
}
}
fn anntennas_hashmap() -> HashMap<String, Antenna> {
let data = vec![
Antenna::new("e6:ad:0b:2e:d7:11", 30.0, Point::new(15.0, 15.0)),
Antenna::new("c2:b5:f5:cc:e6:88", 30.0, Point::new(15.0, -15.0)),
Antenna::new("e6:2e:e6:88:f5:cc", 30.0, Point::new(-15.0, 15.0)),
Antenna::new("c2:ad:0b:b5:11:d7", 30.0, Point::new(-15.0, -15.0)),
];
let mut map: HashMap<String, Antenna> = HashMap::new();
for a in data.iter() {
map.insert(a.id.clone(), a.clone());
}
map
}
#[test]
fn test_trilat() {
let a = KnownDistance {
dist: 6.3,
point: Point::new(0.0, 0.0),
};
let b = KnownDistance {
dist: 3.1,
point: Point::new(5.0, 6.5),
};
let c = KnownDistance {
dist: 5.5,
point: Point::new(9.0, 0.0),
};
let pos = trilat(&a, &b, &c).unwrap();
let expected = Point::new(5.0, 3.5);
assert!(f64::abs(pos.point.x - expected.x) < 0.5);
assert!(f64::abs(pos.point.y - expected.y) < 0.5);
}
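
The geometry in trilat follows the diagram in its comments: the two right triangles share the height h, so subtracting their Pythagorean identities eliminates h and, together with d1 + d2 = D, yields d1; the two candidate positions are then A plus a vector of length Da rotated by ±α. The final position in solve_for is the error-weighted average over all candidates:

D_a^2 = d_1^2 + h^2,\quad D_b^2 = d_2^2 + h^2,\quad d_1 + d_2 = D
\;\Rightarrow\;
d_1 = \frac{D^2 + D_a^2 - D_b^2}{2D},\quad
h = \sqrt{D_a^2 - d_1^2},\quad
\alpha = \operatorname{atan2}(h, d_1)

\mathbf{p} = \frac{\sum_i \mathbf{p}_i / e_i}{\sum_i 1 / e_i}

where e_i is the trilateration error each candidate carries in its dist field.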

1
xyz-engine/test/all.rs Normal file
View File

@@ -0,0 +1 @@