Refactor Models for InfluxDB (#1)

Update to InfluxDB 2.0 and adapt all interfaces to work with it.

The `MAC` type has been deprecated for database models, since the current `influxdb2` crate doesn't support non-built-in types for reads/writes into the DB; tags now use plain `String`s (a sketch of the resulting model shape follows the commit metadata below).

Co-authored-by: Felipe Diniello <felipediniello@pm.me>
Reviewed-on: #1
2023-06-18 18:43:15 +02:00
parent a5976252e8
commit 962b90e1b8
22 changed files with 440 additions and 397 deletions
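
For orientation, a minimal sketch of the post-migration model shape (illustrative only, not part of the changeset; it mirrors the new BeaconMeasure further down): tags drop the MAC newtype in favour of plain String, timestamps move from DateTime<Utc> to i64 nanoseconds, and the influxdb2 derive macros replace InfluxDbWriteable.

use influxdb2_derive::{FromDataPoint, WriteDataPoint};

// Sketch of a post-migration measurement: only built-in types appear,
// so the MAC tag becomes a String and the timestamp an i64 in nanoseconds.
#[derive(Default, Debug, FromDataPoint, WriteDataPoint)]
#[measurement = "beacon_measures"]
struct BeaconMeasure {
    #[influxdb(tag)]
    device_id: String,
    #[influxdb(field)]
    rssi: f64,
    #[influxdb(timestamp)]
    time: i64,
}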


@@ -9,9 +9,14 @@ edition = "2021"
[dependencies]
paho-mqtt = { workspace = true }
influxdb = { workspace = true }
tokio = { workspace = true }
dotenv = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
influxdb2 = "0.4.2"
influxdb2-structmap = "0.2"
influxdb2-derive = "0.1.1"
futures = "0.3.28"
num-traits = "0.2"


@@ -1,30 +1,4 @@
pub mod for_async {
use influxdb::Client;
use std::cell::RefCell;
thread_local! {
static INFLUX_CLIENT : RefCell<influxdb::Client> = RefCell::new( init_influx_cli() );
}
pub fn get_influx_cli() -> influxdb::Client {
INFLUX_CLIENT.with(|rc| rc.borrow().clone())
}
fn init_influx_cli() -> influxdb::Client {
let host = dotenv::var("INFLUX_URL").unwrap_or_else(|_| {
println! {"INFLUX_URL not found in .env file, using default: http://localhost:8086"};
"http://localhost:8086".to_string()
});
let db = dotenv::var("INFLUX_DB").expect("INFLUX_DB not defined in .env file");
let user = dotenv::var("INFLUX_USER").expect("INFLUX_USER not defined in .env file");
let pass =
dotenv::var("INFLUX_PASSWORD").expect("INFLUX_PASSWORD not defined in .env file");
Client::new(host, db).with_auth(user, pass)
}
use mqtt::{AsyncClient, Message};
use paho_mqtt as mqtt;
use std::{process, time::Duration};

kairo-common/src/influx.rs

@@ -0,0 +1,123 @@
use std::cell::RefCell;
#[derive(Debug, Clone)]
pub struct Client {
client: influxdb2::Client,
// We should get two buckets, one temporary and one permanent, but for now we'll use just one
bucket: String,
}
pub enum Bucket {
Tmp,
Perm,
}
thread_local! {
static INFLUX_CLIENT : RefCell<Client> = Client::new();
}
impl Client {
fn new() -> RefCell<Client> {
let host = dotenv::var("INFLUX_HOST").unwrap_or_else(|_| {
println! {"INFLUX_HOST not found in .env file, using default: http://localhost:8086"};
"http://localhost:8086".to_string()
});
let bucket = dotenv::var("INFLUX_BUCKET").expect("INFLUX_BUCKET not defined in .env file");
let org = dotenv::var("INFLUX_ORG").expect("INFLUX_ORG not defined in .env file");
let token = dotenv::var("INFLUX_TOKEN").expect("INFLUX_TOKEN not defined in .env file");
RefCell::new(Client {
client: influxdb2::Client::new(host, org, token),
bucket,
})
}
pub fn get() -> Client {
INFLUX_CLIENT.with(|rc| rc.borrow().clone())
}
pub async fn write(
&self,
_bucket: Bucket,
body: impl futures::Stream<Item = impl influxdb2::models::WriteDataPoint>
+ Send
+ Sync
+ 'static,
) -> Result<(), influxdb2::RequestError> {
// TODO: use _bucket to choose from internal list
self.client.write(self.bucket.as_str(), body).await
}
pub async fn query<T>(
&self,
_bucket: Bucket,
q: String,
) -> Result<Vec<T>, influxdb2::RequestError>
where
T: influxdb2_structmap::FromMap,
{
// TODO: use _bucket to choose from internal list
let from_bucket = format!("from(bucket: \"{}\")", self.bucket);
let query = from_bucket + &q;
let query = influxdb2::models::Query::new(query);
self.client.query::<T>(Some(query)).await
}
}
#[cfg(test)]
mod test {
use crate::influx::{Bucket, Client};
#[tokio::test]
async fn test_new_get_cli() {
let health = Client::get().client.health().await;
assert!(health.is_ok())
}
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
#[derive(Default, Debug, PartialEq, FromDataPoint, WriteDataPoint)]
#[measurement = "stock_prices"]
struct StockPrice {
#[influxdb(tag)]
ticker: String,
#[influxdb(field)]
value: f64,
#[influxdb(timestamp)]
time: i64,
}
#[tokio::test]
async fn test_write_then_query() {
let time = chrono::Utc::now().timestamp_nanos();
let w = StockPrice {
ticker: "ASDF".into(),
value: 150.5,
time: time,
};
let res = Client::get()
.write(Bucket::Perm, futures::stream::iter([w]))
.await;
assert!(res.is_ok());
let query = format!(
"
|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"stock_prices\")
|> filter(fn: (r) => r[\"ticker\"] == \"ASDF\")
|> sort(columns: [\"time\"], desc: true)
"
);
let r = Client::get()
.query::<StockPrice>(Bucket::Perm, query)
.await
.unwrap();
assert!(r.len() > 0);
assert_eq!(r[0].ticker, "ASDF");
assert_eq!(r[0].value, 150.5);
}
}
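
Note for callers of the new wrapper: query() prepends the from(bucket: ...) stage itself, so call sites supply only the downstream Flux pipeline, as the tests above do. A hedged usage sketch (assuming the crate is importable as kairo_common, which this diff does not show, and using the BeaconMeasure model defined later in this changeset):

use kairo_common::influx::{Bucket, Client};
use kairo_common::BeaconMeasure;

async fn recent_measures(beacon_id: &str) -> Vec<BeaconMeasure> {
    // Only the pipeline after `from(bucket: ...)` goes here;
    // Client::query() adds the source stage from its configured bucket.
    let flux = format!(
        "
        |> range(start: -1h)
        |> filter(fn: (r) => r[\"_measurement\"] == \"beacon_measures\")
        |> filter(fn: (r) => r[\"beacon_id\"] == \"{}\")
        ",
        beacon_id
    );
    Client::get()
        .query::<BeaconMeasure>(Bucket::Tmp, flux)
        .await
        .unwrap_or_default()
}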


@@ -1,80 +0,0 @@
use chrono::{DateTime, Utc};
use influxdb::{InfluxDbWriteable, ReadQuery};
use serde::{Deserialize, Serialize};
use crate::helper::for_async::get_influx_cli;
use crate::influxdb_models::BEACONMEASURE_TIME_WINDOW;
use crate::MAC;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, InfluxDbWriteable)]
pub struct BeaconMeasure {
#[influxdb(tag)]
pub beacon_id: MAC,
pub rssi: f64,
pub time: DateTime<Utc>,
}
#[derive(Serialize, Deserialize)]
struct Tags {
beacon_id: MAC,
}
impl BeaconMeasure {
#[allow(non_snake_case)]
pub fn new(beacon_id: &MAC, rssi_W: f64) -> BeaconMeasure {
BeaconMeasure {
beacon_id: *beacon_id,
rssi: rssi_W,
time: chrono::Utc::now(),
}
}
pub async fn write_for(self, device_id: &str) -> Result<String, influxdb::Error> {
let table_name = format!("measure_{}", device_id);
get_influx_cli()
.query(self.into_query(table_name.as_str()))
.await
}
pub async fn get_for(device_id: &str) -> Result<Vec<BeaconMeasure>, influxdb::Error> {
let query = format!( "SELECT mean(rssi) FROM /measure_{}/ WHERE time > now() - {}s AND time < now() GROUP BY beacon_id;", device_id, BEACONMEASURE_TIME_WINDOW);
let mut database_result = get_influx_cli().json_query(ReadQuery::new(query)).await?;
#[derive(Deserialize)]
struct Value {
time: DateTime<Utc>,
mean: f64,
}
let vect = database_result
.deserialize_next_tagged::<Tags, Value>()?
.series
.into_iter()
.map(|measure| BeaconMeasure {
beacon_id: measure.tags.beacon_id,
rssi: measure.values[0].mean,
time: measure.values[0].time,
})
.collect::<Vec<BeaconMeasure>>();
Ok(vect)
}
}
#[tokio::test]
async fn beacon_measure_test() {
print!("Testing BeaconMeasure::* read/write methods");
let bm1 = BeaconMeasure::new(&MAC::new("AB:CD:EF:12:34:56"), 0.0);
let bm = bm1.clone();
let _result = bm.write_for("AB:CD:EF:12:34:56").await;
let bm2 = BeaconMeasure::get_for("AB:CD:EF:12:34:56").await.unwrap();
assert_eq!(bm2.len(), 1);
assert_eq!(bm1.beacon_id, bm2[0].beacon_id);
assert_eq!(bm1.rssi, bm2[0].rssi);
//wait for the time window to pass
let delay = BEACONMEASURE_TIME_WINDOW * 1000 + 500;
tokio::time::sleep(tokio::time::Duration::from_millis(delay)).await;
let bm2 = BeaconMeasure::get_for("AB:CD:EF:12:34:56").await.unwrap();
assert_eq!(bm2.len(), 0);
println!(" ... ok");
}


@@ -1,95 +0,0 @@
use influxdb::{ReadQuery, WriteQuery};
use serde::{Deserialize, Serialize};
use crate::{helper::for_async::get_influx_cli, MAC};
const TABLE_NAME: &str = "device_status";
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeviceStatus {
device_id: MAC,
pos_x: f64,
pos_y: f64,
error: f64,
speed: f64,
}
impl DeviceStatus {
fn new(device_id: MAC) -> DeviceStatus {
DeviceStatus {
device_id,
pos_x: 0.0,
pos_y: 0.0,
error: 0.0,
speed: 0.0,
}
}
pub async fn get(device_id: MAC) -> Result<Box<DeviceStatus>, influxdb::Error> {
let query = ReadQuery::new(format!(
"SELECT last(*) FROM /{}/ WHERE device_id = '{}';",
TABLE_NAME, device_id
));
let mut database_result = get_influx_cli().json_query(query).await?;
#[derive(Debug, Deserialize)]
struct Value {
last_pos_x: f64,
last_pos_y: f64,
last_error: f64,
last_speed: f64,
}
let vec = database_result.deserialize_next::<Value>()?.series;
if !vec.is_empty() && !vec[0].values.is_empty() {
Ok(Box::new(DeviceStatus {
device_id,
pos_x: vec[0].values[0].last_pos_x,
pos_y: vec[0].values[0].last_pos_y,
error: vec[0].values[0].last_error,
speed: vec[0].values[0].last_speed,
}))
} else {
Ok(Box::new(DeviceStatus::new(device_id)))
}
}
fn as_query(&self) -> influxdb::WriteQuery {
WriteQuery::new(influxdb::Timestamp::from(chrono::Utc::now()), TABLE_NAME)
.add_tag("device_id", self.device_id)
.add_field("pos_x", self.pos_x)
.add_field("pos_y", self.pos_y)
.add_field("error", self.error)
.add_field("speed", self.speed)
}
async fn update(query: influxdb::WriteQuery) -> Result<String, influxdb::Error> {
println!("update");
get_influx_cli().query(query).await
}
}
impl Drop for DeviceStatus {
fn drop(&mut self) {
println!("drop");
let query = self.as_query();
tokio::runtime::Handle::current().spawn(async move { Self::update(query).await });
}
}
#[tokio::test]
async fn test() {
use std::time::Duration;
// create context to call drop
{
let mut a = DeviceStatus::get(MAC::new("15:23:45:ab:cd:ef"))
.await
.unwrap();
a.pos_x += 2.0;
a.pos_y += 3.0;
println!("{:?}", a);
} //here and then wait
tokio::time::sleep(Duration::from_millis(150)).await;
}


@@ -1,48 +0,0 @@
use chrono::{DateTime, Utc};
use influxdb::{InfluxDbWriteable, ReadQuery};
use serde::{Deserialize, Serialize};
use crate::helper::for_async::get_influx_cli;
use crate::Point;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, InfluxDbWriteable)]
pub struct KnownPosition {
pub x: f64,
pub y: f64,
pub time: DateTime<Utc>,
}
impl KnownPosition {
pub fn new(pos: Point) -> KnownPosition {
KnownPosition {
x: pos.x,
y: pos.y,
time: chrono::Utc::now(),
}
}
pub async fn write_for(self, device_id: &str) -> Result<String, influxdb::Error> {
let table_name = format!("position_{}", device_id);
get_influx_cli()
.query(self.into_query(table_name.as_str()))
.await
}
pub async fn get_last_for(
device_id: &str,
time_window: i32,
) -> Result<Option<KnownPosition>, influxdb::Error> {
let query = format!(
"SELECT mean(x) as x, mean(y) as y FROM /position_{}/ WHERE time > now() - {}s AND time < now();",
device_id, time_window
);
let mut database_result = get_influx_cli().json_query(ReadQuery::new(query)).await?;
let series = &database_result.deserialize_next::<KnownPosition>()?.series;
if series.is_empty() {
Ok(None)
} else {
let vec = &series[0].values;
Ok(Some(vec[0].clone()))
}
}
}


@@ -1,10 +0,0 @@
// pub mod multiple_measures;
mod beacon_measure;
mod device_status;
mod known_position;
// Renaming types for ease of use outside the scope of this module
pub const BEACONMEASURE_TIME_WINDOW: u64 = 4;
pub type BeaconMeasure = beacon_measure::BeaconMeasure;
pub type KnownPosition = known_position::KnownPosition;
pub type DeviceStatus = device_status::DeviceStatus;


@@ -1,21 +1,23 @@
#![allow(confusable_idents)]
#![allow(clippy::upper_case_acronyms)]
#![allow(mixed_script_confusables)]
#![allow(non_upper_case_globals)]
#![allow(confusable_idents)]
use serde::{Deserialize, Serialize};
pub mod influx;
pub mod helper;
pub mod influxdb_models;
mod mac;
pub mod unit_conversion;
pub type Antenna = antenna::Antenna;
pub type Point = point::Point;
pub type MAC = mac::MAC;
mod antenna;
mod point;
#[derive(Debug, Serialize, Deserialize)]
pub struct DeviceReport {
pub data: Vec<influxdb_models::BeaconMeasure>,
mod types {
pub mod mac;
pub mod point;
}
pub type Point = types::point::Point;
pub type MAC = types::mac::MAC;
mod models;
pub type Antenna = models::antenna::Antenna;
pub type DeviceReport = models::DeviceReport;
pub type KnownPosition = models::known_position::KnownPosition;
pub type DynamicDeviceStatus = models::dynamic_device_status::DynamicDeviceStatus;
pub type BeaconMeasure = models::beacon_measure::BeaconMeasure;


@@ -1,23 +1,24 @@
use std::{f64::consts::PI, str::FromStr};
use std::f64::consts::PI;
use crate::{unit_conversion::UnitsConversion, Point, MAC};
use crate::{unit_conversion::UnitsConversion, Point};
#[derive(Debug, Clone, Default)]
pub struct Antenna {
pub id: MAC,
pub id: String,
pub tssi: f64,
pub coord: Point,
pub comment: Option<String>,
}
impl Antenna {
const C: f64 = 2.99e8;
const F: f64 = 2.4e9;
#[allow(non_upper_case_globals)]
const λ: f64 = Self::C / Self::F;
pub fn new(id: &str, tssi: f64, coord: Point) -> Antenna {
Antenna {
id: MAC::from_str(id).unwrap(),
id: id.into(),
comment: None,
coord,
tssi,
}
@@ -29,12 +30,14 @@ impl Antenna {
let FSPL = (((distance * 4.0 * PI) / Self::λ).powi(2)).to_dB();
self.tssi - FSPL
}
#[allow(non_snake_case)]
pub fn get_distance_with_dBm(&self, rssi_dBm: f64) -> f64 {
let loss = self.tssi.dBm_to_W() / rssi_dBm.dBm_to_W();
let distance = (loss.sqrt() * Self::λ) / (4.0 * PI);
distance.abs()
}
#[allow(non_snake_case)]
pub fn get_distance_with_W(&self, rssi_W: f64) -> f64 {
let loss = self.tssi.dBm_to_W() / rssi_W;
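
For reference, the relation these distance helpers implement is FSPL_linear = (4πd/λ)², so d = (λ/4π)·sqrt(P_tx/P_rx) with λ = C/F ≈ 0.125 m at 2.4 GHz. A standalone sanity check (illustrative, not part of this diff):

const C: f64 = 2.99e8;
const F: f64 = 2.4e9;
const LAMBDA: f64 = C / F; // ≈ 0.125 m, same constants as Antenna above

// Mirrors Antenna::get_distance_with_dBm: invert free-space path loss
// to recover distance from a transmit/receive power ratio.
fn distance_from_power_ratio(p_tx_w: f64, p_rx_w: f64) -> f64 {
    ((p_tx_w / p_rx_w).sqrt() * LAMBDA) / (4.0 * std::f64::consts::PI)
}

fn main() {
    // A 60 dB loss (power ratio 1e6) at 2.4 GHz comes out to roughly 9.9 m.
    println!("{:.1} m", distance_from_power_ratio(1.0, 1e-6));
}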


@@ -0,0 +1,68 @@
use serde::{Deserialize, Serialize};
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
#[derive(
Debug, Default, PartialEq, Clone, Serialize, Deserialize, FromDataPoint, WriteDataPoint,
)]
#[measurement = "beacon_measures"]
pub struct BeaconMeasure {
#[influxdb(tag)]
pub device_id: String,
#[influxdb(tag)]
pub beacon_id: String,
pub rssi: f64,
#[influxdb(timestamp)]
pub time: i64,
}
impl BeaconMeasure {
#[allow(non_snake_case)]
pub fn new(device_id: &str, beacon_id: &str, rssi_W: f64) -> BeaconMeasure {
BeaconMeasure {
device_id: device_id.into(),
beacon_id: beacon_id.to_owned(),
rssi: rssi_W,
time: chrono::Utc::now().timestamp_nanos(),
}
}
}
#[cfg(test)]
mod test {
use crate::influx::{Bucket, Client};
use crate::BeaconMeasure;
#[tokio::test]
async fn influx_test() {
let device_id = String::from("AB:CD:EF:01:23:45");
let beacon_id = String::from("01:23:45:AB:CD:EF");
let rssi_w = 0.001;
let bm = BeaconMeasure::new(&device_id, &beacon_id, rssi_w);
let res = Client::get()
.write(Bucket::Tmp, futures::stream::iter([bm]))
.await;
assert!(res.is_ok());
let query = format!(
"
|> range(start: -1s)
|> filter(fn: (r) => r[\"_measurement\"] == \"beacon_measures\")
|> filter(fn: (r) => r[\"beacon_id\"] == \"{}\" )
",
beacon_id
);
let r = Client::get()
.query::<BeaconMeasure>(Bucket::Tmp, query)
.await
.unwrap();
assert!(r.len() > 0);
assert_eq!(r[0].beacon_id, beacon_id);
assert_eq!(r[0].device_id, device_id);
assert_eq!(r[0].rssi, rssi_w);
}
}


@@ -0,0 +1,10 @@
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DynamicDeviceStatus {
id: String,
pos_x: f64,
pos_y: f64,
pos_z: f64,
speed_x: f64,
speed_y: f64,
pub last_seen: chrono::DateTime<chrono::Utc>,
}


@@ -0,0 +1,31 @@
use serde::{Deserialize, Serialize};
use influxdb2_derive::{FromDataPoint, WriteDataPoint};
use crate::Point;
#[derive(
Debug, Default, PartialEq, Clone, Serialize, Deserialize, FromDataPoint, WriteDataPoint,
)]
#[measurement = "known_positions"]
pub struct KnownPosition {
#[influxdb(tag)]
pub id: String,
pub x: f64,
pub y: f64,
pub z: f64,
#[influxdb(timestamp)]
pub time: i64,
}
impl KnownPosition {
pub fn new(device_id: &str, pos: Point) -> KnownPosition {
KnownPosition {
id: device_id.into(),
time: chrono::Utc::now().timestamp_nanos(),
x: pos.x,
y: pos.y,
z: 0.0,
}
}
}


@@ -0,0 +1,9 @@
pub mod antenna;
pub mod beacon_measure;
pub mod dynamic_device_status;
pub mod known_position;
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct DeviceReport {
pub data: Vec<crate::models::beacon_measure::BeaconMeasure>,
}


@@ -1,11 +1,3 @@
use std::fmt::{Debug, Display, Formatter};
use std::str::FromStr;
use influxdb::Type;
use serde::de::{self, Visitor};
use serde::{Deserialize, Serialize, Serializer};
#[allow(clippy::upper_case_acronyms)]
#[derive(Default, Clone, Copy, Hash, PartialEq, Eq)]
pub struct MAC {
s: [u8; 17],
@@ -15,13 +7,18 @@ impl MAC {
pub fn new(s: &str) -> MAC {
std::str::FromStr::from_str(s).unwrap()
}
pub fn as_str(&self) -> &str {
let a = std::str::from_utf8(&self.s);
a.unwrap()
}
}
impl FromStr for MAC {
////////////////////////////////////////////////////
// Standard implementations:
//
impl std::str::FromStr for MAC {
type Err = std::string::ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut m = MAC::default();
@@ -30,25 +27,45 @@ impl FromStr for MAC {
}
}
impl Display for MAC {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
impl std::fmt::Display for MAC {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(&self.s))
}
}
impl Debug for MAC {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
impl std::fmt::Debug for MAC {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(&self.s))
}
}
impl From<MAC> for Type {
////////////////////////////////////////////////////
// Influx implementations:
//
#[cfg(influxdb)]
impl From<MAC> for influxdb::Type {
fn from(val: MAC) -> Self {
Type::Text(val.to_string())
influxdb::Type::Text(val.to_string())
}
}
impl<'de> Deserialize<'de> for MAC {
impl influxdb2::writable::KeyWritable for MAC {
fn encode_key(&self) -> String {
format!("{}", self)
}
}
impl influxdb2::writable::ValueWritable for MAC {
fn encode_value(&self) -> String {
format!("{}", self)
}
}
////////////////////////////////////////////////////
// Serde implementations:
//
impl<'de> serde::Deserialize<'de> for MAC {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
@@ -57,7 +74,7 @@ impl<'de> Deserialize<'de> for MAC {
len: usize,
}
impl<'de> Visitor<'de> for MACVisitor {
impl<'de> serde::de::Visitor<'de> for MACVisitor {
type Value = MAC;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "a string containing at least {} bytes", self.len)
@@ -65,12 +82,15 @@ impl<'de> Deserialize<'de> for MAC {
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: de::Error,
E: serde::de::Error,
{
if s.len() == self.len {
Ok(MAC::new(s))
} else {
Err(de::Error::invalid_value(de::Unexpected::Str(s), &self))
Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Str(s),
&self,
))
}
}
}
@@ -80,10 +100,13 @@ impl<'de> Deserialize<'de> for MAC {
}
}
impl Serialize for MAC {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
impl serde::Serialize for MAC {
fn serialize<S>(
&self,
serializer: S,
) -> Result<<S as serde::Serializer>::Ok, <S as serde::Serializer>::Error>
where
S: Serializer,
S: serde::Serializer,
{
serializer.serialize_str(self.as_str())
}
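
Since MAC keeps its FromStr/Display and serde implementations, a quick illustrative round-trip (a sketch only; it assumes the crate builds as kairo_common and that serde_json from the workspace dependencies is available):

use kairo_common::MAC;

fn main() {
    let mac = MAC::new("AB:CD:EF:12:34:56");          // parses via FromStr
    assert_eq!(mac.to_string(), "AB:CD:EF:12:34:56"); // Display/Debug render the address string
    let json = serde_json::to_string(&mac).unwrap();  // serializes as a plain JSON string
    assert_eq!(serde_json::from_str::<MAC>(&json).unwrap(), mac);
}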


@@ -61,6 +61,7 @@ impl ops::Add<Point> for Point {
}
}
}
impl ops::Add<&Point> for &Point {
type Output = Point;
fn add(self, rhs: &Point) -> Point {
@@ -70,6 +71,7 @@ impl ops::Add<&Point> for &Point {
}
}
}
impl ops::AddAssign<&Point> for Point {
fn add_assign(&mut self, rhs: &Point) {
*self = Self {
@@ -78,6 +80,7 @@ impl ops::AddAssign<&Point> for Point {
};
}
}
impl ops::AddAssign<Point> for Point {
fn add_assign(&mut self, rhs: Point) {
*self = Self {
@@ -86,6 +89,7 @@ impl ops::AddAssign<Point> for Point {
};
}
}
impl ops::SubAssign<&Point> for Point {
fn sub_assign(&mut self, rhs: &Point) {
*self = Self {
@@ -94,6 +98,7 @@ impl ops::SubAssign<&Point> for Point {
};
}
}
impl ops::SubAssign<Point> for Point {
fn sub_assign(&mut self, rhs: Point) {
*self = Self {
@@ -102,6 +107,7 @@ impl ops::SubAssign<Point> for Point {
};
}
}
impl ops::Sub<Point> for Point {
type Output = Point;
fn sub(self, rhs: Point) -> Point {
@@ -111,6 +117,7 @@ impl ops::Sub<Point> for Point {
}
}
}
impl ops::Sub<&Point> for &Point {
type Output = Point;
fn sub(self, rhs: &Point) -> Point {
@@ -130,6 +137,7 @@ impl ops::Mul<f64> for Point {
}
}
}
impl ops::MulAssign<f64> for Point {
fn mul_assign(&mut self, rhs: f64) {
*self = Point {
@@ -138,6 +146,7 @@ impl ops::MulAssign<f64> for Point {
}
}
}
impl ops::Mul<f64> for &Point {
type Output = Point;
fn mul(self, rhs: f64) -> Point {
@@ -147,6 +156,7 @@ impl ops::Mul<f64> for &Point {
}
}
}
impl ops::Div<f64> for Point {
type Output = Point;
fn div(self, rhs: f64) -> Point {
@@ -156,6 +166,7 @@ impl ops::Div<f64> for Point {
}
}
}
impl ops::DivAssign<f64> for Point {
fn div_assign(&mut self, rhs: f64) {
*self = Point {
@@ -164,6 +175,7 @@ impl ops::DivAssign<f64> for Point {
}
}
}
impl ops::Div<f64> for &Point {
type Output = Point;
fn div(self, rhs: f64) -> Point {