///
/// Macros
///
#[macro_export]
macro_rules! err {
    ($err:expr, $msg:expr) => {{
        error!("{}", $msg);
        err_json!(json!({
            "error": $err,
            "error_description": $err,
            "ErrorModel": {
                "Message": $msg,
                "ValidationErrors": null,
                "ExceptionMessage": null,
                "ExceptionStackTrace": null,
                "InnerExceptionMessage": null,
                "Object": "error"
            }}))
    }};
    ($msg:expr) => { err!("unknown_error", $msg) }
}

#[macro_export]
macro_rules! err_json {
    ($expr:expr) => {{
        return Err(rocket::response::status::BadRequest(Some(rocket_contrib::json::Json($expr))));
    }}
}

#[macro_export]
macro_rules! err_handler {
    ($expr:expr) => {{
        error!("{}", $expr);
        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $expr));
    }}
}
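
// Illustrative usage sketch, not part of the original file: `err!` logs the message and
// early-returns a JSON error body wrapped in a 400 response, so it is meant for functions
// whose error type matches what `err_json!` returns here. The alias and the
// `reject_empty_name` scenario below are assumptions made up for this example.
#[cfg(test)]
mod err_macro_example {
    use rocket::response::status::BadRequest;
    use rocket_contrib::json::Json;
    use serde_json::Value;

    // The error type built by `err_json!` above: a 400 wrapping a JSON value.
    type ExampleResult = Result<(), BadRequest<Json<Value>>>;

    fn reject_empty_name(name: &str) -> ExampleResult {
        if name.is_empty() {
            // Expands to `error!(...)` plus `return Err(BadRequest(Some(Json(json!({ .. })))))`
            err!("invalid_request", "Name cannot be empty")
        }
        Ok(())
    }

    #[test]
    fn err_produces_bad_request() {
        assert!(reject_empty_name("").is_err());
        assert!(reject_empty_name("My folder").is_ok());
    }
}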

///
/// File handling
///
use std::path::Path;
use std::io::Read;
use std::fs::{self, File};

pub fn file_exists(path: &str) -> bool {
    Path::new(path).exists()
}

pub fn read_file(path: &str) -> Result<Vec<u8>, String> {
    let mut file = File::open(Path::new(path))
        .map_err(|e| format!("Error opening file: {}", e))?;

    let mut contents: Vec<u8> = Vec::new();

    file.read_to_end(&mut contents)
        .map_err(|e| format!("Error reading file: {}", e))?;

    Ok(contents)
}

pub fn delete_file(path: &str) -> bool {
    let res = fs::remove_file(path).is_ok();

    if let Some(parent) = Path::new(path).parent() {
        // If the directory isn't empty, this returns an error, which we ignore
        // We only want to delete the folder if it's empty
        fs::remove_dir(parent).ok();
    }

    res
}
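
// Illustrative test sketch, not part of the original file: a write/read/delete round trip
// through the helpers above. It assumes a writable system temp directory; the file name is
// made up for the example.
#[cfg(test)]
mod file_helpers_example {
    use super::*;

    #[test]
    fn roundtrip_in_temp_dir() {
        let path_buf = std::env::temp_dir().join("util_rs_file_helpers_example.txt");
        let path = path_buf.to_str().unwrap();

        std::fs::write(&path_buf, b"hello").unwrap();
        assert!(file_exists(path));
        assert_eq!(read_file(path).unwrap(), b"hello".to_vec());

        // delete_file also tries to remove the (non-empty) parent dir and ignores the failure
        assert!(delete_file(path));
        assert!(!file_exists(path));
    }
}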

const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

pub fn get_display_size(size: i32) -> String {
    let mut size = size as f64;
    let mut unit_counter = 0;

    loop {
        if size > 1024. {
            size /= 1024.;
            unit_counter += 1;
        } else {
            break;
        }
    }

    // Round to two decimals
    size = (size * 100.).round() / 100.;
    format!("{} {}", size, UNITS[unit_counter])
}
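
// Illustrative checks, not part of the original file: `get_display_size` divides by 1024
// until the value drops to 1024 or below, then rounds to two decimals. Note the boundary:
// because the loop uses `>` rather than `>=`, exactly 1024 bytes is still shown in bytes.
#[cfg(test)]
mod display_size_example {
    use super::*;

    #[test]
    fn formats_sizes() {
        assert_eq!(get_display_size(0), "0 bytes");
        assert_eq!(get_display_size(1536), "1.5 KB");
        assert_eq!(get_display_size(1_572_864), "1.5 MB");
        assert_eq!(get_display_size(1024), "1024 bytes");
    }
}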

///
/// String util methods
///
use std::str::FromStr;
use std::ops::Try;

pub fn upcase_first(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
    }
}

pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option<T> where S: AsRef<str>, T: FromStr {
    if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
        Some(value)
    } else {
        None
    }
}

pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error = U>, default: T) -> T where S: AsRef<str>, T: FromStr {
    if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
        value
    } else {
        default
    }
}
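
// Illustrative checks, not part of the original file: these helpers take anything that
// implements the `std::ops::Try` trait imported above, e.g. the `Result` returned by
// `std::env::var`, and parse the success value with `FromStr`. The literal `Ok` values
// below are made up for the example.
#[cfg(test)]
mod parse_helpers_example {
    use super::*;

    #[test]
    fn parses_or_falls_back() {
        let port: Option<u16> = try_parse_string(Ok::<&str, ()>("8080"));
        assert_eq!(port, Some(8080));

        let not_a_number: Option<u16> = try_parse_string(Ok::<&str, ()>("oops"));
        assert_eq!(not_a_number, None);

        assert_eq!(try_parse_string_or(Ok::<&str, ()>("oops"), 80u16), 80);
    }
}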

///
/// Env methods
///
use std::env;

pub fn get_env<V>(key: &str) -> Option<V> where V: FromStr {
    try_parse_string(env::var(key))
}

pub fn get_env_or<V>(key: &str, default: V) -> V where V: FromStr {
    try_parse_string_or(env::var(key), default)
}
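
// Illustrative test sketch, not part of the original file: the variable names here are
// invented for the example. `get_env` parses the variable's value, returning `None` when it
// is unset or fails to parse; `get_env_or` falls back to the supplied default.
#[cfg(test)]
mod env_helpers_example {
    use super::*;

    #[test]
    fn reads_and_parses_env_vars() {
        std::env::set_var("UTIL_EXAMPLE_PORT", "8000");
        assert_eq!(get_env("UTIL_EXAMPLE_PORT"), Some(8000u16));
        assert_eq!(get_env::<u16>("UTIL_EXAMPLE_UNSET"), None);
        assert_eq!(get_env_or("UTIL_EXAMPLE_UNSET", 80u16), 80);
    }
}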

///
/// Date util methods
///
use chrono::NaiveDateTime;

const DATETIME_FORMAT: &str = "%Y-%m-%dT%H:%M:%S%.6fZ";

pub fn format_date(date: &NaiveDateTime) -> String {
    date.format(DATETIME_FORMAT).to_string()
}
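
// Illustrative check, not part of the original file: `format_date` renders a NaiveDateTime
// with the fixed `DATETIME_FORMAT` above, i.e. six fractional digits and a trailing
// literal `Z`. The sample timestamp is made up for the example.
#[cfg(test)]
mod format_date_example {
    use super::*;
    use chrono::NaiveDate;

    #[test]
    fn formats_with_six_fractional_digits() {
        let date = NaiveDate::from_ymd(2018, 2, 10).and_hms(0, 0, 55);
        assert_eq!(format_date(&date), "2018-02-10T00:00:55.000000Z");
    }
}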

///
/// Deserialization methods
///
use std::fmt;

use serde::de::{self, DeserializeOwned, Deserializer, MapAccess, SeqAccess, Visitor};
use serde_json::{self, Value};

pub type JsonMap = serde_json::Map<String, Value>;

#[derive(PartialEq, Serialize, Deserialize)]
pub struct UpCase<T: DeserializeOwned> {
    #[serde(deserialize_with = "upcase_deserialize")]
    #[serde(flatten)]
    pub data: T,
}

/// https://github.com/serde-rs/serde/issues/586
pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: DeserializeOwned,
    D: Deserializer<'de>,
{
    let d = deserializer.deserialize_any(UpCaseVisitor)?;
    T::deserialize(d).map_err(de::Error::custom)
}

struct UpCaseVisitor;

impl<'de> Visitor<'de> for UpCaseVisitor {
    type Value = Value;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an object or an array")
    }

    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
        where A: MapAccess<'de>
    {
        let mut result_map = JsonMap::new();

        while let Some((key, value)) = map.next_entry()? {
            result_map.insert(upcase_first(key), upcase_value(&value));
        }

        Ok(Value::Object(result_map))
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where A: SeqAccess<'de>
    {
        let mut result_seq = Vec::<Value>::new();

        while let Some(value) = seq.next_element()? {
            result_seq.push(upcase_value(&value));
        }

        Ok(Value::Array(result_seq))
    }
}

fn upcase_value(value: &Value) -> Value {
    if let Some(map) = value.as_object() {
        let mut new_value = json!({});

        for (key, val) in map {
            let processed_key = _process_key(key);
            new_value[processed_key] = upcase_value(val);
        }
        new_value

    } else if let Some(array) = value.as_array() {
        // Initialize array with null values
        let mut new_value = json!(vec![Value::Null; array.len()]);

        for (index, val) in array.iter().enumerate() {
            new_value[index] = upcase_value(val);
        }
        new_value

    } else {
        value.clone()
    }
}

fn _process_key(key: &str) -> String {
    match key.to_lowercase().as_ref() {
        "ssn" => "SSN".into(),
        _ => self::upcase_first(key)
    }
}
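
// Illustrative checks, not part of the original file: this is the key normalization that
// `UpCase` / `upcase_deserialize` rely on. Every object key gets its first letter
// upper-cased, recursively through nested objects and arrays, with "ssn" special-cased.
// The sample JSON below is made up for the example.
#[cfg(test)]
mod upcase_example {
    use super::*;

    #[test]
    fn upcases_keys_recursively() {
        assert_eq!(_process_key("ssn"), "SSN");
        assert_eq!(_process_key("name"), "Name");
        assert_eq!(upcase_first(""), "");

        assert_eq!(
            upcase_value(&json!({"name": "folder", "items": [{"id": 1}]})),
            json!({"Name": "folder", "Items": [{"Id": 1}]})
        );
    }
}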