-rw-r--r--  fun/defer_rs/examples/defer-with-error.rs | 12
-rw-r--r--  fun/defer_rs/examples/defer.rs | 6
-rw-r--r--  fun/defer_rs/examples/undefer.rs | 4
-rw-r--r--  fun/paroxysm/src/keyword.rs | 3
-rw-r--r--  fun/paroxysm/src/main.rs | 8
-rw-r--r--  net/alcoholic_jwt/src/lib.rs | 74
-rw-r--r--  net/alcoholic_jwt/src/tests.rs | 22
-rw-r--r--  net/crimp/src/lib.rs | 137
-rw-r--r--  net/crimp/src/tests.rs | 93
-rw-r--r--  nix/nint/nint.rs | 55
-rw-r--r--  ops/journaldriver/build.rs | 3
-rw-r--r--  ops/journaldriver/src/main.rs | 88
-rw-r--r--  ops/journaldriver/src/tests.rs | 32
-rw-r--r--  ops/mq_cli/src/main.rs | 76
-rw-r--r--  ops/posix_mq.rs/src/error.rs | 5
-rw-r--r--  ops/posix_mq.rs/src/tests.rs | 3
-rw-r--r--  tools/cheddar/build.rs | 9
-rw-r--r--  tools/cheddar/src/bin/cheddar.rs | 5
-rw-r--r--  tools/cheddar/src/lib.rs | 6
-rw-r--r--  tools/rust-crates-advisory/check-security-advisory.rs | 94
-rw-r--r--  tvix/src/bin/nix-store.rs | 3
-rw-r--r--  users/Profpatsch/execline/exec_helpers.rs | 84
-rw-r--r--  users/Profpatsch/git-db/git-db.rs | 39
-rw-r--r--  users/Profpatsch/imap-idle.rs | 40
-rw-r--r--  users/Profpatsch/netencode/netencode-mustache.rs | 29
-rw-r--r--  users/Profpatsch/netencode/netencode.rs | 519
-rw-r--r--  users/Profpatsch/netencode/pretty.rs | 73
-rw-r--r--  users/Profpatsch/read-http.rs | 152
-rw-r--r--  users/grfn/achilles/src/commands/eval.rs | 6
-rw-r--r--  users/grfn/achilles/src/parser/expr.rs | 3
-rw-r--r--  users/grfn/xanthous/server/src/main.rs | 7
-rw-r--r--  users/grfn/xanthous/server/src/pty.rs | 3
-rw-r--r--  users/riking/adventofcode-2020/day01/src/main.rs | 4
-rw-r--r--  web/atward/build.rs | 9
-rw-r--r--  web/converse/src/db.rs | 50
-rw-r--r--  web/converse/src/errors.rs | 17
-rw-r--r--  web/converse/src/handlers.rs | 178
-rw-r--r--  web/converse/src/main.rs | 73
-rw-r--r--  web/converse/src/models.rs | 10
-rw-r--r--  web/converse/src/oidc.rs | 45
-rw-r--r--  web/converse/src/render.rs | 43
-rw-r--r--  web/converse/src/schema.rs | 7
42 files changed, 1253 insertions, 876 deletions
diff --git a/fun/defer_rs/examples/defer-with-error.rs b/fun/defer_rs/examples/defer-with-error.rs
index 26d56d77cf1b..f8b8a014131f 100644
--- a/fun/defer_rs/examples/defer-with-error.rs
+++ b/fun/defer_rs/examples/defer-with-error.rs
@@ -4,17 +4,17 @@ use std::rc::Rc;
 use std::sync::RwLock;
 
 struct Defer<F: Fn()> {
-    f: F
+    f: F,
 }
 
-impl <F: Fn()> Drop for Defer<F> {
+impl<F: Fn()> Drop for Defer<F> {
     fn drop(&mut self) {
         (self.f)()
     }
 }
 
 // Only added this for Go-syntax familiarity ;-)
-fn  defer<F: Fn()>(f: F) -> Defer<F> {
+fn defer<F: Fn()>(f: F) -> Defer<F> {
     Defer { f }
 }
 
@@ -29,7 +29,9 @@ type ErrorHandle<T> = Rc<RwLock<Option<T>>>;
 ///////////////////
 
 #[derive(Debug)] // Debug trait for some default way to print the type.
-enum Error { DropError }
+enum Error {
+    DropError,
+}
 
 fn main() {
     // Create a place to store the error.
@@ -60,7 +62,7 @@ fn main() {
 
     match *drop_err.read().unwrap() {
         Some(ref err) => println!("Oh no, an error occured: {:?}!", err),
-        None => println!("Phew, everything went well.")
+        None => println!("Phew, everything went well."),
     };
 }
 
diff --git a/fun/defer_rs/examples/defer.rs b/fun/defer_rs/examples/defer.rs
index eadac795f8bc..0c99d00c82df 100644
--- a/fun/defer_rs/examples/defer.rs
+++ b/fun/defer_rs/examples/defer.rs
@@ -1,17 +1,17 @@
 // Go's defer in Rust!
 
 struct Defer<F: Fn()> {
-    f: F
+    f: F,
 }
 
-impl <F: Fn()> Drop for Defer<F> {
+impl<F: Fn()> Drop for Defer<F> {
     fn drop(&mut self) {
         (self.f)()
     }
 }
 
 // Only added this for Go-syntax familiarity ;-)
-fn  defer<F: Fn()>(f: F) -> Defer<F> {
+fn defer<F: Fn()>(f: F) -> Defer<F> {
     Defer { f }
 }
 
diff --git a/fun/defer_rs/examples/undefer.rs b/fun/defer_rs/examples/undefer.rs
index 17ad8a6b5485..fa659de8910e 100644
--- a/fun/defer_rs/examples/undefer.rs
+++ b/fun/defer_rs/examples/undefer.rs
@@ -1,10 +1,10 @@
 // Go's defer in Rust, with a little twist!
 
 struct Defer<F: Fn()> {
-    f: F
+    f: F,
 }
 
-impl <F: Fn()> Drop for Defer<F> {
+impl<F: Fn()> Drop for Defer<F> {
     fn drop(&mut self) {
         (self.f)()
     }
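
All three defer examples above rely on the same pattern visible in the hunks: a value whose Drop impl invokes a stored closure when it falls out of scope. For reference, a minimal self-contained sketch of that pattern follows; the main body here is illustrative only and not taken from the hunks.

struct Defer<F: Fn()> {
    f: F,
}

impl<F: Fn()> Drop for Defer<F> {
    fn drop(&mut self) {
        (self.f)()
    }
}

// Go-style helper, as in the examples above.
fn defer<F: Fn()>(f: F) -> Defer<F> {
    Defer { f }
}

fn main() {
    // The closure runs when `_cleanup` is dropped at the end of `main`,
    // i.e. after "doing work" has been printed -- mirroring Go's `defer`.
    let _cleanup = defer(|| println!("deferred cleanup ran"));
    println!("doing work");
}
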
diff --git a/fun/paroxysm/src/keyword.rs b/fun/paroxysm/src/keyword.rs
index 1b2b6ce592a0..fa40f5347a75 100644
--- a/fun/paroxysm/src/keyword.rs
+++ b/fun/paroxysm/src/keyword.rs
@@ -1,8 +1,7 @@
 use crate::models::{Entry, Keyword, NewEntry, NewKeyword};
 use diesel::pg::PgConnection;
 use diesel::prelude::*;
-use failure::format_err;
-use failure::Error;
+use failure::{format_err, Error};
 use std::borrow::Cow;
 
 /// Maximum number of times we'll follow a `see: ` pointer.
diff --git a/fun/paroxysm/src/main.rs b/fun/paroxysm/src/main.rs
index 11a0e7bf60b5..998d125bf41c 100644
--- a/fun/paroxysm/src/main.rs
+++ b/fun/paroxysm/src/main.rs
@@ -7,8 +7,7 @@ use crate::cfg::Config;
 use crate::keyword::KeywordDetails;
 use diesel::pg::PgConnection;
 use diesel::r2d2::{ConnectionManager, Pool};
-use failure::format_err;
-use failure::Error;
+use failure::{format_err, Error};
 use irc::client::prelude::*;
 use lazy_static::lazy_static;
 use log::{debug, info, warn};
@@ -153,8 +152,9 @@ impl App {
         // Use `nick` here, so things like "grfn: see glittershark" work.
         let val = if let Some(last) = chan_lastmsgs.get(nick_to_grab) {
             if last.starts_with("\x01ACTION ") {
-                // Yes, this is inefficient, but it's better than writing some hacky CTCP parsing code
-                // I guess (also, characters are hard, so just blindly slicing seems like a bad idea)
+                // Yes, this is inefficient, but it's better than writing some hacky CTCP parsing
+                // code I guess (also, characters are hard, so just blindly slicing
+                // seems like a bad idea)
                 format!(
                     "* {} {}",
                     nick_to_grab,
diff --git a/net/alcoholic_jwt/src/lib.rs b/net/alcoholic_jwt/src/lib.rs
index 4acd8d1e90db..297bf2a99085 100644
--- a/net/alcoholic_jwt/src/lib.rs
+++ b/net/alcoholic_jwt/src/lib.rs
@@ -67,23 +67,24 @@
 //!
 //! [JWKS]: https://tools.ietf.org/html/rfc7517
 
-#[macro_use] extern crate serde_derive;
+#[macro_use]
+extern crate serde_derive;
 
 extern crate base64;
 extern crate openssl;
 extern crate serde;
 extern crate serde_json;
 
-use base64::{URL_SAFE_NO_PAD, Config, DecodeError};
+use base64::{Config, DecodeError, URL_SAFE_NO_PAD};
 use openssl::bn::BigNum;
 use openssl::error::ErrorStack;
 use openssl::hash::MessageDigest;
-use openssl::pkey::{Public, PKey};
+use openssl::pkey::{PKey, Public};
 use openssl::rsa::Rsa;
 use openssl::sign::Verifier;
 use serde::de::DeserializeOwned;
 use serde_json::Value;
-use std::time::{UNIX_EPOCH, Duration, SystemTime};
+use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
 #[cfg(test)]
 mod tests;
@@ -101,12 +102,16 @@ fn jwt_forgiving() -> Config {
 /// JWT algorithm used. The only supported algorithm is currently
 /// RS256.
 #[derive(Clone, Deserialize, Debug)]
-enum KeyAlgorithm { RS256 }
+enum KeyAlgorithm {
+    RS256,
+}
 
 /// Type of key contained in a JWT. The only supported key type is
 /// currently RSA.
 #[derive(Clone, Deserialize, Debug)]
-enum KeyType { RSA }
+enum KeyType {
+    RSA,
+}
 
 /// Representation of a single JSON Web Key. See [RFC
 /// 7517](https://tools.ietf.org/html/rfc7517#section-4).
@@ -146,7 +151,7 @@ impl JWKS {
 
 /// Representation of an undecoded JSON Web Token. See [RFC
 /// 7519](https://tools.ietf.org/html/rfc7519).
-struct JWT<'a> (&'a str);
+struct JWT<'a>(&'a str);
 
 /// Representation of a decoded and validated JSON Web Token.
 ///
@@ -217,15 +222,21 @@ pub enum ValidationError {
 type JWTResult<T> = Result<T, ValidationError>;
 
 impl From<ErrorStack> for ValidationError {
-    fn from(err: ErrorStack) -> Self { ValidationError::OpenSSL(err) }
+    fn from(err: ErrorStack) -> Self {
+        ValidationError::OpenSSL(err)
+    }
 }
 
 impl From<serde_json::Error> for ValidationError {
-    fn from(err: serde_json::Error) -> Self { ValidationError::JSON(err) }
+    fn from(err: serde_json::Error) -> Self {
+        ValidationError::JSON(err)
+    }
 }
 
 impl From<DecodeError> for ValidationError {
-    fn from(err: DecodeError) -> Self { ValidationError::InvalidBase64(err) }
+    fn from(err: DecodeError) -> Self {
+        ValidationError::InvalidBase64(err)
+    }
 }
 
 /// Attempt to extract the `kid`-claim out of a JWT's header claims.
@@ -266,9 +277,7 @@ pub fn token_kid(token: &str) -> JWTResult<Option<String>> {
 ///
 /// It is the user's task to ensure that the correct JWK is passed in
 /// for validation.
-pub fn validate(token: &str,
-                jwk: &JWK,
-                validations: Vec<Validation>) -> JWTResult<ValidJWT> {
+pub fn validate(token: &str, jwk: &JWK, validations: Vec<Validation>) -> JWTResult<ValidJWT> {
     let jwt = JWT(token);
     let public_key = public_key_from_jwk(&jwk)?;
     validate_jwt_signature(&jwt, public_key)?;
@@ -279,7 +288,7 @@ pub fn validate(token: &str,
     if parts.len() != 3 {
         // This is unlikely considering that validation has already
         // been performed at this point, but better safe than sorry.
-        return Err(ValidationError::InvalidComponents)
+        return Err(ValidationError::InvalidComponents);
     }
 
     // Perform claim validations before constructing the valid token:
@@ -351,7 +360,7 @@ fn validate_jwt_signature(jwt: &JWT, key: Rsa<Public>) -> JWTResult<()> {
     verifier.update(data.as_bytes())?;
 
     match verifier.verify(&sig)? {
-        true  => Ok(()),
+        true => Ok(()),
         false => Err(ValidationError::InvalidSignature),
     }
 }
@@ -362,7 +371,7 @@ fn validate_jwt_signature(jwt: &JWT, key: Rsa<Public>) -> JWTResult<()> {
 #[serde(untagged)]
 enum Audience {
     Single(String),
-    Multi(Vec<String>)
+    Multi(Vec<String>),
 }
 
 /// Internal helper struct for claims that are relevant for claim
@@ -376,15 +385,14 @@ struct PartialClaims {
 }
 
 /// Apply a single validation to the claim set of a token.
-fn apply_validation(claims: &PartialClaims,
-                    validation: Validation) -> Result<(), &'static str> {
+fn apply_validation(claims: &PartialClaims, validation: Validation) -> Result<(), &'static str> {
     match validation {
         // Validate that an 'iss' claim is present and matches the
         // supplied value.
-        Validation::Issuer(iss) => {
-            match claims.iss {
-                None => Err("'iss' claim is missing"),
-                Some(ref claim) => if *claim == iss {
+        Validation::Issuer(iss) => match claims.iss {
+            None => Err("'iss' claim is missing"),
+            Some(ref claim) => {
+                if *claim == iss {
                     Ok(())
                 } else {
                     Err("'iss' claim does not match")
@@ -394,15 +402,17 @@ fn apply_validation(claims: &PartialClaims,
 
         // Validate that an 'aud' claim is present and matches the
         // supplied value.
-        Validation::Audience(aud) => {
-            match claims.aud {
-                None => Err("'aud' claim is missing"),
-                Some(Audience::Single(ref claim)) => if *claim == aud {
+        Validation::Audience(aud) => match claims.aud {
+            None => Err("'aud' claim is missing"),
+            Some(Audience::Single(ref claim)) => {
+                if *claim == aud {
                     Ok(())
                 } else {
                     Err("'aud' claim does not match")
-                },
-                Some(Audience::Multi(ref claims)) => if claims.contains(&aud) {
+                }
+            }
+            Some(Audience::Multi(ref claims)) => {
+                if claims.contains(&aud) {
                     Ok(())
                 } else {
                     Err("'aud' claim does not match")
@@ -447,12 +457,12 @@ fn apply_validation(claims: &PartialClaims,
 }
 
 /// Apply all requested validations to a partial claim set.
-fn validate_claims(claims: PartialClaims,
-                   validations: Vec<Validation>) -> JWTResult<()> {
-    let validation_errors: Vec<_> = validations.into_iter()
+fn validate_claims(claims: PartialClaims, validations: Vec<Validation>) -> JWTResult<()> {
+    let validation_errors: Vec<_> = validations
+        .into_iter()
         .map(|v| apply_validation(&claims, v))
         .filter_map(|result| match result {
-            Ok(_)    => None,
+            Ok(_) => None,
             Err(err) => Some(err),
         })
         .collect();
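
Putting the pieces touched above together, the public entry points (token_kid, JWKS::find, validate) compose roughly as follows. This is a hedged sketch based only on the signatures visible in this diff: the issuer value is a placeholder and the caller is assumed to depend on serde_json for deserialising the key set.

use alcoholic_jwt::{token_kid, validate, Validation, JWKS};

fn check_token(token: &str, jwks_json: &str) -> bool {
    // The key set deserialises via serde (serde_json assumed on the caller's side).
    let jwks: JWKS = serde_json::from_str(jwks_json).expect("invalid JWKS");

    // Extract the `kid` the token claims to be signed with and look it up.
    let kid = token_kid(token)
        .expect("failed to decode token header")
        .expect("token carries no `kid` claim");
    let jwk = jwks.find(&kid).expect("no matching key in the JWKS");

    // Validate the signature plus whichever claim checks are requested.
    let validations = vec![Validation::Issuer("auth.example.com".into())];
    validate(token, jwk, validations).is_ok()
}
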
diff --git a/net/alcoholic_jwt/src/tests.rs b/net/alcoholic_jwt/src/tests.rs
index 81890986f8a2..295d304ac98d 100644
--- a/net/alcoholic_jwt/src/tests.rs
+++ b/net/alcoholic_jwt/src/tests.rs
@@ -21,14 +21,19 @@ fn test_fragment_decoding() {
     let bignum = decode_fragment(fragment).expect("Failed to decode fragment");
 
     let expected = "19947781743618558124649689124245117083485690334420160711273532766920651190711502679542723943527557680293732686428091794139998732541701457212387600480039297092835433997837314251024513773285252960725418984381935183495143908023024822433135775773958512751261112853383693442999603704969543668619221464654540065497665889289271044207667765128672709218996183649696030570183970367596949687544839066873508106034650634722970893169823917299050098551447676778961773465887890052852528696684907153295689693676910831376066659456592813140662563597179711588277621736656871685099184755908108451080261403193680966083938080206832839445289";
-    assert_eq!(expected, format!("{}", bignum), "Decoded fragment should match ");
+    assert_eq!(
+        expected,
+        format!("{}", bignum),
+        "Decoded fragment should match "
+    );
 }
 
 #[test]
 fn test_decode_find_jwks() {
     let json = "{\"keys\":[{\"kty\":\"RSA\",\"alg\":\"RS256\",\"use\":\"sig\",\"kid\":\"mUjI\\/rIMLLtung35BKZfdbrqtlEAAYJ4JX\\/SKvnLxJc=\",\"n\":\"ngRRjNbXgPW29oNtF0JgsyyfTwPyEL0u_X16s453X2AOc33XGFxVKLEQ7R_TiMenaKcr-tPifYqgps_deyi0XOr4I3SOdOMtAVKDZJCANe--CANOHZb-meIfjKhCHisvT90fm5Apd6qPRVsXsZ7A8pmClZHKM5fwZUkBv8NsPLm2Xy2sGOZIiwP_7z8m3j0abUzniPQsx2b3xcWimB9vRtshFHN1KgPUf1ALQ5xzLfJnlFkCxC7kmOxKC7_NpQ4kJR_DKzKFV_r3HxTqf-jddHcXIrrMcLQXCSyeLQtLaz7whQ4F-EfL42z4XgwPr4ji3sct2gWL13EqlbE5DDxLKQ\",\"e\":\"GK7oLCDbNPAF59LhvyseqcG04hDnPs58qGYolr_HHmaR4lulWJ90ozx6e4Ut363yKG2p9vwvivR5UIC-aLPtqT2qr-OtjhBFzUFVaMGZ6mPCvMKk0AgMYdOHvWTgBSqQtNJTvl1yYLnhcWyoE2fLQhoEbY9qUyCBCEOScXOZRDpnmBtz5I8q5yYMV6a920J24T_IYbxHgkGcEU2SGg-b1cOMD7Rja7vCfV---CQ2pR4leQ0jufzudDoe7z3mziJm-Ihcdrz2Ujy5kPEMdz6R55prJ-ENKrkD_X4u5aSlSRaetwmHS3oAVkjr1JwUNbqnpM-kOqieqHEp8LUmez-Znw\"}]}";
     let jwks: JWKS = serde_json::from_str(json).expect("Failed to decode JWKS");
-    let jwk = jwks.find("mUjI/rIMLLtung35BKZfdbrqtlEAAYJ4JX/SKvnLxJc=")
+    let jwk = jwks
+        .find("mUjI/rIMLLtung35BKZfdbrqtlEAAYJ4JX/SKvnLxJc=")
         .expect("Failed to find required JWK");
 
     public_key_from_jwk(&jwk).expect("Failed to construct public key from JWK");
@@ -39,18 +44,21 @@ fn test_token_kid() {
     let jwt = "eyJraWQiOiI4ckRxOFB3MEZaY2FvWFdURVZRbzcrVGYyWXpTTDFmQnhOS1BDZWJhYWk0PSIsImFsZyI6IlJTMjU2IiwidHlwIjoiSldUIn0.eyJpc3MiOiJhdXRoLnRlc3QuYXByaWxhLm5vIiwiaWF0IjoxNTM2MDUwNjkzLCJleHAiOjE1MzYwNTQyOTMsInN1YiI6IjQyIiwiZXh0Ijoic21va2V0ZXN0IiwicHJ2IjoiYXJpc3RpIiwic2NwIjoicHJvY2VzcyJ9.gOLsv98109qLkmRK6Dn7WWRHLW7o8W78WZcWvFZoxPLzVO0qvRXXRLYc9h5chpfvcWreLZ4f1cOdvxv31_qnCRSQQPOeQ7r7hj_sPEDzhKjk-q2aoNHaGGJg1vabI--9EFkFsGQfoS7UbMMssS44dgR68XEnKtjn0Vys-Vzbvz_CBSCH6yQhRLik2SU2jR2L7BoFvh4LGZ6EKoQWzm8Z-CHXLGLUs4Hp5aPhF46dGzgAzwlPFW4t9G4DciX1uB4vv1XnfTc5wqJch6ltjKMde1GZwLR757a8dJSBcmGWze3UNE2YH_VLD7NCwH2kkqr3gh8rn7lWKG4AUIYPxsw9CB";
 
     let kid = token_kid(&jwt).expect("Failed to extract token KID");
-    assert_eq!(Some("8rDq8Pw0FZcaoXWTEVQo7+Tf2YzSL1fBxNKPCebaai4=".into()),
-               kid, "Extracted KID did not match expected KID");
+    assert_eq!(
+        Some("8rDq8Pw0FZcaoXWTEVQo7+Tf2YzSL1fBxNKPCebaai4=".into()),
+        kid,
+        "Extracted KID did not match expected KID"
+    );
 }
 
 #[test]
 fn test_validate_jwt() {
     let jwks_json = "{\"keys\":[{\"kty\":\"RSA\",\"alg\":\"RS256\",\"use\":\"sig\",\"kid\":\"8rDq8Pw0FZcaoXWTEVQo7+Tf2YzSL1fBxNKPCebaai4=\",\"n\":\"l4UTgk1zr-8C8utt0E57DtBV6qqAPWzVRrIuQS2j0_hp2CviaNl5XzGRDnB8gwk0Hx95YOhJupAe6RNq5ok3fDdxL7DLvppJNRLz3Ag9CsmDLcbXgNEQys33fBJaPw1v3GcaFC4tisU5p-o1f5RfWwvwdBtdBfGiwT1GRvbc5sFx6M4iYjg9uv1lNKW60PqSJW4iDYrfqzZmB0zF1SJ0BL_rnQZ1Wi_UkFmNe9arM8W9tI9T3Ie59HITFuyVSTCt6qQEtSfa1e5PiBaVuV3qoFI2jPBiVZQ6LPGBWEDyz4QtrHLdECPPoTF30NN6TSVwwlRbCuUUrdNdXdjYe2dMFQ\",\"e\":\"DhaD5zC7mzaDvHO192wKT_9sfsVmdy8w8T8C9VG17_b1jG2srd3cmc6Ycw-0blDf53Wrpi9-KGZXKHX6_uIuJK249WhkP7N1SHrTJxO0sUJ8AhK482PLF09Qtu6cUfJqY1X1y1S2vACJZItU4Vjr3YAfiVGQXeA8frAf7Sm4O1CBStCyg6yCcIbGojII0jfh2vSB-GD9ok1F69Nmk-R-bClyqMCV_Oq-5a0gqClVS8pDyGYMgKTww2RHgZaFSUcG13KeLMQsG2UOB2OjSC8FkOXK00NBlAjU3d0Vv-IamaLIszO7FQBY3Oh0uxNOvIE9ofQyCOpB-xIK6V9CTTphxw\"}]}";
 
-    let jwks: JWKS = serde_json::from_str(jwks_json)
-        .expect("Failed to decode JWKS");
+    let jwks: JWKS = serde_json::from_str(jwks_json).expect("Failed to decode JWKS");
 
-    let jwk = jwks.find("8rDq8Pw0FZcaoXWTEVQo7+Tf2YzSL1fBxNKPCebaai4=")
+    let jwk = jwks
+        .find("8rDq8Pw0FZcaoXWTEVQo7+Tf2YzSL1fBxNKPCebaai4=")
         .expect("Failed to find required JWK");
 
     let pkey = public_key_from_jwk(&jwk).expect("Failed to construct public key");
diff --git a/net/crimp/src/lib.rs b/net/crimp/src/lib.rs
index b52ebc3ef09c..4dd4d6c31bd7 100644
--- a/net/crimp/src/lib.rs
+++ b/net/crimp/src/lib.rs
@@ -33,9 +33,12 @@
 //! use crimp::Request;
 //!
 //! let response = Request::get("http://httpbin.org/get")
-//!     .user_agent("crimp test suite").unwrap()
-//!     .send().unwrap()
-//!     .as_string().unwrap();
+//!     .user_agent("crimp test suite")
+//!     .unwrap()
+//!     .send()
+//!     .unwrap()
+//!     .as_string()
+//!     .unwrap();
 //!
 //! println!("Status: {}\nBody: {}", response.status, response.body);
 //! # assert_eq!(response.status, 200);
@@ -54,10 +57,9 @@
 //!
 //! All optional features are enabled by default.
 //!
-//! * `json`: Adds `Request::json` and `Response::as_json` methods
-//!   which can be used for convenient serialisation of
-//!   request/response bodies using `serde_json`. This feature adds a
-//!   dependency on the `serde` and `serde_json` crates.
+//! * `json`: Adds `Request::json` and `Response::as_json` methods which can be used for convenient
+//!   serialisation of request/response bodies using `serde_json`. This feature adds a dependency on
+//!   the `serde` and `serde_json` crates.
 //!
 //! ## Initialisation
 //!
@@ -72,32 +74,42 @@
 
 extern crate curl;
 
-#[cfg(feature = "json")] extern crate serde;
-#[cfg(feature = "json")] extern crate serde_json;
+#[cfg(feature = "json")]
+extern crate serde;
+#[cfg(feature = "json")]
+extern crate serde_json;
 
 pub use curl::init;
 
-use curl::easy::{Auth, Easy, Form, List, Transfer, ReadError, WriteError};
+use curl::easy::{Auth, Easy, Form, List, ReadError, Transfer, WriteError};
 use std::collections::HashMap;
 use std::io::Write;
 use std::path::Path;
 use std::string::{FromUtf8Error, ToString};
 use std::time::Duration;
 
-#[cfg(feature = "json")] use serde::Serialize;
-#[cfg(feature = "json")] use serde::de::DeserializeOwned;
+#[cfg(feature = "json")]
+use serde::de::DeserializeOwned;
+#[cfg(feature = "json")]
+use serde::Serialize;
 
 #[cfg(test)]
 mod tests;
 
 /// HTTP method to use for the request.
 enum Method {
-    Get, Post, Put, Patch, Delete
+    Get,
+    Post,
+    Put,
+    Patch,
+    Delete,
 }
 
 /// Certificate types for client-certificate key pairs.
 pub enum CertType {
-    P12, PEM, DER
+    P12,
+    PEM,
+    DER,
 }
 
 /// Builder structure for an HTTP request.
@@ -145,7 +157,7 @@ pub struct Response<T> {
     pub body: T,
 }
 
-impl <'a> Request<'a> {
+impl<'a> Request<'a> {
     /// Initiate an HTTP request with the given method and URL.
     fn new(method: Method, url: &'a str) -> Self {
         Request {
@@ -158,19 +170,29 @@ impl <'a> Request<'a> {
     }
 
     /// Initiate a GET request with the given URL.
-    pub fn get(url: &'a str) -> Self { Request::new(Method::Get, url) }
+    pub fn get(url: &'a str) -> Self {
+        Request::new(Method::Get, url)
+    }
 
     /// Initiate a POST request with the given URL.
-    pub fn post(url: &'a str) -> Self { Request::new(Method::Post, url) }
+    pub fn post(url: &'a str) -> Self {
+        Request::new(Method::Post, url)
+    }
 
     /// Initiate a PUT request with the given URL.
-    pub fn put(url: &'a str) -> Self { Request::new(Method::Put, url) }
+    pub fn put(url: &'a str) -> Self {
+        Request::new(Method::Put, url)
+    }
 
     /// Initiate a PATCH request with the given URL.
-    pub fn patch(url: &'a str) -> Self { Request::new(Method::Patch, url) }
+    pub fn patch(url: &'a str) -> Self {
+        Request::new(Method::Patch, url)
+    }
 
     /// Initiate a DELETE request with the given URL.
-    pub fn delete(url: &'a str) -> Self { Request::new(Method::Delete, url) }
+    pub fn delete(url: &'a str) -> Self {
+        Request::new(Method::Delete, url)
+    }
 
     /// Add an HTTP header to a request.
     pub fn header(mut self, k: &str, v: &str) -> Result<Self, curl::Error> {
@@ -188,7 +210,8 @@ impl <'a> Request<'a> {
     /// Set the `Authorization` header to a `Bearer` value with the
     /// supplied token.
     pub fn bearer_auth(mut self, token: &str) -> Result<Self, curl::Error> {
-        self.headers.append(&format!("Authorization: Bearer {}", token))?;
+        self.headers
+            .append(&format!("Authorization: Bearer {}", token))?;
         Ok(self)
     }
 
@@ -212,8 +235,11 @@ impl <'a> Request<'a> {
     /// Consult the documentation for the `ssl_cert` and `ssl_key`
     /// functions in `curl::easy::Easy2` for details on supported
     /// formats and defaults.
-    pub fn tls_client_cert<P: AsRef<Path>>(mut self, cert_type: CertType, cert: P)
-                                           -> Result<Self, curl::Error> {
+    pub fn tls_client_cert<P: AsRef<Path>>(
+        mut self,
+        cert_type: CertType,
+        cert: P,
+    ) -> Result<Self, curl::Error> {
         self.handle.ssl_cert(cert)?;
         self.handle.ssl_cert_type(match cert_type {
             CertType::P12 => "P12",
@@ -262,13 +288,17 @@ impl <'a> Request<'a> {
     /// ```
     /// # use crimp::Request;
     /// let response = Request::get("https://httpbin.org/get")
-    ///     .with_handle(|mut handle| handle.referer("Example-Referer")).unwrap()
-    ///     .send().unwrap();
+    ///     .with_handle(|mut handle| handle.referer("Example-Referer"))
+    ///     .unwrap()
+    ///     .send()
+    ///     .unwrap();
     /// #
     /// # assert!(response.is_success());
     /// ```
     pub fn with_handle<F>(mut self, function: F) -> Result<Self, curl::Error>
-    where F: FnOnce(&mut Easy) -> Result<(), curl::Error> {
+    where
+        F: FnOnce(&mut Easy) -> Result<(), curl::Error>,
+    {
         function(&mut self.handle)?;
         Ok(self)
     }
@@ -293,12 +323,15 @@ impl <'a> Request<'a> {
     /// let mut form = Form::new();
     /// form.part("some-name")
     ///     .contents("some-data".as_bytes())
-    ///     .add().unwrap();
+    ///     .add()
+    ///     .unwrap();
     ///
     /// let response = Request::post("https://httpbin.org/post")
-    ///     .user_agent("crimp test suite").unwrap()
+    ///     .user_agent("crimp test suite")
+    ///     .unwrap()
     ///     .form(form)
-    ///     .send().unwrap();
+    ///     .send()
+    ///     .unwrap();
     /// #
     /// # assert_eq!(200, response.status, "form POST should succeed");
     /// # assert_eq!(
@@ -330,10 +363,10 @@ impl <'a> Request<'a> {
         self.handle.url(self.url)?;
 
         match self.method {
-            Method::Get    => self.handle.get(true)?,
-            Method::Post   => self.handle.post(true)?,
-            Method::Put    => self.handle.put(true)?,
-            Method::Patch  => self.handle.custom_request("PATCH")?,
+            Method::Get => self.handle.get(true)?,
+            Method::Post => self.handle.post(true)?,
+            Method::Put => self.handle.put(true)?,
+            Method::Patch => self.handle.custom_request("PATCH")?,
             Method::Delete => self.handle.custom_request("DELETE")?,
         }
 
@@ -351,21 +384,22 @@ impl <'a> Request<'a> {
 
         // Optionally set content type if a body payload is configured
         // and configure the expected body size (or form payload).
-         match self.body {
+        match self.body {
             Body::Bytes { content_type, data } => {
                 self.handle.post_field_size(data.len() as u64)?;
-                self.headers.append(&format!("Content-Type: {}", content_type))?;
-            },
+                self.headers
+                    .append(&format!("Content-Type: {}", content_type))?;
+            }
 
             #[cfg(feature = "json")]
             Body::Json(ref data) => {
                 self.handle.post_field_size(data.len() as u64)?;
                 self.headers.append("Content-Type: application/json")?;
-            },
+            }
 
-             // Do not set content-type header at all if there is no
-             // body, or if the form handler was invoked above.
-             _ => (),
+            // Do not set content-type header at all if there is no
+            // body, or if the form handler was invoked above.
+            _ => (),
         };
 
         // Configure headers on the request:
@@ -407,9 +441,7 @@ impl <'a> Request<'a> {
                     return true;
                 }
 
-                headers.insert(
-                    split[0].trim().to_string(), split[1].trim().to_string()
-                );
+                headers.insert(split[0].trim().to_string(), split[1].trim().to_string());
                 true
             })?;
 
@@ -427,7 +459,7 @@ impl <'a> Request<'a> {
         Ok(Response {
             status: self.handle.response_code()?,
             headers,
-            body
+            body,
         })
     }
 }
@@ -438,13 +470,14 @@ impl <'a> Request<'a> {
 ///
 /// As we manually set the expected upload size, cURL will call the
 /// read callback repeatedly until it has all the data it needs.
-fn chunked_read_function<'easy, 'data>(transfer: &mut Transfer<'easy, 'data>,
-                                       data: &'data [u8]) -> Result<(), curl::Error> {
+fn chunked_read_function<'easy, 'data>(
+    transfer: &mut Transfer<'easy, 'data>,
+    data: &'data [u8],
+) -> Result<(), curl::Error> {
     let mut data = data;
 
     transfer.read_function(move |mut into| {
-        let written = into.write(data)
-            .map_err(|_| ReadError::Abort)?;
+        let written = into.write(data).map_err(|_| ReadError::Abort)?;
 
         data = &data[written..];
 
@@ -452,7 +485,7 @@ fn chunked_read_function<'easy, 'data>(transfer: &mut Transfer<'easy, 'data>,
     })
 }
 
-impl <T> Response<T> {
+impl<T> Response<T> {
     /// Check whether the status code of this HTTP response is a
     /// success (i.e. in the 200-299 range).
     pub fn is_success(&self) -> bool {
@@ -466,9 +499,11 @@ impl <T> Response<T> {
     /// This function exists for convenience to avoid having to write
     /// repetitive `if !response.is_success() { ... }` blocks.
     pub fn error_for_status<F, E>(self, closure: F) -> Result<Self, E>
-    where F: FnOnce(Self) -> E {
+    where
+        F: FnOnce(Self) -> E,
+    {
         if !self.is_success() {
-            return Err(closure(self))
+            return Err(closure(self));
         }
 
         Ok(self)
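
The module docs above mention the optional `json` feature, but the visible doc examples only cover plain bodies and forms. For orientation, a request using Request::json and Response::as_json (both exercised in the tests that follow) looks roughly like this. Hedged sketch: the endpoint mirrors the test suite's local httpbin instance, and the caller is assumed to enable the default `json` feature and depend on serde_json.

use crimp::Request;
use serde_json::{json, Value};

fn post_json_example() -> Result<(), Box<dyn std::error::Error>> {
    let body = json!({ "purpose": "testing!" });

    let response = Request::post("http://127.0.0.1:4662/post")
        .user_agent("crimp example")? // sets the User-Agent header
        .json(&body)?                 // attaches the JSON body, sets Content-Type: application/json
        .send()?                      // performs the request via cURL
        .as_json::<Value>()?;         // deserialises the response body

    assert!(response.is_success());
    Ok(())
}
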
diff --git a/net/crimp/src/tests.rs b/net/crimp/src/tests.rs
index 6c2bc4f5b37a..e8e9223ce804 100644
--- a/net/crimp/src/tests.rs
+++ b/net/crimp/src/tests.rs
@@ -6,7 +6,7 @@
 //    docker run --rm -p 4662:80 kennethreitz/httpbin
 
 use super::*;
-use serde_json::{Value, json};
+use serde_json::{json, Value};
 
 // These tests check whether the correct HTTP method is used in the
 // requests.
@@ -14,7 +14,8 @@ use serde_json::{Value, json};
 #[test]
 fn test_http_get() {
     let resp = Request::get("http://127.0.0.1:4662/get")
-        .send().expect("failed to send request");
+        .send()
+        .expect("failed to send request");
 
     assert!(resp.is_success(), "request should have succeeded");
 }
@@ -22,7 +23,8 @@ fn test_http_get() {
 #[test]
 fn test_http_delete() {
     let resp = Request::delete("http://127.0.0.1:4662/delete")
-        .send().expect("failed to send request");
+        .send()
+        .expect("failed to send request");
 
     assert_eq!(200, resp.status, "response status should be 200 OK");
 }
@@ -30,7 +32,8 @@ fn test_http_delete() {
 #[test]
 fn test_http_put() {
     let resp = Request::put("http://127.0.0.1:4662/put")
-        .send().expect("failed to send request");
+        .send()
+        .expect("failed to send request");
 
     assert_eq!(200, resp.status, "response status should be 200 OK");
 }
@@ -38,7 +41,8 @@ fn test_http_put() {
 #[test]
 fn test_http_patch() {
     let resp = Request::patch("http://127.0.0.1:4662/patch")
-        .send().expect("failed to send request");
+        .send()
+        .expect("failed to send request");
 
     assert_eq!(200, resp.status, "response status should be 200 OK");
 }
@@ -50,18 +54,25 @@ fn test_http_patch() {
 fn test_http_post() {
     let body = "test body";
     let response = Request::post("http://127.0.0.1:4662/post")
-        .user_agent("crimp test suite").expect("failed to set user-agent")
-        .timeout(Duration::from_secs(5)).expect("failed to set request timeout")
+        .user_agent("crimp test suite")
+        .expect("failed to set user-agent")
+        .timeout(Duration::from_secs(5))
+        .expect("failed to set request timeout")
         .body("text/plain", &body.as_bytes())
-        .send().expect("failed to send request")
-        .as_json::<Value>().expect("failed to deserialize response");
+        .send()
+        .expect("failed to send request")
+        .as_json::<Value>()
+        .expect("failed to deserialize response");
 
     let data = response.body;
 
     assert_eq!(200, response.status, "response status should be 200 OK");
 
-    assert_eq!(data.get("data").unwrap(), &json!("test body"),
-               "test body should have been POSTed");
+    assert_eq!(
+        data.get("data").unwrap(),
+        &json!("test body"),
+        "test body should have been POSTed"
+    );
 
     assert_eq!(
         data.get("headers").unwrap().get("Content-Type").unwrap(),
@@ -70,26 +81,34 @@ fn test_http_post() {
     );
 }
 
-#[cfg(feature = "json")] #[test]
+#[cfg(feature = "json")]
+#[test]
 fn test_http_post_json() {
     let body = json!({
         "purpose": "testing!"
     });
 
     let response = Request::post("http://127.0.0.1:4662/post")
-        .user_agent("crimp test suite").expect("failed to set user-agent")
-        .timeout(Duration::from_secs(5)).expect("failed to set request timeout")
-        .json(&body).expect("request serialization failed")
-        .send().expect("failed to send request")
-        .as_json::<Value>().expect("failed to deserialize response");
-
+        .user_agent("crimp test suite")
+        .expect("failed to set user-agent")
+        .timeout(Duration::from_secs(5))
+        .expect("failed to set request timeout")
+        .json(&body)
+        .expect("request serialization failed")
+        .send()
+        .expect("failed to send request")
+        .as_json::<Value>()
+        .expect("failed to deserialize response");
 
     let data = response.body;
 
     assert_eq!(200, response.status, "response status should be 200 OK");
 
-    assert_eq!(data.get("json").unwrap(), &body,
-               "test body should have been POSTed");
+    assert_eq!(
+        data.get("json").unwrap(),
+        &body,
+        "test body should have been POSTed"
+    );
 
     assert_eq!(
         data.get("headers").unwrap().get("Content-Type").unwrap(),
@@ -104,8 +123,10 @@ fn test_http_post_json() {
 #[test]
 fn test_bearer_auth() {
     let response = Request::get("http://127.0.0.1:4662/bearer")
-        .bearer_auth("some-token").expect("failed to set auth header")
-        .send().expect("failed to send request");
+        .bearer_auth("some-token")
+        .expect("failed to set auth header")
+        .send()
+        .expect("failed to send request");
 
     assert!(response.is_success(), "authorized request should succeed");
 }
@@ -115,8 +136,10 @@ fn test_basic_auth() {
     let request = Request::get("http://127.0.0.1:4662/basic-auth/alan_watts/oneness");
 
     let response = request
-        .basic_auth("alan_watts", "oneness").expect("failed to set auth header")
-        .send().expect("failed to send request");
+        .basic_auth("alan_watts", "oneness")
+        .expect("failed to set auth header")
+        .send()
+        .expect("failed to send request");
 
     assert!(response.is_success(), "authorized request should succeed");
 }
@@ -129,14 +152,20 @@ fn test_large_body() {
 
     let resp = Request::post("http://127.0.0.1:4662/post")
         .body("application/octet-stream", &[0; BODY_SIZE])
-        .send().expect("sending request")
-        .as_json::<Value>().expect("JSON deserialisation");
+        .send()
+        .expect("sending request")
+        .as_json::<Value>()
+        .expect("JSON deserialisation");
 
     // httpbin returns the uploaded data as a string in the `data`
     // field.
     let data = resp.body.get("data").unwrap().as_str().unwrap();
 
-    assert_eq!(BODY_SIZE, data.len(), "uploaded data length should be correct");
+    assert_eq!(
+        BODY_SIZE,
+        data.len(),
+        "uploaded data length should be correct"
+    );
 }
 
 // Tests for various other features.
@@ -144,9 +173,13 @@ fn test_large_body() {
 #[test]
 fn test_error_for_status() {
     let response = Request::get("http://127.0.0.1:4662/patch")
-        .send().expect("failed to send request")
+        .send()
+        .expect("failed to send request")
         .error_for_status(|resp| format!("Response error code: {}", resp.status));
 
-    assert_eq!(Err("Response error code: 405".into()), response,
-               "returned error should be converted into Result::Err");
+    assert_eq!(
+        Err("Response error code: 405".into()),
+        response,
+        "returned error should be converted into Result::Err"
+    );
 }
diff --git a/nix/nint/nint.rs b/nix/nint/nint.rs
index 1fa4dccb4f86..abb0153c3ad2 100644
--- a/nix/nint/nint.rs
+++ b/nix/nint/nint.rs
@@ -1,11 +1,11 @@
 extern crate serde_json;
 
 use serde_json::Value;
+use std::convert::TryFrom;
 use std::ffi::OsString;
-use std::os::unix::ffi::{OsStringExt, OsStrExt};
-use std::io::{Error, ErrorKind, Write, stdout, stderr};
+use std::io::{stderr, stdout, Error, ErrorKind, Write};
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
 use std::process::Command;
-use std::convert::{TryFrom};
 
 fn render_nix_string(s: &OsString) -> OsString {
     let mut rendered = Vec::new();
@@ -16,8 +16,8 @@ fn render_nix_string(s: &OsString) -> OsString {
         match char::from(*b) {
             '\"' => rendered.extend(b"\\\""),
             '\\' => rendered.extend(b"\\\\"),
-            '$'  => rendered.extend(b"\\$"),
-            _    => rendered.push(*b),
+            '$' => rendered.extend(b"\\$"),
+            _ => rendered.push(*b),
         }
     }
 
@@ -48,17 +48,14 @@ fn render_nix_list(arr: &[OsString]) -> OsString {
 macro_rules! handle_set_output {
     ($map_name:ident, $output_name:ident) => {
         match $map_name.get(stringify!($output_name)) {
-            Some(Value::String(s)) =>
-                $output_name().write_all(s.as_bytes()),
-            Some(_) => Err(
-                Error::new(
-                    ErrorKind::Other,
-                    format!("Attribute {} must be a string!", stringify!($output_name)),
-                )
-            ),
+            Some(Value::String(s)) => $output_name().write_all(s.as_bytes()),
+            Some(_) => Err(Error::new(
+                ErrorKind::Other,
+                format!("Attribute {} must be a string!", stringify!($output_name)),
+            )),
             None => Ok(()),
         }
-    }
+    };
 }
 
 fn main() -> std::io::Result<()> {
@@ -83,7 +80,7 @@ fn main() -> std::io::Result<()> {
         }
 
         if in_args {
-            match(arg.to_str()) {
+            match (arg.to_str()) {
                 Some("--arg") | Some("--argstr") => {
                     nix_args.push(arg);
                     nix_args.push(args.next().unwrap());
@@ -116,9 +113,7 @@ fn main() -> std::io::Result<()> {
 
         nix_args.push(argv[0].clone());
 
-        let run = Command::new("nix-instantiate")
-                          .args(nix_args)
-                          .output()?;
+        let run = Command::new("nix-instantiate").args(nix_args).output()?;
 
         match serde_json::from_slice(&run.stdout[..]) {
             Ok(Value::String(s)) => stdout().write_all(s.as_bytes()),
@@ -132,25 +127,23 @@ fn main() -> std::io::Result<()> {
 
                         match code {
                             Some(i) => std::process::exit(i),
-                            None => Err(
-                                Error::new(
-                                    ErrorKind::Other,
-                                    "Attribute exit is not an i32"
-                                )
-                            ),
+                            None => {
+                                Err(Error::new(ErrorKind::Other, "Attribute exit is not an i32"))
+                            }
                         }
-                    },
-                    Some(_) => Err(
-                        Error::new(ErrorKind::Other, "exit must be a number")
-                    ),
+                    }
+                    Some(_) => Err(Error::new(ErrorKind::Other, "exit must be a number")),
                     None => Ok(()),
                 }
-            },
-            Ok(_) => Err(Error::new(ErrorKind::Other, "output must be a string or an object")),
+            }
+            Ok(_) => Err(Error::new(
+                ErrorKind::Other,
+                "output must be a string or an object",
+            )),
             _ => {
                 stderr().write_all(&run.stderr[..]);
                 Err(Error::new(ErrorKind::Other, "internal nix error"))
-            },
+            }
         }
     }
 }
diff --git a/ops/journaldriver/build.rs b/ops/journaldriver/build.rs
index d64c82a88a71..79eb1001bf09 100644
--- a/ops/journaldriver/build.rs
+++ b/ops/journaldriver/build.rs
@@ -1,6 +1,5 @@
 extern crate pkg_config;
 
 fn main() {
-    pkg_config::probe_library("libsystemd")
-        .expect("Could not probe libsystemd");
+    pkg_config::probe_library("libsystemd").expect("Could not probe libsystemd");
 }
diff --git a/ops/journaldriver/src/main.rs b/ops/journaldriver/src/main.rs
index 9886af1c3696..0c0e5cc23bff 100644
--- a/ops/journaldriver/src/main.rs
+++ b/ops/journaldriver/src/main.rs
@@ -31,11 +31,16 @@
 //! `GOOGLE_APPLICATION_CREDENTIALS`, `GOOGLE_CLOUD_PROJECT` and
 //! `LOG_NAME` environment variables.
 
-#[macro_use] extern crate failure;
-#[macro_use] extern crate log;
-#[macro_use] extern crate serde_derive;
-#[macro_use] extern crate serde_json;
-#[macro_use] extern crate lazy_static;
+#[macro_use]
+extern crate failure;
+#[macro_use]
+extern crate log;
+#[macro_use]
+extern crate serde_derive;
+#[macro_use]
+extern crate serde_json;
+#[macro_use]
+extern crate lazy_static;
 
 extern crate chrono;
 extern crate env_logger;
@@ -48,13 +53,11 @@ use chrono::offset::LocalResult;
 use chrono::prelude::{DateTime, TimeZone, Utc};
 use failure::ResultExt;
 use serde_json::{from_str, Value};
-use std::env;
-use std::fs::{self, File, rename};
-use std::io::{self, Read, ErrorKind, Write};
-use std::mem;
+use std::fs::{self, rename, File};
+use std::io::{self, ErrorKind, Read, Write};
 use std::path::PathBuf;
-use std::process;
 use std::time::{Duration, Instant};
+use std::{env, mem, process};
 use systemd::journal::{Journal, JournalFiles, JournalRecord, JournalSeek};
 
 #[cfg(test)]
@@ -62,10 +65,12 @@ mod tests;
 
 const LOGGING_SERVICE: &str = "https://logging.googleapis.com/google.logging.v2.LoggingServiceV2";
 const ENTRIES_WRITE_URL: &str = "https://logging.googleapis.com/v2/entries:write";
-const METADATA_TOKEN_URL: &str = "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token";
+const METADATA_TOKEN_URL: &str =
+    "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token";
 const METADATA_ID_URL: &str = "http://metadata.google.internal/computeMetadata/v1/instance/id";
 const METADATA_ZONE_URL: &str = "http://metadata.google.internal/computeMetadata/v1/instance/zone";
-const METADATA_PROJECT_URL: &str = "http://metadata.google.internal/computeMetadata/v1/project/project-id";
+const METADATA_PROJECT_URL: &str =
+    "http://metadata.google.internal/computeMetadata/v1/project/project-id";
 
 /// Convenience type alias for results using failure's `Error` type.
 type Result<T> = std::result::Result<T, failure::Error>;
@@ -134,14 +139,17 @@ fn get_metadata(url: &str) -> Result<String> {
 
     if response.ok() {
         // Whitespace is trimmed to remove newlines from responses.
-        let body = response.into_string()
+        let body = response
+            .into_string()
             .context("Failed to decode metadata response")?
-            .trim().to_string();
+            .trim()
+            .to_string();
 
         Ok(body)
     } else {
         let status = response.status_line().to_string();
-        let body = response.into_string()
+        let body = response
+            .into_string()
             .unwrap_or_else(|e| format!("Metadata body error: {}", e));
         bail!("Metadata failure: {} ({})", body, status)
     }
@@ -186,11 +194,9 @@ fn determine_monitored_resource() -> Value {
             }
         })
     } else {
-        let instance_id = get_metadata(METADATA_ID_URL)
-            .expect("Could not determine instance ID");
+        let instance_id = get_metadata(METADATA_ID_URL).expect("Could not determine instance ID");
 
-        let zone = get_metadata(METADATA_ZONE_URL)
-            .expect("Could not determine instance zone");
+        let zone = get_metadata(METADATA_ZONE_URL).expect("Could not determine instance zone");
 
         json!({
             "type": "gce_instance",
@@ -253,7 +259,8 @@ fn sign_service_account_token(credentials: &Credentials) -> Result<Token> {
     use medallion::{Algorithm, Header, Payload};
 
     let iat = Utc::now();
-    let exp = iat.checked_add_signed(chrono::Duration::seconds(3600))
+    let exp = iat
+        .checked_add_signed(chrono::Duration::seconds(3600))
         .ok_or_else(|| format_err!("Failed to calculate token expiry"))?;
 
     let header = Header {
@@ -323,7 +330,9 @@ enum Payload {
 /// text format.
 fn message_to_payload(message: Option<String>) -> Payload {
     match message {
-        None => Payload::TextPayload { text_payload: "empty log entry".into() },
+        None => Payload::TextPayload {
+            text_payload: "empty log entry".into(),
+        },
         Some(text_payload) => {
             // Attempt to deserialize the text payload as a generic
             // JSON value.
@@ -333,7 +342,7 @@ fn message_to_payload(message: Option<String>) -> Payload {
                 // expect other types of JSON payload) and return it
                 // in that case.
                 if json_payload.is_object() {
-                    return Payload::JsonPayload { json_payload }
+                    return Payload::JsonPayload { json_payload };
                 }
             }
 
@@ -450,9 +459,7 @@ impl From<JournalRecord> for LogEntry {
         // Journald uses syslogd's concept of priority. No idea if this is
         // always present, but it's optional in the Stackdriver API, so we just
         // omit it if we can't find or parse it.
-        let severity = record
-            .remove("PRIORITY")
-            .and_then(priority_to_severity);
+        let severity = record.remove("PRIORITY").and_then(priority_to_severity);
 
         LogEntry {
             payload,
@@ -468,8 +475,7 @@ impl From<JournalRecord> for LogEntry {
 
 /// Attempt to read from the journal. If no new entry is present,
 /// await the next one up to the specified timeout.
-fn receive_next_record(timeout: Duration, journal: &mut Journal)
-                       -> Result<Option<JournalRecord>> {
+fn receive_next_record(timeout: Duration, journal: &mut Journal) -> Result<Option<JournalRecord>> {
     let next_record = journal.next_record()?;
     if next_record.is_some() {
         return Ok(next_record);
@@ -525,11 +531,10 @@ fn persist_cursor(cursor: String) -> Result<()> {
     if cursor.is_empty() {
         error!("Received empty journald cursor position, refusing to persist!");
         error!("Please report this message at https://github.com/tazjin/journaldriver/issues/2");
-        return Ok(())
+        return Ok(());
     }
 
-    let mut file = File::create(&*CURSOR_TMP_FILE)
-        .context("Failed to create cursor file")?;
+    let mut file = File::create(&*CURSOR_TMP_FILE).context("Failed to create cursor file")?;
 
     write!(file, "{}", cursor).context("Failed to write cursor file")?;
 
@@ -547,9 +552,7 @@ fn persist_cursor(cursor: String) -> Result<()> {
 ///
 /// If flushing is successful the last cursor position will be
 /// persisted to disk.
-fn flush(token: &mut Token,
-         entries: Vec<LogEntry>,
-         cursor: String) -> Result<()> {
+fn flush(token: &mut Token, entries: Vec<LogEntry>, cursor: String) -> Result<()> {
     if token.is_expired() {
         debug!("Refreshing Google metadata access token");
         let new_token = get_token()?;
@@ -598,7 +601,8 @@ fn write_entries(token: &Token, request: Value) -> Result<()> {
         Ok(())
     } else {
         let status = response.status_line().to_string();
-        let body = response.into_string()
+        let body = response
+            .into_string()
             .unwrap_or_else(|_| "no response body".into());
         bail!("Write failure: {} ({})", body, status)
     }
@@ -624,14 +628,12 @@ fn initial_cursor() -> Result<JournalSeek> {
         Err(ref err) if err.kind() == ErrorKind::NotFound => {
             info!("No previous cursor position, reading from journal tail");
             Ok(JournalSeek::Tail)
-        },
-        Err(err) => {
-            (Err(err).context("Could not read cursor position"))?
         }
+        Err(err) => (Err(err).context("Could not read cursor position"))?,
     }
 }
 
-fn main () {
+fn main() {
     env_logger::init();
 
     // The directory in which cursor positions are persisted should
@@ -641,17 +643,17 @@ fn main () {
         process::exit(1);
     }
 
-    let cursor_position_dir = CURSOR_FILE.parent()
+    let cursor_position_dir = CURSOR_FILE
+        .parent()
         .expect("Invalid cursor position file path");
 
     fs::create_dir_all(cursor_position_dir)
         .expect("Could not create directory to store cursor position in");
 
-    let mut journal = Journal::open(JournalFiles::All, false, true)
-        .expect("Failed to open systemd journal");
+    let mut journal =
+        Journal::open(JournalFiles::All, false, true).expect("Failed to open systemd journal");
 
-    let seek_position = initial_cursor()
-        .expect("Failed to determine initial cursor position");
+    let seek_position = initial_cursor().expect("Failed to determine initial cursor position");
 
     match journal.seek(seek_position) {
         Ok(cursor) => info!("Opened journal at cursor '{}'", cursor),
diff --git a/ops/journaldriver/src/tests.rs b/ops/journaldriver/src/tests.rs
index 779add7a7012..8a07cc6fed9e 100644
--- a/ops/journaldriver/src/tests.rs
+++ b/ops/journaldriver/src/tests.rs
@@ -15,7 +15,10 @@ fn test_text_entry_serialization() {
     let expected = "{\"labels\":null,\"textPayload\":\"test entry\"}";
     let result = to_string(&entry).expect("serialization failed");
 
-    assert_eq!(expected, result, "Plain text payload should serialize correctly")
+    assert_eq!(
+        expected, result,
+        "Plain text payload should serialize correctly"
+    )
 }
 
 #[test]
@@ -26,7 +29,7 @@ fn test_json_entry_serialization() {
         payload: Payload::JsonPayload {
             json_payload: json!({
                 "message": "JSON test"
-            })
+            }),
         },
         severity: None,
     };
@@ -45,7 +48,10 @@ fn test_plain_text_payload() {
         text_payload: "plain text payload".into(),
     };
 
-    assert_eq!(expected, payload, "Plain text payload should be detected correctly");
+    assert_eq!(
+        expected, payload,
+        "Plain text payload should be detected correctly"
+    );
 }
 
 #[test]
@@ -55,7 +61,10 @@ fn test_empty_payload() {
         text_payload: "empty log entry".into(),
     };
 
-    assert_eq!(expected, payload, "Empty payload should be handled correctly");
+    assert_eq!(
+        expected, payload,
+        "Empty payload should be handled correctly"
+    );
 }
 
 #[test]
@@ -66,10 +75,13 @@ fn test_json_payload() {
         json_payload: json!({
             "someKey": "someValue",
             "otherKey": 42
-        })
+        }),
     };
 
-    assert_eq!(expected, payload, "JSON payload should be detected correctly");
+    assert_eq!(
+        expected, payload,
+        "JSON payload should be detected correctly"
+    );
 }
 
 #[test]
@@ -82,14 +94,16 @@ fn test_json_no_object() {
         text_payload: "42".into(),
     };
 
-    assert_eq!(expected, payload, "Non-object JSON payload should be plain text");
+    assert_eq!(
+        expected, payload,
+        "Non-object JSON payload should be plain text"
+    );
 }
 
 #[test]
 fn test_parse_microseconds() {
     let input: String = "1529175149291187".into();
-    let expected: DateTime<Utc> = "2018-06-16T18:52:29.291187Z"
-        .to_string().parse().unwrap();
+    let expected: DateTime<Utc> = "2018-06-16T18:52:29.291187Z".to_string().parse().unwrap();
 
     assert_eq!(Some(expected), parse_microseconds(input));
 }
diff --git a/ops/mq_cli/src/main.rs b/ops/mq_cli/src/main.rs
index 55ff0064295d..927993b486ad 100644
--- a/ops/mq_cli/src/main.rs
+++ b/ops/mq_cli/src/main.rs
@@ -1,36 +1,38 @@
 extern crate clap;
-extern crate posix_mq;
 extern crate libc;
 extern crate nix;
+extern crate posix_mq;
 
-use clap::{App, SubCommand, Arg, ArgMatches, AppSettings};
-use posix_mq::{Name, Queue, Message};
+use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
+use posix_mq::{Message, Name, Queue};
 use std::fs::{read_dir, File};
 use std::io::{self, Read, Write};
 use std::process::exit;
 
 fn run_ls() {
-    let mqueues = read_dir("/dev/mqueue")
-        .expect("Could not read message queues");
+    let mqueues = read_dir("/dev/mqueue").expect("Could not read message queues");
 
     for queue in mqueues {
         let path = queue.unwrap().path();
         let status = {
-            let mut file = File::open(&path)
-                .expect("Could not open queue file");
+            let mut file = File::open(&path).expect("Could not open queue file");
 
             let mut content = String::new();
-            file.read_to_string(&mut content).expect("Could not read queue file");
+            file.read_to_string(&mut content)
+                .expect("Could not read queue file");
 
             content
         };
 
-        let queue_name = path.components().last().unwrap()
+        let queue_name = path
+            .components()
+            .last()
+            .unwrap()
             .as_os_str()
             .to_string_lossy();
 
         println!("/{}: {}", queue_name, status)
-    };
+    }
 }
 
 fn run_inspect(queue_name: &str) {
@@ -47,8 +49,7 @@ fn run_create(cmd: &ArgMatches) {
         set_rlimit(rlimit.parse().expect("Invalid rlimit value"));
     }
 
-    let name = Name::new(cmd.value_of("queue").unwrap())
-        .expect("Invalid queue name");
+    let name = Name::new(cmd.value_of("queue").unwrap()).expect("Invalid queue name");
 
     let max_pending: i64 = cmd.value_of("max-pending").unwrap().parse().unwrap();
     let max_size: i64 = cmd.value_of("max-size").unwrap().parse().unwrap();
@@ -56,11 +57,11 @@ fn run_create(cmd: &ArgMatches) {
     let queue = Queue::create(name, max_pending, max_size * 1024);
 
     match queue {
-        Ok(_)  => println!("Queue created successfully"),
+        Ok(_) => println!("Queue created successfully"),
         Err(e) => {
             writeln!(io::stderr(), "Could not create queue: {}", e).ok();
             exit(1);
-        },
+        }
     };
 }
 
@@ -120,7 +121,12 @@ fn run_rlimit() {
     };
 
     if errno != 0 {
-        writeln!(io::stderr(), "Could not get message queue rlimit: {}", errno).ok();
+        writeln!(
+            io::stderr(),
+            "Could not get message queue rlimit: {}",
+            errno
+        )
+        .ok();
     } else {
         println!("Message queue rlimit:");
         println!("Current limit: {}", rlimit.rlim_cur);
@@ -170,16 +176,20 @@ fn main() {
         .about("Create a new queue")
         .arg(&queue_arg)
         .arg(&rlimit_arg)
-        .arg(Arg::with_name("max-size")
-            .help("maximum message size (in kB)")
-            .long("max-size")
-            .required(true)
-            .takes_value(true))
-        .arg(Arg::with_name("max-pending")
-            .help("maximum # of pending messages")
-            .long("max-pending")
-            .required(true)
-            .takes_value(true));
+        .arg(
+            Arg::with_name("max-size")
+                .help("maximum message size (in kB)")
+                .long("max-size")
+                .required(true)
+                .takes_value(true),
+        )
+        .arg(
+            Arg::with_name("max-pending")
+                .help("maximum # of pending messages")
+                .long("max-pending")
+                .required(true)
+                .takes_value(true),
+        );
 
     let receive = SubCommand::with_name("receive")
         .about("Receive a message from a queue")
@@ -188,9 +198,11 @@ fn main() {
     let send = SubCommand::with_name("send")
         .about("Send a message to a queue")
         .arg(&queue_arg)
-        .arg(Arg::with_name("message")
-            .help("the message to send")
-            .required(true));
+        .arg(
+            Arg::with_name("message")
+                .help("the message to send")
+                .required(true),
+        );
 
     let rlimit = SubCommand::with_name("rlimit")
         .about("Get the message queue rlimit")
@@ -211,13 +223,13 @@ fn main() {
     match matches.subcommand() {
         ("ls", _) => run_ls(),
         ("inspect", Some(cmd)) => run_inspect(cmd.value_of("queue").unwrap()),
-        ("create",  Some(cmd)) => run_create(cmd),
+        ("create", Some(cmd)) => run_create(cmd),
         ("receive", Some(cmd)) => run_receive(cmd.value_of("queue").unwrap()),
-        ("send",    Some(cmd)) => run_send(
+        ("send", Some(cmd)) => run_send(
             cmd.value_of("queue").unwrap(),
-            cmd.value_of("message").unwrap()
+            cmd.value_of("message").unwrap(),
         ),
-        ("rlimit",  _) => run_rlimit(),
+        ("rlimit", _) => run_rlimit(),
         _ => unimplemented!(),
     }
 }
diff --git a/ops/posix_mq.rs/src/error.rs b/ops/posix_mq.rs/src/error.rs
index 84be154bee9a..bacd2aeb39e0 100644
--- a/ops/posix_mq.rs/src/error.rs
+++ b/ops/posix_mq.rs/src/error.rs
@@ -1,8 +1,5 @@
 use nix;
-use std::error;
-use std::fmt;
-use std::io;
-use std::num;
+use std::{error, fmt, io, num};
 
 /// This module implements a simple error type to match the errors that can be thrown from the C
 /// functions as well as some extra errors resulting from internal validations.
diff --git a/ops/posix_mq.rs/src/tests.rs b/ops/posix_mq.rs/src/tests.rs
index 7a08876aeacd..1f4ea9a58da6 100644
--- a/ops/posix_mq.rs/src/tests.rs
+++ b/ops/posix_mq.rs/src/tests.rs
@@ -4,8 +4,7 @@ use super::*;
 fn test_open_delete() {
     // Simple test with default queue settings
     let name = Name::new("/test-queue").unwrap();
-    let queue = Queue::open_or_create(name)
-        .expect("Opening queue failed");
+    let queue = Queue::open_or_create(name).expect("Opening queue failed");
 
     let message = Message {
         data: "test-message".as_bytes().to_vec(),
diff --git a/tools/cheddar/build.rs b/tools/cheddar/build.rs
index b63b2e337851..f70818d80177 100644
--- a/tools/cheddar/build.rs
+++ b/tools/cheddar/build.rs
@@ -28,14 +28,19 @@ fn main() {
 
     // Otherwise ask Nix to build it and inject the result.
     let output = Command::new("nix-build")
-        .arg("-A").arg("third_party.bat_syntaxes")
+        .arg("-A")
+        .arg("third_party.bat_syntaxes")
         // ... assuming cheddar is at //tools/cheddar ...
         .arg("../..")
         .output()
         .expect(ERROR_MESSAGE);
 
     if !output.status.success() {
-        eprintln!("{}\nNix output: {}", ERROR_MESSAGE, String::from_utf8_lossy(&output.stderr));
+        eprintln!(
+            "{}\nNix output: {}",
+            ERROR_MESSAGE,
+            String::from_utf8_lossy(&output.stderr)
+        );
         return;
     }
 
diff --git a/tools/cheddar/src/bin/cheddar.rs b/tools/cheddar/src/bin/cheddar.rs
index 58ef32a1b432..48c504d53590 100644
--- a/tools/cheddar/src/bin/cheddar.rs
+++ b/tools/cheddar/src/bin/cheddar.rs
@@ -5,14 +5,13 @@
 //! 2. As a long-running HTTP server that handles rendering requests
 //!    (matching the SourceGraph protocol).
 use clap::{App, Arg};
-use rouille::Response;
-use rouille::{router, try_or_400};
+use rouille::{router, try_or_400, Response};
 use serde::Deserialize;
 use serde_json::json;
 use std::collections::HashMap;
 use std::io;
 
-use cheddar::{THEMES, format_code, format_markdown};
+use cheddar::{format_code, format_markdown, THEMES};
 
 // Server endpoint for rendering the syntax of source code. This
 // replaces the 'syntect_server' component of Sourcegraph.
diff --git a/tools/cheddar/src/lib.rs b/tools/cheddar/src/lib.rs
index da0eace17556..851bd743db2e 100644
--- a/tools/cheddar/src/lib.rs
+++ b/tools/cheddar/src/lib.rs
@@ -8,12 +8,10 @@ use lazy_static::lazy_static;
 use regex::Regex;
 use std::cell::RefCell;
 use std::collections::HashMap;
-use std::env;
 use std::ffi::OsStr;
-use std::io;
-use std::io::BufRead;
-use std::io::Write;
+use std::io::{BufRead, Write};
 use std::path::Path;
+use std::{env, io};
 use syntect::dumps::from_binary;
 use syntect::easy::HighlightLines;
 use syntect::highlighting::{Theme, ThemeSet};
diff --git a/tools/rust-crates-advisory/check-security-advisory.rs b/tools/rust-crates-advisory/check-security-advisory.rs
index 3fd9bc2dd947..e76b090abccb 100644
--- a/tools/rust-crates-advisory/check-security-advisory.rs
+++ b/tools/rust-crates-advisory/check-security-advisory.rs
@@ -14,42 +14,89 @@ use std::io::Write;
 fn main() {
     let mut args = std::env::args_os();
     let file = args.nth(1).expect("security advisory md file is $1");
-    let crate_version =
-        args.nth(0).expect("crate version is $2")
-        .into_string().expect("crate version string not utf8")
-        ;
-    let crate_version = semver::Version::parse(&crate_version).expect(&format!("this is not a semver version: {}", &crate_version));
+    let crate_version = args
+        .nth(0)
+        .expect("crate version is $2")
+        .into_string()
+        .expect("crate version string not utf8");
+    let crate_version = semver::Version::parse(&crate_version)
+        .expect(&format!("this is not a semver version: {}", &crate_version));
     let filename = file.to_string_lossy();
 
     let content = std::fs::read(&file).expect(&format!("could not read {}", filename));
-    let content =
-        std::str::from_utf8(&content).expect(&format!("file {} was not encoded as utf-8", filename));
+    let content = std::str::from_utf8(&content)
+        .expect(&format!("file {} was not encoded as utf-8", filename));
     let content = content.trim_start();
 
     let toml_start = content
-        .strip_prefix("```toml").expect(&format!("file did not start with ```toml: {}", filename));
-    let toml_end_index = toml_start.find("```").expect(&format!("the toml section did not end, no `` found: {}", filename));
+        .strip_prefix("```toml")
+        .expect(&format!("file did not start with ```toml: {}", filename));
+    let toml_end_index = toml_start.find("```").expect(&format!(
+        "the toml section did not end, no `` found: {}",
+        filename
+    ));
     let toml = &toml_start[..toml_end_index];
-    let toml : toml::Value = toml::de::from_slice(toml.as_bytes()).expect(&format!("could not parse toml: {}", filename));
+    let toml: toml::Value = toml::de::from_slice(toml.as_bytes())
+        .expect(&format!("could not parse toml: {}", filename));
 
     let versions = toml
-        .as_table().expect(&format!("the toml is not a table: {}", filename))
-        .get("versions").expect(&format!("the toml does not contain the versions field: {}", filename))
-        .as_table().expect(&format!("the toml versions field must be a table: {}", filename));
+        .as_table()
+        .expect(&format!("the toml is not a table: {}", filename))
+        .get("versions")
+        .expect(&format!(
+            "the toml does not contain the versions field: {}",
+            filename
+        ))
+        .as_table()
+        .expect(&format!(
+            "the toml versions field must be a table: {}",
+            filename
+        ));
 
     let unaffected = match versions.get("unaffected") {
         Some(u) => u
-            .as_array().expect(&format!("the toml versions.unaffected field must be a list of semvers: {}", filename))
+            .as_array()
+            .expect(&format!(
+                "the toml versions.unaffected field must be a list of semvers: {}",
+                filename
+            ))
             .iter()
-            .map(|v| semver::VersionReq::parse(v.as_str().expect(&format!("the version field {} is not a string", v))).expect(&format!("the version field {} is not a valid semver VersionReq", v)))
+            .map(|v| {
+                semver::VersionReq::parse(
+                    v.as_str()
+                        .expect(&format!("the version field {} is not a string", v)),
+                )
+                .expect(&format!(
+                    "the version field {} is not a valid semver VersionReq",
+                    v
+                ))
+            })
             .collect(),
-        None => vec![]
+        None => vec![],
     };
 
-    let mut patched : Vec<semver::VersionReq> = versions.get("patched").expect(&format!("the toml versions.patched field must exist: {}", filename))
-        .as_array().expect(&format!("the toml versions.patched field must be a list of semvers: {}", filename))
+    let mut patched: Vec<semver::VersionReq> = versions
+        .get("patched")
+        .expect(&format!(
+            "the toml versions.patched field must exist: {}",
+            filename
+        ))
+        .as_array()
+        .expect(&format!(
+            "the toml versions.patched field must be a list of semvers: {}",
+            filename
+        ))
         .iter()
-        .map(|v| semver::VersionReq::parse(v.as_str().expect(&format!("the version field {} is not a string", v))).expect(&format!("the version field {} is not a valid semver VersionReq", v)))
+        .map(|v| {
+            semver::VersionReq::parse(
+                v.as_str()
+                    .expect(&format!("the version field {} is not a string", v)),
+            )
+            .expect(&format!(
+                "the version field {} is not a valid semver VersionReq",
+                v
+            ))
+        })
         .collect();
 
     patched.extend_from_slice(&unaffected[..]);
@@ -59,9 +106,14 @@ fn main() {
         std::process::exit(0);
     } else {
         if std::env::var_os("PRINT_ADVISORY").is_some() {
-            write!(std::io::stderr(), "Advisory {} matched!\n{}\n", filename, content).unwrap();
+            write!(
+                std::io::stderr(),
+                "Advisory {} matched!\n{}\n",
+                filename,
+                content
+            )
+            .unwrap();
         }
         std::process::exit(1);
     }
-
 }
diff --git a/tvix/src/bin/nix-store.rs b/tvix/src/bin/nix-store.rs
index 72e3bb22048e..e1568fff73f0 100644
--- a/tvix/src/bin/nix-store.rs
+++ b/tvix/src/bin/nix-store.rs
@@ -25,7 +25,8 @@ pub enum NixResult {
 
 #[cfg(test)]
 mod integration_tests {
-    use std::{collections::VecDeque, io::Write};
+    use std::collections::VecDeque;
+    use std::io::Write;
 
     use super::*;
 
diff --git a/users/Profpatsch/execline/exec_helpers.rs b/users/Profpatsch/execline/exec_helpers.rs
index b9e1f5797386..a57cbca35391 100644
--- a/users/Profpatsch/execline/exec_helpers.rs
+++ b/users/Profpatsch/execline/exec_helpers.rs
@@ -1,13 +1,16 @@
-use std::os::unix::process::CommandExt;
 use std::ffi::OsStr;
-use std::os::unix::ffi::{OsStringExt, OsStrExt};
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
+use std::os::unix::process::CommandExt;
 
 pub fn no_args(current_prog_name: &str) -> () {
     let mut args = std::env::args_os();
     // remove argv[0]
     let _ = args.nth(0);
     if args.len() > 0 {
-        die_user_error(current_prog_name, format!("Expected no arguments, got {:?}", args.collect::<Vec<_>>()))
+        die_user_error(
+            current_prog_name,
+            format!("Expected no arguments, got {:?}", args.collect::<Vec<_>>()),
+        )
     }
 }
 
@@ -16,31 +19,46 @@ pub fn args(current_prog_name: &str, no_of_positional_args: usize) -> Vec<Vec<u8
     // remove argv[0]
     let _ = args.nth(0);
     if args.len() != no_of_positional_args {
-        die_user_error(current_prog_name, format!("Expected {} arguments, got {}, namely {:?}", no_of_positional_args, args.len(), args.collect::<Vec<_>>()))
+        die_user_error(
+            current_prog_name,
+            format!(
+                "Expected {} arguments, got {}, namely {:?}",
+                no_of_positional_args,
+                args.len(),
+                args.collect::<Vec<_>>()
+            ),
+        )
     }
     args.map(|arg| arg.into_vec()).collect()
 }
 
-pub fn args_for_exec(current_prog_name: &str, no_of_positional_args: usize) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
+pub fn args_for_exec(
+    current_prog_name: &str,
+    no_of_positional_args: usize,
+) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
     let mut args = std::env::args_os();
     // remove argv[0]
     let _ = args.nth(0);
     let mut args = args.map(|arg| arg.into_vec());
     let mut pos_args = vec![];
     // get positional args
-    for i in 1..no_of_positional_args+1 {
-            pos_args.push(
-                args.nth(0).expect(
-                    &format!("{}: expects {} positional args, only got {}", current_prog_name, no_of_positional_args, i))
-            );
+    for i in 1..no_of_positional_args + 1 {
+        pos_args.push(args.nth(0).expect(&format!(
+            "{}: expects {} positional args, only got {}",
+            current_prog_name, no_of_positional_args, i
+        )));
     }
     // prog... is the rest of the iterator
-    let prog : Vec<Vec<u8>> = args.collect();
+    let prog: Vec<Vec<u8>> = args.collect();
     (pos_args, prog)
 }
 
-pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(current_prog_name: &str, args: Args, env_additions: Env) -> !
-    where
+pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(
+    current_prog_name: &str,
+    args: Args,
+    env_additions: Env,
+) -> !
+where
     Args: IntoIterator<Item = Arg>,
     Arg: AsRef<[u8]>,
     Env: IntoIterator<Item = (Key, Val)>,
@@ -50,27 +68,40 @@ pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(current_prog_name: &str,
     // TODO: is this possible without collecting into a Vec first, just leaving it an IntoIterator?
     let args = args.into_iter().collect::<Vec<Arg>>();
     let mut args = args.iter().map(|v| OsStr::from_bytes(v.as_ref()));
-    let prog = args.nth(0).expect(&format!("{}: first argument must be an executable", current_prog_name));
+    let prog = args.nth(0).expect(&format!(
+        "{}: first argument must be an executable",
+        current_prog_name
+    ));
     // TODO: same here
     let env = env_additions.into_iter().collect::<Vec<(Key, Val)>>();
-    let env = env.iter().map(|(k,v)| (OsStr::from_bytes(k.as_ref()), OsStr::from_bytes(v.as_ref())));
+    let env = env
+        .iter()
+        .map(|(k, v)| (OsStr::from_bytes(k.as_ref()), OsStr::from_bytes(v.as_ref())));
     let err = std::process::Command::new(prog).args(args).envs(env).exec();
-    die_missing_executable(current_prog_name, format!("exec failed: {}, while trying to execing into {:?}", err, prog));
+    die_missing_executable(
+        current_prog_name,
+        format!(
+            "exec failed: {}, while trying to execing into {:?}",
+            err, prog
+        ),
+    );
 }
 
 /// Exit 1 to signify a generic expected error
 /// (e.g. something that sometimes just goes wrong, like a nix build).
 pub fn die_expected_error<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
-  die_with(1, current_prog_name, msg)
+    die_with(1, current_prog_name, msg)
 }
 
 /// Exit 100 to signify a user error (“the user is holding it wrong”).
 /// This is a permanent error, if the program is executed the same way
 /// it should crash with 100 again.
 pub fn die_user_error<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     die_with(100, current_prog_name, msg)
 }
@@ -78,14 +109,16 @@ where S: AsRef<str>
 /// Exit 101 to signify an unexpected crash (failing assertion or panic).
 /// This is the same exit code that `panic!()` emits.
 pub fn die_panic<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     die_with(101, current_prog_name, msg)
 }
 
 /// Exit 111 to signify a temporary error (such as resource exhaustion)
 pub fn die_temporary<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     die_with(111, current_prog_name, msg)
 }
@@ -93,20 +126,23 @@ where S: AsRef<str>
 /// Exit 126 to signify an environment problem
 /// (the user has set up stuff incorrectly so the program cannot work)
 pub fn die_environment_problem<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     die_with(126, current_prog_name, msg)
 }
 
 /// Exit 127 to signify a missing executable.
 pub fn die_missing_executable<S>(current_prog_name: &str, msg: S) -> !
-where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     die_with(127, current_prog_name, msg)
 }
 
 fn die_with<S>(status: i32, current_prog_name: &str, msg: S) -> !
-    where S: AsRef<str>
+where
+    S: AsRef<str>,
 {
     eprintln!("{}: {}", current_prog_name, msg.as_ref());
     std::process::exit(status)
diff --git a/users/Profpatsch/git-db/git-db.rs b/users/Profpatsch/git-db/git-db.rs
index 5c6bb1f85646..c8019bf03661 100644
--- a/users/Profpatsch/git-db/git-db.rs
+++ b/users/Profpatsch/git-db/git-db.rs
@@ -2,7 +2,7 @@ extern crate git2;
 use std::os::unix::ffi::OsStrExt;
 use std::path::PathBuf;
 
-const DEFAULT_BRANCH : &str = "refs/heads/main";
+const DEFAULT_BRANCH: &str = "refs/heads/main";
 
 fn main() {
     let git_db_dir = std::env::var_os("GIT_DB_DIR").expect("set GIT_DB_DIR");
@@ -16,8 +16,12 @@ fn main() {
             .bare(true)
             .mkpath(true)
             .description("git-db database")
-            .initial_head(DEFAULT_BRANCH)
-    ).expect(&format!("unable to create or open bare git repo at {}", &git_db.display()));
+            .initial_head(DEFAULT_BRANCH),
+    )
+    .expect(&format!(
+        "unable to create or open bare git repo at {}",
+        &git_db.display()
+    ));
 
     let mut index = repo.index().expect("cannot get the git index file");
     eprintln!("{:#?}", index.version());
@@ -34,8 +38,9 @@ fn main() {
 
     let data = "hi, it’s me".as_bytes();
 
-    index.add_frombuffer(
-        &git2::IndexEntry {
+    index
+        .add_frombuffer(
+            &git2::IndexEntry {
             mtime: now_git_time,
             ctime: now_git_time,
             // don’t make sense
@@ -50,25 +55,26 @@ fn main() {
             flags_extended: 0,
             path: "hi.txt".as_bytes().to_owned(),
         },
-        data
-    ).expect("could not add data to index");
+            data,
+        )
+        .expect("could not add data to index");
 
     let oid = index.write_tree().expect("could not write index tree");
 
-    let to_add_tree = repo.find_tree(oid)
+    let to_add_tree = repo
+        .find_tree(oid)
         .expect("we just created this tree, where did it go?");
 
     let parent_commits = match repo.find_reference(DEFAULT_BRANCH) {
-        Ok(ref_) => vec![
-            ref_
-            .peel_to_commit()
-            .expect(&format!("reference {} does not point to a commit", DEFAULT_BRANCH))
-        ],
+        Ok(ref_) => vec![ref_.peel_to_commit().expect(&format!(
+            "reference {} does not point to a commit",
+            DEFAULT_BRANCH
+        ))],
         Err(err) => match err.code() {
             // no commit exists yet
             git2::ErrorCode::NotFound => vec![],
             _ => panic!("could not read latest commit from {}", DEFAULT_BRANCH),
-        }
+        },
     };
     repo.commit(
         Some(DEFAULT_BRANCH),
@@ -79,7 +85,6 @@ fn main() {
          I wonder if it supports extended commit descriptions?\n",
         &to_add_tree,
         &parent_commits.iter().collect::<Vec<_>>()[..],
-    ).expect("could not commit the index we just wrote");
-
-
+    )
+    .expect("could not commit the index we just wrote");
 }
diff --git a/users/Profpatsch/imap-idle.rs b/users/Profpatsch/imap-idle.rs
index 9dce736d0d8a..937847b8798a 100644
--- a/users/Profpatsch/imap-idle.rs
+++ b/users/Profpatsch/imap-idle.rs
@@ -1,16 +1,16 @@
 extern crate exec_helpers;
 // extern crate arglib_netencode;
 // extern crate netencode;
-extern crate imap;
 extern crate epoll;
+extern crate imap;
 
 // use netencode::dec;
+use imap::extensions::idle::SetReadTimeout;
 use std::convert::TryFrom;
-use std::io::{Read, Write};
 use std::fs::File;
-use std::os::unix::io::{FromRawFd, AsRawFd, RawFd};
+use std::io::{Read, Write};
+use std::os::unix::io::{AsRawFd, FromRawFd, RawFd};
 use std::time::Duration;
-use imap::extensions::idle::SetReadTimeout;
 
 /// Implements a UCSPI client that wraps fd 6 & 7
 /// and implements Write and Read with a timeout.
@@ -33,7 +33,7 @@ impl UcspiClient {
                 read: File::from_raw_fd(6),
                 read_epoll_fd,
                 read_timeout: None,
-                write: File::from_raw_fd(7)
+                write: File::from_raw_fd(7),
             })
         }
     }
@@ -54,21 +54,23 @@ impl SetReadTimeout for UcspiClient {
 impl Read for UcspiClient {
     // TODO: test the epoll code with a short timeout
     fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
-        const NO_DATA : u64 = 0;
+        const NO_DATA: u64 = 0;
         // in order to implement the read_timeout,
         // we use epoll to wait for either data or time out
         epoll::ctl(
             self.read_epoll_fd,
             epoll::ControlOptions::EPOLL_CTL_ADD,
             self.read.as_raw_fd(),
-            epoll::Event::new(epoll::Events::EPOLLIN, NO_DATA)
+            epoll::Event::new(epoll::Events::EPOLLIN, NO_DATA),
         )?;
         let UNUSED = epoll::Event::new(epoll::Events::EPOLLIN, NO_DATA);
         let wait = epoll::wait(
             self.read_epoll_fd,
             match self.read_timeout {
-                Some(duration) => i32::try_from(duration.as_millis()).expect("duration too big for epoll"),
-                None => -1 // infinite
+                Some(duration) => {
+                    i32::try_from(duration.as_millis()).expect("duration too big for epoll")
+                }
+                None => -1, // infinite
             },
             // event that was generated; but we don’t care
             &mut vec![UNUSED; 1][..],
@@ -79,11 +81,14 @@ impl Read for UcspiClient {
             self.read_epoll_fd,
             epoll::ControlOptions::EPOLL_CTL_DEL,
             self.read.as_raw_fd(),
-            UNUSED
+            UNUSED,
         )?;
         match wait {
             // timeout happened (0 events)
-            Ok(0) => Err(std::io::Error::new(std::io::ErrorKind::TimedOut, "ucspi read timeout")),
+            Ok(0) => Err(std::io::Error::new(
+                std::io::ErrorKind::TimedOut,
+                "ucspi read timeout",
+            )),
             // it’s ready for reading, we can read
             Ok(_) => self.read.read(buf),
             // error
@@ -110,18 +115,21 @@ fn main() {
     let username = std::env::var("IMAP_USERNAME").expect("username");
     let password = std::env::var("IMAP_PASSWORD").expect("password");
 
-    let net = unsafe {
-        UcspiClient::new_from_6_and_7().expect("no ucspi client for you")
-    };
+    let net = unsafe { UcspiClient::new_from_6_and_7().expect("no ucspi client for you") };
     let client = imap::Client::new(net);
-    let mut session = client.login(username, password).map_err(|(err, _)| err).expect("unable to login");
+    let mut session = client
+        .login(username, password)
+        .map_err(|(err, _)| err)
+        .expect("unable to login");
     eprintln!("{:#?}", session);
     let list = session.list(None, Some("*"));
     eprintln!("{:#?}", list);
     let mailbox = session.examine("INBOX");
     eprintln!("{:#?}", mailbox);
     fn now() -> String {
-        String::from_utf8_lossy(&std::process::Command::new("date").output().unwrap().stdout).trim_right().to_string()
+        String::from_utf8_lossy(&std::process::Command::new("date").output().unwrap().stdout)
+            .trim_right()
+            .to_string()
     }
     loop {
         eprintln!("{}: idling on INBOX", now());
diff --git a/users/Profpatsch/netencode/netencode-mustache.rs b/users/Profpatsch/netencode/netencode-mustache.rs
index ee7bafed2250..73ed5be1ded2 100644
--- a/users/Profpatsch/netencode/netencode-mustache.rs
+++ b/users/Profpatsch/netencode/netencode-mustache.rs
@@ -1,12 +1,12 @@
-extern crate netencode;
-extern crate mustache;
 extern crate arglib_netencode;
+extern crate mustache;
+extern crate netencode;
 
-use mustache::{Data};
-use netencode::{T};
+use mustache::Data;
+use netencode::T;
 use std::collections::HashMap;
-use std::os::unix::ffi::{OsStrExt};
-use std::io::{Read};
+use std::io::Read;
+use std::os::unix::ffi::OsStrExt;
 
 fn netencode_to_mustache_data_dwim(t: T) -> Data {
     match t {
@@ -25,27 +25,26 @@ fn netencode_to_mustache_data_dwim(t: T) -> Data {
         T::Record(xs) => Data::Map(
             xs.into_iter()
                 .map(|(key, val)| (key, netencode_to_mustache_data_dwim(val)))
-                .collect::<HashMap<_,_>>()
+                .collect::<HashMap<_, _>>(),
         ),
         T::List(xs) => Data::Vec(
             xs.into_iter()
                 .map(|x| netencode_to_mustache_data_dwim(x))
-                .collect::<Vec<_>>()
+                .collect::<Vec<_>>(),
         ),
     }
 }
 
 pub fn from_stdin() -> () {
-    let data = netencode_to_mustache_data_dwim(
-        arglib_netencode::arglib_netencode("netencode-mustache", Some(std::ffi::OsStr::new("TEMPLATE_DATA")))
-    );
+    let data = netencode_to_mustache_data_dwim(arglib_netencode::arglib_netencode(
+        "netencode-mustache",
+        Some(std::ffi::OsStr::new("TEMPLATE_DATA")),
+    ));
     let mut stdin = String::new();
     std::io::stdin().read_to_string(&mut stdin).unwrap();
     mustache::compile_str(&stdin)
-        .and_then(|templ| templ.render_data(
-            &mut std::io::stdout(),
-            &data
-        )).unwrap()
+        .and_then(|templ| templ.render_data(&mut std::io::stdout(), &data))
+        .unwrap()
 }
 
 pub fn main() {
diff --git a/users/Profpatsch/netencode/netencode.rs b/users/Profpatsch/netencode/netencode.rs
index 5bd43f992f0b..0d92cf1ed4ae 100644
--- a/users/Profpatsch/netencode/netencode.rs
+++ b/users/Profpatsch/netencode/netencode.rs
@@ -1,9 +1,9 @@
-extern crate nom;
 extern crate exec_helpers;
+extern crate nom;
 
 use std::collections::HashMap;
-use std::io::{Write, Read};
-use std::fmt::{Display, Debug};
+use std::fmt::{Debug, Display};
+use std::io::{Read, Write};
 
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum T {
@@ -46,22 +46,19 @@ impl T {
             T::I7(i) => U::I7(*i),
             T::Text(t) => U::Text(t.as_str()),
             T::Binary(v) => U::Binary(v),
-            T::Sum(Tag { tag, val }) => U::Sum(
-                Tag { tag: tag.as_str(), val: Box::new(val.to_u()) }
-            ),
-            T::Record(map) => U::Record(
-                map.iter().map(|(k, v)| (k.as_str(), v.to_u())).collect()
-            ),
-            T::List(l) => U::List(
-                l.iter().map(|v| v.to_u()).collect::<Vec<U<'a>>>()
-            ),
+            T::Sum(Tag { tag, val }) => U::Sum(Tag {
+                tag: tag.as_str(),
+                val: Box::new(val.to_u()),
+            }),
+            T::Record(map) => U::Record(map.iter().map(|(k, v)| (k.as_str(), v.to_u())).collect()),
+            T::List(l) => U::List(l.iter().map(|v| v.to_u()).collect::<Vec<U<'a>>>()),
         }
     }
 
     pub fn encode<'a>(&'a self) -> Vec<u8> {
         match self {
             // TODO: don’t go via U, inefficient
-            o => o.to_u().encode()
+            o => o.to_u().encode(),
         }
     }
 }
@@ -110,15 +107,16 @@ impl<'a> U<'a> {
             U::I7(i) => T::I7(*i),
             U::Text(t) => T::Text((*t).to_owned()),
             U::Binary(v) => T::Binary((*v).to_owned()),
-            U::Sum(Tag { tag, val }) => T::Sum(
-                Tag { tag: (*tag).to_owned(), val: Box::new(val.to_t()) }
-            ),
+            U::Sum(Tag { tag, val }) => T::Sum(Tag {
+                tag: (*tag).to_owned(),
+                val: Box::new(val.to_t()),
+            }),
             U::Record(map) => T::Record(
-                map.iter().map(|(k, v)| ((*k).to_owned(), v.to_t())).collect::<HashMap<String, T>>()
-            ),
-            U::List(l) => T::List(
-                l.iter().map(|v| v.to_t()).collect::<Vec<T>>()
+                map.iter()
+                    .map(|(k, v)| ((*k).to_owned(), v.to_t()))
+                    .collect::<HashMap<String, T>>(),
             ),
+            U::List(l) => T::List(l.iter().map(|v| v.to_t()).collect::<Vec<T>>()),
         }
     }
 }
@@ -127,16 +125,18 @@ impl<'a> U<'a> {
 pub struct Tag<S, A> {
     // TODO: make into &str
     pub tag: S,
-    pub val: Box<A>
+    pub val: Box<A>,
 }
 
 impl<S, A> Tag<S, A> {
     fn map<F, B>(self, f: F) -> Tag<S, B>
-        where F: Fn(A) -> B {
-          Tag {
-              tag: self.tag,
-              val: Box::new(f(*self.val))
-          }
+    where
+        F: Fn(A) -> B,
+    {
+        Tag {
+            tag: self.tag,
+            val: Box::new(f(*self.val)),
+        }
     }
 }
 
@@ -147,45 +147,51 @@ fn encode_tag<W: Write>(w: &mut W, tag: &str, val: &U) -> std::io::Result<()> {
 }
 
 pub fn encode<W: Write>(w: &mut W, u: &U) -> std::io::Result<()> {
-  match u {
-      U::Unit => write!(w, "u,"),
-      U::N1(b) => if *b { write!(w, "n1:1,") } else { write!(w, "n1:0,") },
-      U::N3(n) => write!(w, "n3:{},", n),
-      U::N6(n) => write!(w, "n6:{},", n),
-      U::N7(n) => write!(w, "n7:{},", n),
-      U::I3(i) => write!(w, "i3:{},", i),
-      U::I6(i) => write!(w, "i6:{},", i),
-      U::I7(i) => write!(w, "i7:{},", i),
-      U::Text(s) => {
-          write!(w, "t{}:", s.len());
-          w.write_all(s.as_bytes());
-          write!(w, ",")
-      }
-      U::Binary(s) => {
-          write!(w, "b{}:", s.len());
-          w.write_all(&s);
-          write!(w, ",")
-      },
-      U::Sum(Tag{tag, val}) => encode_tag(w, tag, val),
-      U::Record(m) => {
-          let mut c = std::io::Cursor::new(vec![]);
-          for (k, v) in m {
-              encode_tag(&mut c, k, v)?;
-          }
-          write!(w, "{{{}:", c.get_ref().len())?;
-          w.write_all(c.get_ref())?;
-          write!(w, "}}")
-      },
-      U::List(l) => {
-          let mut c = std::io::Cursor::new(vec![]);
-          for u in l {
-              encode(&mut c, u)?;
-          }
-          write!(w, "[{}:", c.get_ref().len())?;
-          w.write_all(c.get_ref())?;
-          write!(w, "]")
-      }
-  }
+    match u {
+        U::Unit => write!(w, "u,"),
+        U::N1(b) => {
+            if *b {
+                write!(w, "n1:1,")
+            } else {
+                write!(w, "n1:0,")
+            }
+        }
+        U::N3(n) => write!(w, "n3:{},", n),
+        U::N6(n) => write!(w, "n6:{},", n),
+        U::N7(n) => write!(w, "n7:{},", n),
+        U::I3(i) => write!(w, "i3:{},", i),
+        U::I6(i) => write!(w, "i6:{},", i),
+        U::I7(i) => write!(w, "i7:{},", i),
+        U::Text(s) => {
+            write!(w, "t{}:", s.len());
+            w.write_all(s.as_bytes());
+            write!(w, ",")
+        }
+        U::Binary(s) => {
+            write!(w, "b{}:", s.len());
+            w.write_all(&s);
+            write!(w, ",")
+        }
+        U::Sum(Tag { tag, val }) => encode_tag(w, tag, val),
+        U::Record(m) => {
+            let mut c = std::io::Cursor::new(vec![]);
+            for (k, v) in m {
+                encode_tag(&mut c, k, v)?;
+            }
+            write!(w, "{{{}:", c.get_ref().len())?;
+            w.write_all(c.get_ref())?;
+            write!(w, "}}")
+        }
+        U::List(l) => {
+            let mut c = std::io::Cursor::new(vec![]);
+            for u in l {
+                encode(&mut c, u)?;
+            }
+            write!(w, "[{}:", c.get_ref().len())?;
+            w.write_all(c.get_ref())?;
+            write!(w, "]")
+        }
+    }
 }
 
 pub fn text(s: String) -> T {
@@ -197,27 +203,36 @@ pub fn u_from_stdin_or_die_user_error<'a>(prog_name: &'_ str, stdin_buf: &'a mut
     let u = match parse::u_u(stdin_buf) {
         Ok((rest, u)) => match rest {
             b"" => u,
-            _ => exec_helpers::die_user_error(prog_name, format!("stdin contained some soup after netencode value: {:?}", String::from_utf8_lossy(rest)))
+            _ => exec_helpers::die_user_error(
+                prog_name,
+                format!(
+                    "stdin contained some soup after netencode value: {:?}",
+                    String::from_utf8_lossy(rest)
+                ),
+            ),
         },
-        Err(err) => exec_helpers::die_user_error(prog_name, format!("unable to parse netencode from stdin: {:?}", err))
+        Err(err) => exec_helpers::die_user_error(
+            prog_name,
+            format!("unable to parse netencode from stdin: {:?}", err),
+        ),
     };
     u
 }
 
 pub mod parse {
-    use super::{T, Tag, U};
+    use super::{Tag, T, U};
 
-    use std::str::FromStr;
-    use std::ops::Neg;
     use std::collections::HashMap;
+    use std::ops::Neg;
+    use std::str::FromStr;
 
-    use nom::{IResult};
-    use nom::branch::{alt};
+    use nom::branch::alt;
     use nom::bytes::streaming::{tag, take};
-    use nom::character::streaming::{digit1, char};
-    use nom::sequence::{tuple};
-    use nom::combinator::{map, map_res, flat_map, map_parser, opt};
+    use nom::character::streaming::{char, digit1};
+    use nom::combinator::{flat_map, map, map_parser, map_res, opt};
     use nom::error::{context, ErrorKind, ParseError};
+    use nom::sequence::tuple;
+    use nom::IResult;
 
     fn unit_t(s: &[u8]) -> IResult<&[u8], ()> {
         let (s, _) = context("unit", tag("u,"))(s)?;
@@ -227,9 +242,9 @@ pub mod parse {
     fn usize_t(s: &[u8]) -> IResult<&[u8], usize> {
         context(
             "usize",
-            map_res(
-                map_res(digit1, |n| std::str::from_utf8(n)),
-                |s| s.parse::<usize>())
+            map_res(map_res(digit1, |n| std::str::from_utf8(n)), |s| {
+                s.parse::<usize>()
+            }),
         )(s)
     }
 
@@ -238,87 +253,77 @@ pub mod parse {
             // This is the point where we check the discriminator;
             // if the beginning char does not match, we can immediately return.
             let (s, _) = char(begin)(s)?;
-            let (s, (len, _)) = tuple((
-                usize_t,
-                char(':')
-            ))(s)?;
-            let (s, (res, _)) = tuple((
-                take(len),
-                char(end)
-            ))(s)?;
+            let (s, (len, _)) = tuple((usize_t, char(':')))(s)?;
+            let (s, (res, _)) = tuple((take(len), char(end)))(s)?;
             Ok((s, res))
         }
     }
 
-
     fn uint_t<'a, I: FromStr + 'a>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], I> {
         move |s: &'a [u8]| {
             let (s, (_, _, int, _)) = tuple((
                 tag(t.as_bytes()),
                 char(':'),
-                map_res(
-                    map_res(digit1, |n: &[u8]| std::str::from_utf8(n)),
-                    |s| s.parse::<I>()
-                ),
-                char(',')
+                map_res(map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), |s| {
+                    s.parse::<I>()
+                }),
+                char(','),
             ))(s)?;
             Ok((s, int))
         }
     }
 
     fn bool_t<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], bool> {
-        context("bool", alt((
-            map(tag("n1:0,"), |_| false),
-            map(tag("n1:1,"), |_| true),
-        )))
-    }
-
-    fn int_t<'a, I: FromStr + Neg<Output=I>>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&[u8], I> {
         context(
-            t,
-            move |s: &'a [u8]| {
-                let (s, (_, _, neg, int, _)) = tuple((
-                    tag(t.as_bytes()),
-                    char(':'),
-                    opt(char('-')),
-                    map_res(
-                        map_res(digit1, |n: &[u8]| std::str::from_utf8(n)),
-                        |s| s.parse::<I>()
-                    ),
-                    char(',')
-                ))(s)?;
-                let res = match neg {
-                    Some(_) => -int,
-                    None => int,
-                };
-                Ok((s, res))
-            }
+            "bool",
+            alt((map(tag("n1:0,"), |_| false), map(tag("n1:1,"), |_| true))),
         )
     }
 
+    fn int_t<'a, I: FromStr + Neg<Output = I>>(
+        t: &'static str,
+    ) -> impl Fn(&'a [u8]) -> IResult<&[u8], I> {
+        context(t, move |s: &'a [u8]| {
+            let (s, (_, _, neg, int, _)) = tuple((
+                tag(t.as_bytes()),
+                char(':'),
+                opt(char('-')),
+                map_res(map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), |s| {
+                    s.parse::<I>()
+                }),
+                char(','),
+            ))(s)?;
+            let res = match neg {
+                Some(_) => -int,
+                None => int,
+            };
+            Ok((s, res))
+        })
+    }
+
     fn tag_t(s: &[u8]) -> IResult<&[u8], Tag<String, T>> {
         // recurses into the main parser
-        map(tag_g(t_t),
-            |Tag {tag, val}|
-            Tag {
-                tag: tag.to_string(),
-                val
-            })(s)
+        map(tag_g(t_t), |Tag { tag, val }| Tag {
+            tag: tag.to_string(),
+            val,
+        })(s)
     }
 
     fn tag_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Tag<&'a str, O>>
     where
-        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
     {
         move |s: &[u8]| {
             let (s, tag) = sized('<', '|')(s)?;
             let (s, val) = inner(s)?;
-            Ok((s, Tag {
-                tag: std::str::from_utf8(tag)
-                    .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
-                val: Box::new(val)
-            }))
-
+            Ok((
+                s,
+                Tag {
+                    tag: std::str::from_utf8(tag)
+                        .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
+                    val: Box::new(val),
+                },
+            ))
         }
     }
 
@@ -330,9 +335,9 @@ pub mod parse {
 
     fn text_g(s: &[u8]) -> IResult<&[u8], &str> {
         let (s, res) = sized('t', ',')(s)?;
-        Ok((s,
-            std::str::from_utf8(res)
-                .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
+        Ok((
+            s,
+            std::str::from_utf8(res).map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
         ))
     }
 
@@ -374,22 +379,24 @@ pub mod parse {
     {
         map_parser(
             sized('[', ']'),
-            nom::multi::many0(inner_no_empty_string(inner))
+            nom::multi::many0(inner_no_empty_string(inner)),
         )
     }
 
     fn record_t<'a>(s: &'a [u8]) -> IResult<&'a [u8], HashMap<String, T>> {
         let (s, r) = record_g(t_t)(s)?;
-        Ok((s,
+        Ok((
+            s,
             r.into_iter()
-            .map(|(k, v)| (k.to_string(), v))
-            .collect::<HashMap<_,_>>()))
+                .map(|(k, v)| (k.to_string(), v))
+                .collect::<HashMap<_, _>>(),
+        ))
     }
 
     fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], HashMap<&'a str, O>>
     where
         O: Clone,
-        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
     {
         move |s: &'a [u8]| {
             let (s, map) = map_parser(
@@ -397,19 +404,19 @@ pub mod parse {
                 nom::multi::fold_many0(
                     inner_no_empty_string(tag_g(&inner)),
                     HashMap::new(),
-                    |mut acc: HashMap<_,_>, Tag { tag, mut val }| {
+                    |mut acc: HashMap<_, _>, Tag { tag, mut val }| {
                         // ignore duplicated tag names that appear later
                         // according to netencode spec
-                        if ! acc.contains_key(tag) {
+                        if !acc.contains_key(tag) {
                             acc.insert(tag, *val);
                         }
                         acc
-                    }
-                )
+                    },
+                ),
             )(s)?;
             if map.is_empty() {
                 // records must not be empty, according to the spec
-                Err(nom::Err::Failure((s,nom::error::ErrorKind::Many1)))
+                Err(nom::Err::Failure((s, nom::error::ErrorKind::Many1)))
             } else {
                 Ok((s, map))
             }
@@ -424,7 +431,6 @@ pub mod parse {
             map(tag_g(u_u), |t| U::Sum(t)),
             map(list_g(u_u), U::List),
             map(record_g(u_u), U::Record),
-
             map(bool_t(), |u| U::N1(u)),
             map(uint_t("n3"), |u| U::N3(u)),
             map(uint_t("n6"), |u| U::N6(u)),
@@ -432,7 +438,6 @@ pub mod parse {
             map(int_t("i3"), |u| U::I3(u)),
             map(int_t("i6"), |u| U::I6(u)),
             map(int_t("i7"), |u| U::I7(u)),
-
             // less common
             map(uint_t("n2"), |u| U::N3(u)),
             map(uint_t("n4"), |u| U::N6(u)),
@@ -445,7 +450,7 @@ pub mod parse {
         ))(s)
     }
 
-    pub fn t_t(s: &[u8]) -> IResult<&[u8], T>  {
+    pub fn t_t(s: &[u8]) -> IResult<&[u8], T> {
         alt((
             text,
             binary(),
@@ -453,7 +458,6 @@ pub mod parse {
             map(tag_t, |t| T::Sum(t)),
             map(list_t, |l| T::List(l)),
             map(record_t, |p| T::Record(p)),
-
             map(bool_t(), |u| T::N1(u)),
             // 8, 64 and 128 bit
             map(uint_t("n3"), |u| T::N3(u)),
@@ -462,7 +466,6 @@ pub mod parse {
             map(int_t("i3"), |u| T::I3(u)),
             map(int_t("i6"), |u| T::I6(u)),
             map(int_t("i7"), |u| T::I7(u)),
-
             // less common
             map(uint_t("n2"), |u| T::N3(u)),
             map(uint_t("n4"), |u| T::N6(u)),
@@ -481,30 +484,18 @@ pub mod parse {
 
         #[test]
         fn test_parse_unit_t() {
-            assert_eq!(
-                unit_t("u,".as_bytes()),
-                Ok(("".as_bytes(), ()))
-            );
+            assert_eq!(unit_t("u,".as_bytes()), Ok(("".as_bytes(), ())));
         }
 
         #[test]
         fn test_parse_bool_t() {
-            assert_eq!(
-                bool_t()("n1:0,".as_bytes()),
-                Ok(("".as_bytes(), false))
-            );
-            assert_eq!(
-                bool_t()("n1:1,".as_bytes()),
-                Ok(("".as_bytes(), true))
-            );
+            assert_eq!(bool_t()("n1:0,".as_bytes()), Ok(("".as_bytes(), false)));
+            assert_eq!(bool_t()("n1:1,".as_bytes()), Ok(("".as_bytes(), true)));
         }
 
         #[test]
         fn test_parse_usize_t() {
-            assert_eq!(
-                usize_t("32foo".as_bytes()),
-                Ok(("foo".as_bytes(), 32))
-            );
+            assert_eq!(usize_t("32foo".as_bytes()), Ok(("foo".as_bytes(), 32)));
         }
 
         #[test]
@@ -515,7 +506,10 @@ pub mod parse {
             );
             assert_eq!(
                 uint_t::<u8>("n3")("n3:1024,abc".as_bytes()),
-                Err(nom::Err::Error(("1024,abc".as_bytes(), nom::error::ErrorKind::MapRes)))
+                Err(nom::Err::Error((
+                    "1024,abc".as_bytes(),
+                    nom::error::ErrorKind::MapRes
+                )))
             );
             assert_eq!(
                 int_t::<i64>("i6")("i6:-23,abc".as_bytes()),
@@ -544,18 +538,21 @@ pub mod parse {
             assert_eq!(
                 text("t5:hello,".as_bytes()),
                 Ok(("".as_bytes(), T::Text("hello".to_owned()))),
-                "{}", r"t5:hello,"
+                "{}",
+                r"t5:hello,"
             );
             assert_eq!(
                 text("t4:fo".as_bytes()),
                 // The content of the text should be 4 long
                 Err(nom::Err::Incomplete(nom::Needed::Size(4))),
-                "{}", r"t4:fo,"
+                "{}",
+                r"t4:fo,"
             );
             assert_eq!(
                 text("t9:ไปŠๆ—ฅใฏ,".as_bytes()),
                 Ok(("".as_bytes(), T::Text("ไปŠๆ—ฅใฏ".to_owned()))),
-                "{}", r"t9:ไปŠๆ—ฅใฏ,"
+                "{}",
+                r"t9:ไปŠๆ—ฅใฏ,"
             );
         }
 
@@ -564,24 +561,28 @@ pub mod parse {
             assert_eq!(
                 binary()("b5:hello,".as_bytes()),
                 Ok(("".as_bytes(), T::Binary(Vec::from("hello".to_owned())))),
-                "{}", r"b5:hello,"
+                "{}",
+                r"b5:hello,"
             );
             assert_eq!(
                 binary()("b4:fo".as_bytes()),
                 // The content of the byte should be 4 long
                 Err(nom::Err::Incomplete(nom::Needed::Size(4))),
-                "{}", r"b4:fo,"
+                "{}",
+                r"b4:fo,"
             );
             assert_eq!(
                 binary()("b4:foob".as_bytes()),
                 // The content is 4 bytes now, but the finishing , is missing
                 Err(nom::Err::Incomplete(nom::Needed::Size(1))),
-                    "{}", r"b4:fo,"
-                );
+                "{}",
+                r"b4:fo,"
+            );
             assert_eq!(
                 binary()("b9:ไปŠๆ—ฅใฏ,".as_bytes()),
                 Ok(("".as_bytes(), T::Binary(Vec::from("ไปŠๆ—ฅใฏ".as_bytes())))),
-                "{}", r"b9:ไปŠๆ—ฅใฏ,"
+                "{}",
+                r"b9:ไปŠๆ—ฅใฏ,"
             );
         }
 
@@ -590,25 +591,23 @@ pub mod parse {
             assert_eq!(
                 list_t("[0:]".as_bytes()),
                 Ok(("".as_bytes(), vec![])),
-                "{}", r"[0:]"
+                "{}",
+                r"[0:]"
             );
             assert_eq!(
                 list_t("[6:u,u,u,]".as_bytes()),
-                Ok(("".as_bytes(), vec![
-                    T::Unit,
-                    T::Unit,
-                    T::Unit,
-                ])),
-                "{}", r"[6:u,u,u,]"
+                Ok(("".as_bytes(), vec![T::Unit, T::Unit, T::Unit,])),
+                "{}",
+                r"[6:u,u,u,]"
             );
             assert_eq!(
                 list_t("[15:u,[7:t3:foo,]u,]".as_bytes()),
-                Ok(("".as_bytes(), vec![
-                    T::Unit,
-                    T::List(vec![T::Text("foo".to_owned())]),
-                    T::Unit,
-                ])),
-                "{}", r"[15:u,[7:t3:foo,]u,]"
+                Ok((
+                    "".as_bytes(),
+                    vec![T::Unit, T::List(vec![T::Text("foo".to_owned())]), T::Unit,]
+                )),
+                "{}",
+                r"[15:u,[7:t3:foo,]u,]"
             );
         }
 
@@ -616,27 +615,40 @@ pub mod parse {
         fn test_record() {
             assert_eq!(
                 record_t("{21:<1:a|u,<1:b|u,<1:c|u,}".as_bytes()),
-                Ok(("".as_bytes(), vec![
-                    ("a".to_owned(), T::Unit),
-                    ("b".to_owned(), T::Unit),
-                    ("c".to_owned(), T::Unit),
-                ].into_iter().collect::<HashMap<String, T>>())),
-                "{}", r"{21:<1:a|u,<1:b|u,<1:c|u,}"
+                Ok((
+                    "".as_bytes(),
+                    vec![
+                        ("a".to_owned(), T::Unit),
+                        ("b".to_owned(), T::Unit),
+                        ("c".to_owned(), T::Unit),
+                    ]
+                    .into_iter()
+                    .collect::<HashMap<String, T>>()
+                )),
+                "{}",
+                r"{21:<1:a|u,<1:b|u,<1:c|u,}"
             );
             // duplicated keys are ignored (first is taken)
             assert_eq!(
                 record_t("{25:<1:a|u,<1:b|u,<1:a|i1:-1,}".as_bytes()),
-                Ok(("".as_bytes(), vec![
-                    ("a".to_owned(), T::Unit),
-                    ("b".to_owned(), T::Unit),
-                ].into_iter().collect::<HashMap<_,_>>())),
-                "{}", r"{25:<1:a|u,<1:b|u,<1:a|i1:-1,}"
+                Ok((
+                    "".as_bytes(),
+                    vec![("a".to_owned(), T::Unit), ("b".to_owned(), T::Unit),]
+                        .into_iter()
+                        .collect::<HashMap<_, _>>()
+                )),
+                "{}",
+                r"{25:<1:a|u,<1:b|u,<1:a|i1:-1,}"
             );
             // empty records are not allowed
             assert_eq!(
                 record_t("{0:}".as_bytes()),
-                Err(nom::Err::Failure(("".as_bytes(), nom::error::ErrorKind::Many1))),
-                "{}", r"{0:}"
+                Err(nom::Err::Failure((
+                    "".as_bytes(),
+                    nom::error::ErrorKind::Many1
+                ))),
+                "{}",
+                r"{0:}"
             );
         }
 
@@ -645,37 +657,62 @@ pub mod parse {
             assert_eq!(
                 t_t("n3:255,".as_bytes()),
                 Ok(("".as_bytes(), T::N3(255))),
-                "{}", r"n3:255,"
+                "{}",
+                r"n3:255,"
             );
             assert_eq!(
                 t_t("t6:halloo,".as_bytes()),
                 Ok(("".as_bytes(), T::Text("halloo".to_owned()))),
-                "{}", r"t6:halloo,"
+                "{}",
+                r"t6:halloo,"
             );
             assert_eq!(
                 t_t("<3:foo|t6:halloo,".as_bytes()),
-                Ok(("".as_bytes(), T::Sum (Tag {
-                    tag: "foo".to_owned(),
-                    val: Box::new(T::Text("halloo".to_owned()))
-                }))),
-                "{}", r"<3:foo|t6:halloo,"
+                Ok((
+                    "".as_bytes(),
+                    T::Sum(Tag {
+                        tag: "foo".to_owned(),
+                        val: Box::new(T::Text("halloo".to_owned()))
+                    })
+                )),
+                "{}",
+                r"<3:foo|t6:halloo,"
             );
             // { a: Unit
             // , foo: List <A: Unit | B: List i3> }
             assert_eq!(
                 t_t("{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}".as_bytes()),
-                Ok(("".as_bytes(), T::Record(vec![
-                    ("a".to_owned(), T::Unit),
-                    ("foo".to_owned(), T::List(vec![
-                        T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::Unit) }),
-                        T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::N1(true)) }),
-                        T::Sum(Tag { tag: "B".to_owned(), val: Box::new(T::List(vec![T::I3(127)])) }),
-                    ]))
-                ].into_iter().collect::<HashMap<String, T>>()))),
-                "{}", r"{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}"
+                Ok((
+                    "".as_bytes(),
+                    T::Record(
+                        vec![
+                            ("a".to_owned(), T::Unit),
+                            (
+                                "foo".to_owned(),
+                                T::List(vec![
+                                    T::Sum(Tag {
+                                        tag: "A".to_owned(),
+                                        val: Box::new(T::Unit)
+                                    }),
+                                    T::Sum(Tag {
+                                        tag: "A".to_owned(),
+                                        val: Box::new(T::N1(true))
+                                    }),
+                                    T::Sum(Tag {
+                                        tag: "B".to_owned(),
+                                        val: Box::new(T::List(vec![T::I3(127)]))
+                                    }),
+                                ])
+                            )
+                        ]
+                        .into_iter()
+                        .collect::<HashMap<String, T>>()
+                    )
+                )),
+                "{}",
+                r"{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}"
             );
         }
-
     }
 }
 
@@ -735,7 +772,10 @@ pub mod dec {
         fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
             match u {
                 U::Binary(b) => Ok(b),
-                other => Err(DecodeError(format!("Cannot decode {:?} into Binary", other))),
+                other => Err(DecodeError(format!(
+                    "Cannot decode {:?} into Binary",
+                    other
+                ))),
             }
         }
     }
@@ -766,16 +806,17 @@ pub mod dec {
     pub struct Record<T>(pub T);
 
     impl<'a, Inner> Decoder<'a> for Record<Inner>
-        where Inner: Decoder<'a>
+    where
+        Inner: Decoder<'a>,
     {
         type A = HashMap<&'a str, Inner::A>;
         fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
             match u {
-                U::Record(map) =>
-                    map.into_iter()
+                U::Record(map) => map
+                    .into_iter()
                     .map(|(k, v)| self.0.dec(v).map(|v2| (k, v2)))
                     .collect::<Result<Self::A, _>>(),
-                o => Err(DecodeError(format!("Cannot decode {:?} into record", o)))
+                o => Err(DecodeError(format!("Cannot decode {:?} into record", o))),
             }
         }
     }
@@ -784,18 +825,22 @@ pub mod dec {
     #[derive(Clone, Copy)]
     pub struct RecordDot<'a, T> {
         pub field: &'a str,
-        pub inner: T
+        pub inner: T,
     }
 
-    impl <'a, Inner> Decoder<'a> for RecordDot<'_, Inner>
-        where Inner: Decoder<'a> + Clone
+    impl<'a, Inner> Decoder<'a> for RecordDot<'_, Inner>
+    where
+        Inner: Decoder<'a> + Clone,
     {
         type A = Inner::A;
         fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
             match Record(self.inner.clone()).dec(u) {
                 Ok(mut map) => match map.remove(self.field) {
                     Some(inner) => Ok(inner),
-                    None => Err(DecodeError(format!("Cannot find `{}` in record map", self.field))),
+                    None => Err(DecodeError(format!(
+                        "Cannot find `{}` in record map",
+                        self.field
+                    ))),
                 },
                 Err(err) => Err(err),
             }
@@ -804,23 +849,27 @@ pub mod dec {
 
     /// Equals one of the listed `A`s exactly, after decoding.
     #[derive(Clone)]
-    pub struct OneOf<T, A>{
+    pub struct OneOf<T, A> {
         pub inner: T,
         pub list: Vec<A>,
     }
 
-    impl <'a, Inner> Decoder<'a> for OneOf<Inner, Inner::A>
-        where Inner: Decoder<'a>,
-              Inner::A: Display + Debug + PartialEq
+    impl<'a, Inner> Decoder<'a> for OneOf<Inner, Inner::A>
+    where
+        Inner: Decoder<'a>,
+        Inner::A: Display + Debug + PartialEq,
     {
         type A = Inner::A;
         fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
             match self.inner.dec(u) {
                 Ok(inner) => match self.list.iter().any(|x| x.eq(&inner)) {
                     true => Ok(inner),
-                    false => Err(DecodeError(format!("{} is not one of {:?}", inner, self.list)))
+                    false => Err(DecodeError(format!(
+                        "{} is not one of {:?}",
+                        inner, self.list
+                    ))),
                 },
-                Err(err) => Err(err)
+                Err(err) => Err(err),
             }
         }
     }
@@ -829,16 +878,16 @@ pub mod dec {
     #[derive(Clone)]
     pub struct Try<T>(pub T);
 
-    impl <'a, Inner> Decoder<'a> for Try<Inner>
-        where Inner: Decoder<'a>
+    impl<'a, Inner> Decoder<'a> for Try<Inner>
+    where
+        Inner: Decoder<'a>,
     {
         type A = Option<Inner::A>;
         fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
             match self.0.dec(u) {
                 Ok(inner) => Ok(Some(inner)),
-                Err(err) => Ok(None)
+                Err(err) => Ok(None),
             }
         }
     }
-
 }
diff --git a/users/Profpatsch/netencode/pretty.rs b/users/Profpatsch/netencode/pretty.rs
index 8fec24a60e1a..935c3d4a8a17 100644
--- a/users/Profpatsch/netencode/pretty.rs
+++ b/users/Profpatsch/netencode/pretty.rs
@@ -1,6 +1,6 @@
 extern crate netencode;
 
-use netencode::{U, T, Tag};
+use netencode::{Tag, T, U};
 
 pub enum Pretty {
     Single {
@@ -20,7 +20,7 @@ pub enum Pretty {
         r#type: char,
         length: String,
         vals: Vec<Pretty>,
-        trailer: char
+        trailer: char,
     },
 }
 
@@ -39,7 +39,7 @@ impl Pretty {
                 r#type: 't',
                 length: format!("{}:", s.len()),
                 val: s.to_string(),
-                trailer: ','
+                trailer: ',',
             },
             U::Binary(s) => Pretty::Single {
                 r#type: 'b',
@@ -47,15 +47,18 @@ impl Pretty {
                 // For pretty printing we want the string to be visible obviously.
                 // Instead of not supporting binary, let’s use lossy conversion.
                 val: String::from_utf8_lossy(s).into_owned(),
-                trailer: ','
+                trailer: ',',
             },
-            U::Sum(Tag{tag, val}) => Self::pretty_tag(tag, Self::from_u(*val)),
+            U::Sum(Tag { tag, val }) => Self::pretty_tag(tag, Self::from_u(*val)),
             U::Record(m) => Pretty::Multi {
                 r#type: '{',
                 // TODO: we are losing the size here, should we recompute it? Keep it?
                 length: String::from(""),
-                vals: m.into_iter().map(|(k, v)| Self::pretty_tag(k, Self::from_u(v))).collect(),
-                trailer: '}'
+                vals: m
+                    .into_iter()
+                    .map(|(k, v)| Self::pretty_tag(k, Self::from_u(v)))
+                    .collect(),
+                trailer: '}',
             },
             U::List(l) => Pretty::Multi {
                 r#type: '[',
@@ -68,13 +71,14 @@ impl Pretty {
     }
 
     fn scalar<D>(r#type: char, length: &str, d: D) -> Pretty
-    where D: std::fmt::Display
+    where
+        D: std::fmt::Display,
     {
         Pretty::Single {
             r#type,
             length: length.to_string(),
             val: format!("{}", d),
-            trailer: ','
+            trailer: ',',
         }
     }
 
@@ -89,43 +93,62 @@ impl Pretty {
     }
 
     pub fn print_multiline<W>(&self, mut w: &mut W) -> std::io::Result<()>
-        where W: std::io::Write
+    where
+        W: std::io::Write,
     {
         Self::go(&mut w, self, 0, true);
         write!(w, "\n")
     }
 
     fn go<W>(mut w: &mut W, p: &Pretty, depth: usize, is_newline: bool) -> std::io::Result<()>
-        where W: std::io::Write
+    where
+        W: std::io::Write,
     {
-        const full : usize = 4;
-        const half : usize = 2;
-        let i = &vec![b' '; depth*full];
-        let iandhalf = &vec![b' '; depth*full + half];
-        let (i, iandhalf) = unsafe {(
-            std::str::from_utf8_unchecked(i),
-            std::str::from_utf8_unchecked(iandhalf),
-        )};
+        const full: usize = 4;
+        const half: usize = 2;
+        let i = &vec![b' '; depth * full];
+        let iandhalf = &vec![b' '; depth * full + half];
+        let (i, iandhalf) = unsafe {
+            (
+                std::str::from_utf8_unchecked(i),
+                std::str::from_utf8_unchecked(iandhalf),
+            )
+        };
         if is_newline {
             write!(&mut w, "{}", i);
         }
         match p {
-            Pretty::Single {r#type, length, val, trailer} =>
-                write!(&mut w, "{} {}{}", r#type, val, trailer),
-            Pretty::Tag { r#type, length, key, inner, val } => {
+            Pretty::Single {
+                r#type,
+                length,
+                val,
+                trailer,
+            } => write!(&mut w, "{} {}{}", r#type, val, trailer),
+            Pretty::Tag {
+                r#type,
+                length,
+                key,
+                inner,
+                val,
+            } => {
                 write!(&mut w, "{} {} {}", r#type, key, inner)?;
                 Self::go::<W>(&mut w, val, depth, false)
-            },
+            }
             // if the length is 0 or 1, we print on one line,
             // only if there’s more than one element we split the resulting value.
             // we never break lines on arbitrary column sizes, since that is just silly.
-            Pretty::Multi {r#type, length, vals, trailer} => match vals.len() {
+            Pretty::Multi {
+                r#type,
+                length,
+                vals,
+                trailer,
+            } => match vals.len() {
                 0 => write!(&mut w, "{} {}", r#type, trailer),
                 1 => {
                     write!(&mut w, "{} ", r#type);
                     Self::go::<W>(&mut w, &vals[0], depth, false)?;
                     write!(&mut w, "{}", trailer)
-                },
+                }
                 more => {
                     write!(&mut w, "\n{}{} \n", iandhalf, r#type)?;
                     for v in vals {
diff --git a/users/Profpatsch/read-http.rs b/users/Profpatsch/read-http.rs
index 50ff663b994a..efaded87e6cd 100644
--- a/users/Profpatsch/read-http.rs
+++ b/users/Profpatsch/read-http.rs
@@ -1,37 +1,35 @@
-extern crate httparse;
-extern crate netencode;
 extern crate arglib_netencode;
 extern crate ascii;
 extern crate exec_helpers;
+extern crate httparse;
+extern crate netencode;
 
-use std::os::unix::io::FromRawFd;
-use std::io::Read;
-use std::io::Write;
+use exec_helpers::{die_expected_error, die_temporary, die_user_error};
 use std::collections::HashMap;
-use exec_helpers::{die_user_error, die_expected_error, die_temporary};
+use std::io::{Read, Write};
+use std::os::unix::io::FromRawFd;
 
-use netencode::{U, T, dec};
 use netencode::dec::Decoder;
+use netencode::{dec, T, U};
 
 enum What {
     Request,
-    Response
+    Response,
 }
 
 // reads a http request (stdin), and writes all headers to stdout, as netencoded record.
 // The keys are text, but can be lists of text iff headers appear multiple times, so beware.
 fn main() -> std::io::Result<()> {
-
     exec_helpers::no_args("read-http");
 
     let args = dec::RecordDot {
         field: "what",
         inner: dec::OneOf {
             list: vec!["request", "response"],
-            inner: dec::Text
-        }
+            inner: dec::Text,
+        },
     };
-    let what : What = match args.dec(arglib_netencode::arglib_netencode("read-http", None).to_u()) {
+    let what: What = match args.dec(arglib_netencode::arglib_netencode("read-http", None).to_u()) {
         Ok("request") => What::Request,
         Ok("response") => What::Response,
     Ok(v) => panic!("shouldn’t happen!, value was: {}", v),
@@ -39,7 +37,8 @@ fn main() -> std::io::Result<()> {
     };
 
     fn read_stdin_to_complete<F>(mut parse: F) -> ()
-        where F: FnMut(&[u8]) -> httparse::Result<usize>
+    where
+        F: FnMut(&[u8]) -> httparse::Result<usize>,
     {
         let mut res = httparse::Status::Partial;
         loop {
@@ -48,16 +47,22 @@ fn main() -> std::io::Result<()> {
             }
             let mut buf = [0; 2048];
             match std::io::stdin().read(&mut buf[..]) {
-                Ok(size) => if size == 0 {
-                    break;
-                },
-                Err(err) => die_temporary("read-http", format!("could not read from stdin, {:?}", err))
+                Ok(size) => {
+                    if size == 0 {
+                        break;
+                    }
+                }
+                Err(err) => {
+                    die_temporary("read-http", format!("could not read from stdin, {:?}", err))
+                }
             }
             match parse(&buf) {
                 Ok(status) => {
                     res = status;
                 }
-                Err(err) => die_temporary("read-http", format!("httparse parsing failed: {:#?}", err))
+                Err(err) => {
+                    die_temporary("read-http", format!("httparse parsing failed: {:#?}", err))
+                }
             }
         }
     }
@@ -66,7 +71,10 @@ fn main() -> std::io::Result<()> {
         let mut res = HashMap::new();
         for httparse::Header { name, value } in headers {
             let val = ascii::AsciiStr::from_ascii(*value)
-                .expect(&format!("read-http: we require header values to be ASCII, but the header {} was {:?}", name, value))
+                .expect(&format!(
+                    "read-http: we require header values to be ASCII, but the header {} was {:?}",
+                    name, value
+                ))
                 .as_str();
             // lowercase the header names, since the standard doesn’t care
             // and we want unique strings to match against
@@ -77,13 +85,13 @@ fn main() -> std::io::Result<()> {
                     let name_lower = name.to_lowercase();
                     let _ = res.insert(name_lower, U::List(vec![U::Text(t), U::Text(val)]));
                     ()
-                },
+                }
                 Some(U::List(mut l)) => {
                     let name_lower = name.to_lowercase();
                     l.push(U::Text(val));
                     let _ = res.insert(name_lower, U::List(l));
                     ()
-                },
+                }
                 Some(o) => panic!("read-http: header not text nor list: {:?}", o),
             }
         }
@@ -98,12 +106,14 @@ fn main() -> std::io::Result<()> {
             match chonker.next() {
                 Some(Ok(chunk)) => {
                     buf.extend_from_slice(&chunk);
-                    if chunk.windows(4).any(|c| c == b"\r\n\r\n" ) {
+                    if chunk.windows(4).any(|c| c == b"\r\n\r\n") {
                         return Some(());
                     }
-                },
-                Some(Err(err)) => die_temporary("read-http", format!("error reading from stdin: {:?}", err)),
-                None => return None
+                }
+                Some(Err(err)) => {
+                    die_temporary("read-http", format!("error reading from stdin: {:?}", err))
+                }
+                None => return None,
             }
         }
     }
@@ -118,66 +128,97 @@ fn main() -> std::io::Result<()> {
             let mut buf: Vec<u8> = vec![];
             match read_till_end_of_header(&mut buf, stdin.lock()) {
                 Some(()) => match req.parse(&buf) {
-                    Ok(httparse::Status::Complete(_body_start)) => {},
-                    Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"),
-                    Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err))
+                    Ok(httparse::Status::Complete(_body_start)) => {}
+                    Ok(httparse::Status::Partial) => {
+                        die_expected_error("read-http", "httparse should have gotten a full header")
+                    }
+                    Err(err) => die_expected_error(
+                        "read-http",
+                        format!("httparse response parsing failed: {:#?}", err),
+                    ),
                 },
-                None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse request headers"))
+                None => die_expected_error(
+                    "read-http",
+                    format!("httparse end of stdin reached before able to parse request headers"),
+                ),
             }
             let method = req.method.expect("method must be filled on complete parse");
             let path = req.path.expect("path must be filled on complete parse");
             write_dict_req(method, path, &normalize_headers(req.headers))
-        },
+        }
         Response => {
             let mut resp = httparse::Response::new(&mut headers);
             let mut buf: Vec<u8> = vec![];
             match read_till_end_of_header(&mut buf, stdin.lock()) {
                 Some(()) => match resp.parse(&buf) {
-                    Ok(httparse::Status::Complete(_body_start)) => {},
-                    Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"),
-                    Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err))
+                    Ok(httparse::Status::Complete(_body_start)) => {}
+                    Ok(httparse::Status::Partial) => {
+                        die_expected_error("read-http", "httparse should have gotten a full header")
+                    }
+                    Err(err) => die_expected_error(
+                        "read-http",
+                        format!("httparse response parsing failed: {:#?}", err),
+                    ),
                 },
-                None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse response headers"))
+                None => die_expected_error(
+                    "read-http",
+                    format!("httparse end of stdin reached before able to parse response headers"),
+                ),
             }
             let code = resp.code.expect("code must be filled on complete parse");
-            let reason = resp.reason.expect("reason must be filled on complete parse");
+            let reason = resp
+                .reason
+                .expect("reason must be filled on complete parse");
             write_dict_resp(code, reason, &normalize_headers(resp.headers))
         }
     }
 }
 
-fn write_dict_req<'a, 'buf>(method: &'buf str, path: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
-    let mut http = vec![
-        ("method", U::Text(method)),
-        ("path", U::Text(path)),
-    ].into_iter().collect();
+fn write_dict_req<'a, 'buf>(
+    method: &'buf str,
+    path: &'buf str,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
+    let mut http = vec![("method", U::Text(method)), ("path", U::Text(path))]
+        .into_iter()
+        .collect();
     write_dict(http, headers)
 }
 
-fn write_dict_resp<'a, 'buf>(code: u16, reason: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
+fn write_dict_resp<'a, 'buf>(
+    code: u16,
+    reason: &'buf str,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
     let mut http = vec![
         ("status", U::N6(code as u64)),
         ("status-text", U::Text(reason)),
-    ].into_iter().collect();
+    ]
+    .into_iter()
+    .collect();
     write_dict(http, headers)
 }
 
-
-fn write_dict<'buf, 'a>(mut http: HashMap<&str, U<'a>>, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
-    match http.insert("headers", U::Record(
-        headers.iter().map(|(k,v)| (k.as_str(), v.clone())).collect()
-    )) {
+fn write_dict<'buf, 'a>(
+    mut http: HashMap<&str, U<'a>>,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
+    match http.insert(
+        "headers",
+        U::Record(
+            headers
+                .iter()
+                .map(|(k, v)| (k.as_str(), v.clone()))
+                .collect(),
+        ),
+    ) {
         None => (),
         Some(_) => panic!("read-http: headers already in dict"),
     };
-    netencode::encode(
-        &mut std::io::stdout(),
-        &U::Record(http)
-    )?;
+    netencode::encode(&mut std::io::stdout(), &U::Record(http))?;
     Ok(())
 }
 
-
 // iter helper
 
 struct Chunkyboi<T> {
@@ -188,10 +229,7 @@ struct Chunkyboi<T> {
 impl<R: Read> Chunkyboi<R> {
     fn new(inner: R, chunksize: usize) -> Self {
         let buf = vec![0; chunksize];
-        Chunkyboi {
-            inner,
-            buf
-        }
+        Chunkyboi { inner, buf }
     }
 }
 
@@ -205,7 +243,7 @@ impl<R: Read> Iterator for Chunkyboi<R> {
                 // clone a new buffer so we can reuse the internal one
                 Some(Ok(self.buf[..read].to_owned()))
             }
-            Err(err) => Some(Err(err))
+            Err(err) => Some(Err(err)),
         }
     }
 }
diff --git a/users/grfn/achilles/src/commands/eval.rs b/users/grfn/achilles/src/commands/eval.rs
index 61a712c08a8e..efd7399ed1c4 100644
--- a/users/grfn/achilles/src/commands/eval.rs
+++ b/users/grfn/achilles/src/commands/eval.rs
@@ -1,10 +1,6 @@
 use clap::Clap;
 
-use crate::codegen;
-use crate::interpreter;
-use crate::parser;
-use crate::tc;
-use crate::Result;
+use crate::{codegen, interpreter, parser, tc, Result};
 
 /// Evaluate an expression and print its result
 #[derive(Clap)]
diff --git a/users/grfn/achilles/src/parser/expr.rs b/users/grfn/achilles/src/parser/expr.rs
index f596b18970aa..b18ce4a0dc88 100644
--- a/users/grfn/achilles/src/parser/expr.rs
+++ b/users/grfn/achilles/src/parser/expr.rs
@@ -1,9 +1,8 @@
 use std::borrow::Cow;
 
-use nom::alt;
 use nom::character::complete::{digit1, multispace0, multispace1};
 use nom::{
-    call, char, complete, delimited, do_parse, flat_map, many0, map, named, opt, parse_to,
+    alt, call, char, complete, delimited, do_parse, flat_map, many0, map, named, opt, parse_to,
     preceded, separated_list0, separated_list1, tag, tuple,
 };
 use pratt::{Affix, Associativity, PrattParser, Precedence};
diff --git a/users/grfn/xanthous/server/src/main.rs b/users/grfn/xanthous/server/src/main.rs
index ed8f831c7d3f..f93ed6a3a421 100644
--- a/users/grfn/xanthous/server/src/main.rs
+++ b/users/grfn/xanthous/server/src/main.rs
@@ -13,11 +13,8 @@ use futures::Future;
 use metrics_exporter_prometheus::PrometheusBuilder;
 use nix::pty::Winsize;
 use pty::ChildHandle;
-use thrussh::ChannelId;
-use thrussh::{
-    server::{self, Auth, Session},
-    CryptoVec,
-};
+use thrussh::server::{self, Auth, Session};
+use thrussh::{ChannelId, CryptoVec};
 use thrussh_keys::decode_secret_key;
 use thrussh_keys::key::KeyPair;
 use tokio::fs::File;
diff --git a/users/grfn/xanthous/server/src/pty.rs b/users/grfn/xanthous/server/src/pty.rs
index 611130f5bcd9..234ecd8f2336 100644
--- a/users/grfn/xanthous/server/src/pty.rs
+++ b/users/grfn/xanthous/server/src/pty.rs
@@ -6,8 +6,7 @@ use std::task::{Context, Poll};
 
 use eyre::{bail, Result};
 use futures::Future;
-use nix::pty::forkpty;
-use nix::pty::Winsize;
+use nix::pty::{forkpty, Winsize};
 use nix::sys::termios::Termios;
 use nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};
 use nix::unistd::{ForkResult, Pid};
diff --git a/users/riking/adventofcode-2020/day01/src/main.rs b/users/riking/adventofcode-2020/day01/src/main.rs
index 3e6b339d7c2b..e8bc2a05e45a 100644
--- a/users/riking/adventofcode-2020/day01/src/main.rs
+++ b/users/riking/adventofcode-2020/day01/src/main.rs
@@ -6,7 +6,7 @@ use std::io::BufReader;
 const PART_2: bool = true;
 
 fn day01(is_part2: bool, numbers: &Vec<i64>) -> Result<String, anyhow::Error> {
-    //println!("{:?}", numbers);
+    // println!("{:?}", numbers);
 
     for n1 in numbers.iter() {
         for n2 in numbers.iter() {
@@ -50,7 +50,7 @@ fn parse(filename: &str) -> Result<Vec<i64>, anyhow::Error> {
 fn main() -> anyhow::Result<()> {
     let args: Vec<String> = std::env::args().collect();
 
-    //println!("{:?}", args);
+    // println!("{:?}", args);
     if args.len() != 2 {
         return Err(anyhow!("usage: day01 input_file"));
     }
diff --git a/web/atward/build.rs b/web/atward/build.rs
index 5dadba3bf352..90a2f35cd1a9 100644
--- a/web/atward/build.rs
+++ b/web/atward/build.rs
@@ -28,14 +28,19 @@ fn main() {
 
     // Otherwise ask Nix to build it and inject the result.
     let output = Command::new("nix-build")
-        .arg("-A").arg("web.atward.indexHtml")
+        .arg("-A")
+        .arg("web.atward.indexHtml")
         // ... assuming atward is at //web/atward ...
         .arg("../..")
         .output()
         .expect(ERROR_MESSAGE);
 
     if !output.status.success() {
-        eprintln!("{}\nNix output: {}", ERROR_MESSAGE, String::from_utf8_lossy(&output.stderr));
+        eprintln!(
+            "{}\nNix output: {}",
+            ERROR_MESSAGE,
+            String::from_utf8_lossy(&output.stderr)
+        );
         return;
     }
 
diff --git a/web/converse/src/db.rs b/web/converse/src/db.rs
index ae186bdf4e4d..a0d8915504d6 100644
--- a/web/converse/src/db.rs
+++ b/web/converse/src/db.rs
@@ -19,13 +19,13 @@
 //! This module implements the database executor, which holds the
 //! database connection and performs queries on it.
 
+use crate::errors::{ConverseError, Result};
+use crate::models::*;
 use actix::prelude::*;
-use diesel::{self, sql_query};
-use diesel::sql_types::Text;
 use diesel::prelude::*;
-use diesel::r2d2::{Pool, ConnectionManager};
-use crate::models::*;
-use crate::errors::{ConverseError, Result};
+use diesel::r2d2::{ConnectionManager, Pool};
+use diesel::sql_types::Text;
+use diesel::{self, sql_query};
 
 /// Raw PostgreSQL query used to perform full-text search on posts
 /// with a supplied phrase. For now, the query language is hardcoded
@@ -50,14 +50,12 @@ pub struct DbExecutor(pub Pool<ConnectionManager<PgConnection>>);
 
 impl DbExecutor {
     /// Request a list of threads.
-    //
     // TODO(tazjin): This should support pagination.
     pub fn list_threads(&self) -> Result<Vec<ThreadIndex>> {
         use crate::schema::thread_index::dsl::*;
 
         let conn = self.0.get()?;
-        let results = thread_index
-            .load::<ThreadIndex>(&conn)?;
+        let results = thread_index.load::<ThreadIndex>(&conn)?;
         Ok(results)
     }
 
@@ -69,9 +67,7 @@ impl DbExecutor {
 
         let conn = self.0.get()?;
 
-        let opt_user = users
-            .filter(email.eq(email))
-            .first(&conn).optional()?;
+        let opt_user = users.filter(email.eq(email)).first(&conn).optional()?;
 
         if let Some(user) = opt_user {
             Ok(user)
@@ -93,12 +89,11 @@ impl DbExecutor {
 
     /// Fetch a specific thread and return it with its posts.
     pub fn get_thread(&self, thread_id: i32) -> Result<(Thread, Vec<SimplePost>)> {
-        use crate::schema::threads::dsl::*;
         use crate::schema::simple_posts::dsl::id;
+        use crate::schema::threads::dsl::*;
 
         let conn = self.0.get()?;
-        let thread_result: Thread = threads
-            .find(thread_id).first(&conn)?;
+        let thread_result: Thread = threads.find(thread_id).first(&conn)?;
 
         let post_list = SimplePost::belonging_to(&thread_result)
             .order_by(id.asc())
@@ -127,8 +122,7 @@ impl DbExecutor {
 
     /// Create a new thread.
     pub fn create_thread(&self, new_thread: NewThread, post_text: String) -> Result<Thread> {
-                use crate::schema::threads;
-        use crate::schema::posts;
+        use crate::schema::{posts, threads};
 
         let conn = self.0.get()?;
 
@@ -161,20 +155,21 @@ impl DbExecutor {
 
         let closed: bool = {
             use crate::schema::threads::dsl::*;
-            threads.select(closed)
+            threads
+                .select(closed)
                 .find(new_post.thread_id)
                 .first(&conn)?
         };
 
         if closed {
             return Err(ConverseError::ThreadClosed {
-                id: new_post.thread_id
-            })
+                id: new_post.thread_id,
+            });
         }
 
         Ok(diesel::insert_into(posts::table)
-           .values(&new_post)
-           .get_result(&conn)?)
+            .values(&new_post)
+            .get_result(&conn)?)
     }
 
     /// Search for posts.
@@ -197,7 +192,6 @@ impl DbExecutor {
     }
 }
 
-
 // Old actor implementation:
 
 impl Actor for DbExecutor {
@@ -216,9 +210,7 @@ message!(LookupOrCreateUser, Result<User>);
 impl Handler<LookupOrCreateUser> for DbExecutor {
     type Result = <LookupOrCreateUser as Message>::Result;
 
-    fn handle(&mut self,
-              _: LookupOrCreateUser,
-              _: &mut Self::Context) -> Self::Result {
+    fn handle(&mut self, _: LookupOrCreateUser, _: &mut Self::Context) -> Self::Result {
         unimplemented!()
     }
 }
@@ -238,7 +230,9 @@ impl Handler<GetThread> for DbExecutor {
 
 /// Message used to fetch a specific post.
 #[derive(Deserialize, Debug)]
-pub struct GetPost { pub id: i32 }
+pub struct GetPost {
+    pub id: i32,
+}
 
 message!(GetPost, Result<SimplePost>);
 
@@ -296,7 +290,9 @@ impl Handler<CreatePost> for DbExecutor {
 
 /// Message used to search for posts
 #[derive(Deserialize)]
-pub struct SearchPosts { pub query: String }
+pub struct SearchPosts {
+    pub query: String,
+}
 message!(SearchPosts, Result<Vec<SearchResult>>);
 
 impl Handler<SearchPosts> for DbExecutor {
diff --git a/web/converse/src/errors.rs b/web/converse/src/errors.rs
index 32507c51b0c2..a4bd69023b8b 100644
--- a/web/converse/src/errors.rs
+++ b/web/converse/src/errors.rs
@@ -21,17 +21,12 @@
 //! are established in a similar way as was tradition in
 //! `error_chain`, albeit manually.
 
-use std::result;
-use actix_web::{ResponseError, HttpResponse};
 use actix_web::http::StatusCode;
+use actix_web::{HttpResponse, ResponseError};
+use std::result;
 
 // Modules with foreign errors:
-use actix;
-use actix_web;
-use askama;
-use diesel;
-use r2d2;
-use tokio_timer;
+use {actix, actix_web, askama, diesel, r2d2, tokio_timer};
 
 pub type Result<T> = result::Result<T, ConverseError>;
 pub type ConverseResult<T> = result::Result<T, ConverseError>;
@@ -96,7 +91,9 @@ impl From<askama::Error> for ConverseError {
 
 impl From<actix::MailboxError> for ConverseError {
     fn from(error: actix::MailboxError) -> ConverseError {
-        ConverseError::Actix { error: Box::new(error) }
+        ConverseError::Actix {
+            error: Box::new(error),
+        }
     }
 }
 
@@ -136,7 +133,7 @@ impl ResponseError for ConverseError {
                 .header("Location", format!("/thread/{}#post-reply", id))
                 .finish(),
             _ => HttpResponse::build(StatusCode::INTERNAL_SERVER_ERROR)
-                .body(format!("An error occured: {}", self))
+                .body(format!("An error occured: {}", self)),
         }
     }
 }
diff --git a/web/converse/src/handlers.rs b/web/converse/src/handlers.rs
index 0759cec5c146..49f9dcf9745f 100644
--- a/web/converse/src/handlers.rs
+++ b/web/converse/src/handlers.rs
@@ -23,22 +23,22 @@
 //! the tera templates stored in the `/templates` directory in the
 //! project root.
 
-use actix::prelude::*;
-use actix_web::*;
-use actix_web::http::Method;
-use actix_web::middleware::identity::RequestIdentity;
-use actix_web::middleware::{Started, Middleware};
-use actix_web;
 use crate::db::*;
-use crate::errors::{ConverseResult, ConverseError};
-use futures::Future;
+use crate::errors::{ConverseError, ConverseResult};
 use crate::models::*;
 use crate::oidc::*;
 use crate::render::*;
+use actix::prelude::*;
+use actix_web;
+use actix_web::http::Method;
+use actix_web::middleware::identity::RequestIdentity;
+use actix_web::middleware::{Middleware, Started};
+use actix_web::*;
+use futures::Future;
 
 use rouille::{Request, Response};
 
-type ConverseResponse = Box<dyn Future<Item=HttpResponse, Error=ConverseError>>;
+type ConverseResponse = Box<dyn Future<Item = HttpResponse, Error = ConverseError>>;
 
 const HTML: &'static str = "text/html";
 const ANONYMOUS: i32 = 1;
@@ -84,23 +84,31 @@ pub fn get_user_id_rouille(_req: &Request) -> i32 {
     ANONYMOUS
 }
 
-pub fn forum_thread_rouille(req: &Request, db: &DbExecutor, thread_id: i32)
-                            -> ConverseResult<Response> {
+pub fn forum_thread_rouille(
+    req: &Request,
+    db: &DbExecutor,
+    thread_id: i32,
+) -> ConverseResult<Response> {
     let user = get_user_id_rouille(&req);
     let thread = db.get_thread(thread_id)?;
     Ok(Response::html(thread_page(user, thread.0, thread.1)?))
 }
 
 /// This handler retrieves and displays a single forum thread.
-pub fn forum_thread(_: State<AppState>,
-                    _: HttpRequest<AppState>,
-                    _: Path<i32>) -> ConverseResponse {
+pub fn forum_thread(
+    _: State<AppState>,
+    _: HttpRequest<AppState>,
+    _: Path<i32>,
+) -> ConverseResponse {
     unimplemented!()
 }
 
 /// This handler presents the user with the "New Thread" form.
 pub fn new_thread(state: State<AppState>) -> ConverseResponse {
-    state.renderer.send(NewThreadPage::default()).flatten()
+    state
+        .renderer
+        .send(NewThreadPage::default())
+        .flatten()
         .map(|res| HttpResponse::Ok().content_type(HTML).body(res))
         .responder()
 }
@@ -113,9 +121,9 @@ pub struct NewThreadForm {
 
 /// This handler receives a "New thread"-form and redirects the user
 /// to the new thread after creation.
-pub fn submit_thread((state, input, req): (State<AppState>,
-                                           Form<NewThreadForm>,
-                                           HttpRequest<AppState>)) -> ConverseResponse {
+pub fn submit_thread(
+    (state, input, req): (State<AppState>, Form<NewThreadForm>, HttpRequest<AppState>),
+) -> ConverseResponse {
     // Trim whitespace out of inputs:
     let input = NewThreadForm {
         title: input.title.trim().into(),
@@ -124,7 +132,8 @@ pub fn submit_thread((state, input, req): (State<AppState>,
 
     // Perform simple validation and abort here if it fails:
     if input.title.is_empty() || input.post.is_empty() {
-        return state.renderer
+        return state
+            .renderer
             .send(NewThreadPage {
                 alerts: vec![NEW_THREAD_LENGTH_ERR],
                 title: Some(input.title),
@@ -147,14 +156,19 @@ pub fn submit_thread((state, input, req): (State<AppState>,
         post: input.post,
     };
 
-    state.db.send(msg)
+    state
+        .db
+        .send(msg)
         .from_err()
         .and_then(move |res| {
             let thread = res?;
-            info!("Created new thread \"{}\" with ID {}", thread.title, thread.id);
+            info!(
+                "Created new thread \"{}\" with ID {}",
+                thread.title, thread.id
+            );
             Ok(HttpResponse::SeeOther()
-               .header("Location", format!("/thread/{}", thread.id))
-               .finish())
+                .header("Location", format!("/thread/{}", thread.id))
+                .finish())
         })
         .responder()
 }
@@ -167,9 +181,11 @@ pub struct NewPostForm {
 
 /// This handler receives a "Reply"-form and redirects the user to the
 /// new post after creation.
-pub fn reply_thread(state: State<AppState>,
-                    input: Form<NewPostForm>,
-                    req: HttpRequest<AppState>) -> ConverseResponse {
+pub fn reply_thread(
+    state: State<AppState>,
+    input: Form<NewPostForm>,
+    req: HttpRequest<AppState>,
+) -> ConverseResponse {
     let user_id = get_user_id(&req);
 
     let new_post = NewPost {
@@ -178,14 +194,19 @@ pub fn reply_thread(state: State<AppState>,
         body: input.post.trim().into(),
     };
 
-    state.db.send(CreatePost(new_post))
+    state
+        .db
+        .send(CreatePost(new_post))
         .flatten()
         .from_err()
         .and_then(move |post| {
             info!("Posted reply {} to thread {}", post.id, post.thread_id);
             Ok(HttpResponse::SeeOther()
-               .header("Location", format!("/thread/{}#post-{}", post.thread_id, post.id))
-               .finish())
+                .header(
+                    "Location",
+                    format!("/thread/{}#post-{}", post.thread_id, post.id),
+                )
+                .finish())
         })
         .responder()
 }
@@ -194,12 +215,16 @@ pub fn reply_thread(state: State<AppState>,
 /// the user attempts to edit a post that they do not have access to,
 /// they are currently ungracefully redirected back to the post
 /// itself.
-pub fn edit_form(state: State<AppState>,
-                 req: HttpRequest<AppState>,
-                 query: Path<GetPost>) -> ConverseResponse {
+pub fn edit_form(
+    state: State<AppState>,
+    req: HttpRequest<AppState>,
+    query: Path<GetPost>,
+) -> ConverseResponse {
     let user_id = get_user_id(&req);
 
-    state.db.send(query.into_inner())
+    state
+        .db
+        .send(query.into_inner())
         .flatten()
         .from_err()
         .and_then(move |post| {
@@ -227,17 +252,21 @@ pub fn edit_form(state: State<AppState>,
 
 /// This handler "executes" an edit to a post if the current user owns
 /// the edited post.
-pub fn edit_post(state: State<AppState>,
-                 req: HttpRequest<AppState>,
-                 update: Form<UpdatePost>) -> ConverseResponse {
+pub fn edit_post(
+    state: State<AppState>,
+    req: HttpRequest<AppState>,
+    update: Form<UpdatePost>,
+) -> ConverseResponse {
     let user_id = get_user_id(&req);
 
-    state.db.send(GetPost { id: update.post_id })
+    state
+        .db
+        .send(GetPost { id: update.post_id })
         .flatten()
         .from_err()
         .and_then(move |post| {
             if user_id != 1 && post.user_id == user_id {
-                 Ok(())
+                Ok(())
             } else {
                 Err(ConverseError::PostEditForbidden {
                     user: user_id,
@@ -247,24 +276,34 @@ pub fn edit_post(state: State<AppState>,
         })
         .and_then(move |_| state.db.send(update.0).from_err())
         .flatten()
-        .map(|updated| HttpResponse::SeeOther()
-             .header("Location", format!("/thread/{}#post-{}",
-                                         updated.thread_id, updated.id))
-             .finish())
+        .map(|updated| {
+            HttpResponse::SeeOther()
+                .header(
+                    "Location",
+                    format!("/thread/{}#post-{}", updated.thread_id, updated.id),
+                )
+                .finish()
+        })
         .responder()
 }
 
 /// This handler executes a full-text search on the forum database and
 /// displays the results to the user.
-pub fn search_forum(state: State<AppState>,
-                    query: Query<SearchPosts>) -> ConverseResponse {
+pub fn search_forum(state: State<AppState>, query: Query<SearchPosts>) -> ConverseResponse {
     let query_string = query.query.clone();
-    state.db.send(query.into_inner())
+    state
+        .db
+        .send(query.into_inner())
         .flatten()
-        .and_then(move |results| state.renderer.send(SearchResultPage {
-            results,
-            query: query_string,
-        }).from_err())
+        .and_then(move |results| {
+            state
+                .renderer
+                .send(SearchResultPage {
+                    results,
+                    query: query_string,
+                })
+                .from_err()
+        })
         .flatten()
         .map(|res| HttpResponse::Ok().content_type(HTML).body(res))
         .responder()
@@ -272,11 +311,15 @@ pub fn search_forum(state: State<AppState>,
 
 /// This handler initiates an OIDC login.
 pub fn login(state: State<AppState>) -> ConverseResponse {
-    state.oidc.send(GetLoginUrl)
+    state
+        .oidc
+        .send(GetLoginUrl)
         .from_err()
-        .and_then(|url| Ok(HttpResponse::TemporaryRedirect()
-                           .header("Location", url)
-                           .finish()))
+        .and_then(|url| {
+            Ok(HttpResponse::TemporaryRedirect()
+                .header("Location", url)
+                .finish())
+        })
         .responder()
 }
 
@@ -286,21 +329,26 @@ pub fn login(state: State<AppState>) -> ConverseResponse {
 /// provider and a user lookup is performed. If a user with a matching
 /// email-address is found in the database, it is logged in -
 /// otherwise a new user is created.
-pub fn callback(state: State<AppState>,
-                data: Form<CodeResponse>,
-                req: HttpRequest<AppState>) -> ConverseResponse {
-    state.oidc.send(RetrieveToken(data.0)).flatten()
+pub fn callback(
+    state: State<AppState>,
+    data: Form<CodeResponse>,
+    req: HttpRequest<AppState>,
+) -> ConverseResponse {
+    state
+        .oidc
+        .send(RetrieveToken(data.0))
+        .flatten()
         .map(|author| LookupOrCreateUser {
             email: author.email,
             name: author.name,
         })
-        .and_then(move |msg| state.db.send(msg).from_err()).flatten()
+        .and_then(move |msg| state.db.send(msg).from_err())
+        .flatten()
         .and_then(move |user| {
             info!("Completed login for user {} ({})", user.email, user.id);
             req.remember(user.id.to_string());
-            Ok(HttpResponse::SeeOther()
-               .header("Location", "/")
-               .finish())})
+            Ok(HttpResponse::SeeOther().header("Location", "/").finish())
+        })
         .responder()
 }
 
@@ -317,9 +365,7 @@ impl EmbeddedFile for App<AppState> {
     fn static_file(self, path: &'static str, content: &'static [u8]) -> Self {
         self.route(path, Method::GET, move |_: HttpRequest<_>| {
             let mime = format!("{}", mime_guess::from_path(path).first_or_octet_stream());
-            HttpResponse::Ok()
-                .content_type(mime.as_str())
-                .body(content)
+            HttpResponse::Ok().content_type(mime.as_str()).body(content)
         })
     }
 }
@@ -327,7 +373,7 @@ impl EmbeddedFile for App<AppState> {
 /// Middleware used to enforce logins unceremoniously.
 pub struct RequireLogin;
 
-impl <S> Middleware<S> for RequireLogin {
+impl<S> Middleware<S> for RequireLogin {
     fn start(&self, req: &HttpRequest<S>) -> actix_web::Result<Started> {
         let logged_in = req.identity().is_some();
         let is_oidc_req = req.path().starts_with("/oidc");
@@ -336,7 +382,7 @@ impl <S> Middleware<S> for RequireLogin {
             Ok(Started::Response(
                 HttpResponse::SeeOther()
                     .header("Location", "/oidc/login")
-                    .finish()
+                    .finish(),
             ))
         } else {
             Ok(Started::Done)
diff --git a/web/converse/src/main.rs b/web/converse/src/main.rs
index 6d6e9ac71020..78d0241600b4 100644
--- a/web/converse/src/main.rs
+++ b/web/converse/src/main.rs
@@ -30,7 +30,6 @@ extern crate log;
 #[macro_use]
 extern crate serde_derive;
 
-extern crate rouille;
 extern crate actix;
 extern crate actix_web;
 extern crate chrono;
@@ -44,6 +43,7 @@ extern crate md5;
 extern crate mime_guess;
 extern crate r2d2;
 extern crate rand;
+extern crate rouille;
 extern crate serde;
 extern crate serde_json;
 extern crate tokio;
@@ -58,7 +58,7 @@ macro_rules! message {
         impl Message for $t {
             type Result = $r;
         }
-    }
+    };
 }
 
 pub mod db;
@@ -69,18 +69,18 @@ pub mod oidc;
 pub mod render;
 pub mod schema;
 
+use crate::db::*;
+use crate::handlers::*;
+use crate::oidc::OidcExecutor;
+use crate::render::Renderer;
 use actix::prelude::*;
-use actix_web::*;
 use actix_web::http::Method;
+use actix_web::middleware::identity::{CookieIdentityPolicy, IdentityService};
 use actix_web::middleware::Logger;
-use actix_web::middleware::identity::{IdentityService, CookieIdentityPolicy};
-use crate::db::*;
+use actix_web::*;
 use diesel::pg::PgConnection;
 use diesel::r2d2::{ConnectionManager, Pool};
-use crate::handlers::*;
-use crate::oidc::OidcExecutor;
 use rand::{OsRng, Rng};
-use crate::render::Renderer;
 use std::env;
 
 fn config(name: &str) -> String {
@@ -96,16 +96,18 @@ fn start_db_executor() -> Addr<DbExecutor> {
     let db_url = config("DATABASE_URL");
 
     let manager = ConnectionManager::<PgConnection>::new(db_url);
-    let pool = Pool::builder().build(manager).expect("Failed to initialise DB pool");
+    let pool = Pool::builder()
+        .build(manager)
+        .expect("Failed to initialise DB pool");
 
     SyncArbiter::start(2, move || DbExecutor(pool.clone()))
 }
 
 fn schedule_search_refresh(db: Addr<DbExecutor>) {
+    use std::thread;
+    use std::time::{Duration, Instant};
     use tokio::prelude::*;
     use tokio::timer::Interval;
-    use std::time::{Duration, Instant};
-    use std::thread;
 
     let task = Interval::new(Instant::now(), Duration::from_secs(60))
         .from_err()
@@ -118,8 +120,8 @@ fn schedule_search_refresh(db: Addr<DbExecutor>) {
 fn start_oidc_executor(base_url: &str) -> Addr<OidcExecutor> {
     info!("Initialising OIDC integration ...");
     let oidc_url = config("OIDC_DISCOVERY_URL");
-    let oidc_config = oidc::load_oidc(&oidc_url)
-        .expect("Failed to retrieve OIDC discovery document");
+    let oidc_config =
+        oidc::load_oidc(&oidc_url).expect("Failed to retrieve OIDC discovery document");
 
     let oidc = oidc::OidcExecutor {
         oidc_config,
@@ -132,7 +134,7 @@ fn start_oidc_executor(base_url: &str) -> Addr<OidcExecutor> {
 }
 
 fn start_renderer() -> Addr<Renderer> {
-    let comrak = comrak::ComrakOptions{
+    let comrak = comrak::ComrakOptions {
         github_pre_lang: true,
         ext_strikethrough: true,
         ext_table: true,
@@ -143,22 +145,23 @@ fn start_renderer() -> Addr<Renderer> {
         ..Default::default()
     };
 
-    Renderer{ comrak }.start()
+    Renderer { comrak }.start()
 }
 
 fn gen_session_key() -> [u8; 64] {
     let mut key_bytes = [0; 64];
-    let mut rng = OsRng::new()
-        .expect("Failed to retrieve RNG for key generation");
+    let mut rng = OsRng::new().expect("Failed to retrieve RNG for key generation");
     rng.fill_bytes(&mut key_bytes);
 
     key_bytes
 }
 
-fn start_http_server(base_url: String,
-                     db_addr: Addr<DbExecutor>,
-                     oidc_addr: Addr<OidcExecutor>,
-                     renderer_addr: Addr<Renderer>) {
+fn start_http_server(
+    base_url: String,
+    db_addr: Addr<DbExecutor>,
+    oidc_addr: Addr<OidcExecutor>,
+    renderer_addr: Addr<Renderer>,
+) {
     info!("Initialising HTTP server ...");
     let bind_host = config_default("CONVERSE_BIND_HOST", "127.0.0.1:4567");
     let key = gen_session_key();
@@ -175,7 +178,7 @@ fn start_http_server(base_url: String,
             CookieIdentityPolicy::new(&key)
                 .name("converse_auth")
                 .path("/")
-                .secure(base_url.starts_with("https"))
+                .secure(base_url.starts_with("https")),
         );
 
         let app = App::with_state(state)
@@ -183,25 +186,37 @@ fn start_http_server(base_url: String,
             .middleware(identity)
             .resource("/", |r| r.method(Method::GET).with(forum_index))
             .resource("/thread/new", |r| r.method(Method::GET).with(new_thread))
-            .resource("/thread/submit", |r| r.method(Method::POST).with(submit_thread))
-            .resource("/thread/reply", |r| r.method(Method::POST).with(reply_thread))
+            .resource("/thread/submit", |r| {
+                r.method(Method::POST).with(submit_thread)
+            })
+            .resource("/thread/reply", |r| {
+                r.method(Method::POST).with(reply_thread)
+            })
             .resource("/thread/{id}", |r| r.method(Method::GET).with(forum_thread))
             .resource("/post/{id}/edit", |r| r.method(Method::GET).with(edit_form))
             .resource("/post/edit", |r| r.method(Method::POST).with(edit_post))
             .resource("/search", |r| r.method(Method::GET).with(search_forum))
             .resource("/oidc/login", |r| r.method(Method::GET).with(login))
             .resource("/oidc/callback", |r| r.method(Method::POST).with(callback))
-            .static_file("/static/highlight.css", include_bytes!("../static/highlight.css"))
-            .static_file("/static/highlight.js", include_bytes!("../static/highlight.js"))
+            .static_file(
+                "/static/highlight.css",
+                include_bytes!("../static/highlight.css"),
+            )
+            .static_file(
+                "/static/highlight.js",
+                include_bytes!("../static/highlight.js"),
+            )
             .static_file("/static/styles.css", include_bytes!("../static/styles.css"));
 
         if require_login {
             app.middleware(RequireLogin)
         } else {
             app
-        }})
-        .bind(&bind_host).expect(&format!("Could not bind on '{}'", bind_host))
-        .start();
+        }
+    })
+    .bind(&bind_host)
+    .expect(&format!("Could not bind on '{}'", bind_host))
+    .start();
 }
 
 fn main() {
diff --git a/web/converse/src/models.rs b/web/converse/src/models.rs
index da628f78b5bc..63b15fbed061 100644
--- a/web/converse/src/models.rs
+++ b/web/converse/src/models.rs
@@ -16,9 +16,9 @@
 // along with this program. If not, see
 // <https://www.gnu.org/licenses/>.
 
+use crate::schema::{posts, simple_posts, threads, users};
 use chrono::prelude::{DateTime, Utc};
-use crate::schema::{users, threads, posts, simple_posts};
-use diesel::sql_types::{Text, Integer};
+use diesel::sql_types::{Integer, Text};
 
 /// Represents a single user in the Converse database. Converse does
 /// not handle logins itself, but rather looks them up based on the
@@ -85,21 +85,21 @@ pub struct ThreadIndex {
 }
 
 #[derive(Deserialize, Insertable)]
-#[table_name="threads"]
+#[table_name = "threads"]
 pub struct NewThread {
     pub title: String,
     pub user_id: i32,
 }
 
 #[derive(Deserialize, Insertable)]
-#[table_name="users"]
+#[table_name = "users"]
 pub struct NewUser {
     pub email: String,
     pub name: String,
 }
 
 #[derive(Deserialize, Insertable)]
-#[table_name="posts"]
+#[table_name = "posts"]
 pub struct NewPost {
     pub thread_id: i32,
     pub body: String,
diff --git a/web/converse/src/oidc.rs b/web/converse/src/oidc.rs
index 9f566c04a71a..75e3eabc88f2 100644
--- a/web/converse/src/oidc.rs
+++ b/web/converse/src/oidc.rs
@@ -22,12 +22,12 @@
 //! Currently Converse only supports a single OIDC provider. Note that
 //! this has so far only been tested with Office365.
 
-use actix::prelude::*;
 use crate::errors::*;
+use actix::prelude::*;
 use crimp::Request;
+use curl::easy::Form;
 use url::Url;
 use url_serde;
-use curl::easy::Form;
 
 /// This structure represents the contents of an OIDC discovery
 /// document.
@@ -114,20 +114,30 @@ impl Handler<RetrieveToken> for OidcExecutor {
         debug!("Received OAuth2 code, requesting access_token");
 
         let mut form = Form::new();
-        form.part("client_id").contents(&self.client_id.as_bytes())
-            .add().expect("critical error: invalid form data");
-
-        form.part("client_secret").contents(&self.client_secret.as_bytes())
-            .add().expect("critical error: invalid form data");
-
-        form.part("grant_type").contents("authorization_code".as_bytes())
-            .add().expect("critical error: invalid form data");
-
-        form.part("code").contents(&msg.0.code.as_bytes())
-            .add().expect("critical error: invalid form data");
-
-        form.part("redirect_uri").contents(&self.redirect_uri.as_bytes())
-            .add().expect("critical error: invalid form data");
+        form.part("client_id")
+            .contents(&self.client_id.as_bytes())
+            .add()
+            .expect("critical error: invalid form data");
+
+        form.part("client_secret")
+            .contents(&self.client_secret.as_bytes())
+            .add()
+            .expect("critical error: invalid form data");
+
+        form.part("grant_type")
+            .contents("authorization_code".as_bytes())
+            .add()
+            .expect("critical error: invalid form data");
+
+        form.part("code")
+            .contents(&msg.0.code.as_bytes())
+            .add()
+            .expect("critical error: invalid form data");
+
+        form.part("redirect_uri")
+            .contents(&self.redirect_uri.as_bytes())
+            .add()
+            .expect("critical error: invalid form data");
 
         let response = Request::post(&self.oidc_config.token_endpoint)
             .user_agent(concat!("converse-", env!("CARGO_PKG_VERSION")))?
@@ -142,7 +152,8 @@ impl Handler<RetrieveToken> for OidcExecutor {
             .user_agent(concat!("converse-", env!("CARGO_PKG_VERSION")))?
             .header("Authorization", &bearer)?
             .send()?
-            .as_json()?.body;
+            .as_json()?
+            .body;
 
         Ok(Author {
             name: user.name,
diff --git a/web/converse/src/render.rs b/web/converse/src/render.rs
index 749e77ef50eb..d06af12bd9f1 100644
--- a/web/converse/src/render.rs
+++ b/web/converse/src/render.rs
@@ -20,14 +20,14 @@
 //! data into whatever format is needed by the templates and rendering
 //! them.
 
-use actix::prelude::*;
-use askama::Template;
 use crate::errors::*;
-use std::fmt;
-use md5;
 use crate::models::*;
+use actix::prelude::*;
+use askama::Template;
 use chrono::prelude::{DateTime, Utc};
-use comrak::{ComrakOptions, markdown_to_html};
+use comrak::{markdown_to_html, ComrakOptions};
+use md5;
+use std::fmt;
 
 pub struct Renderer {
     pub comrak: ComrakOptions,
@@ -101,7 +101,9 @@ pub enum EditingMode {
 }
 
 impl Default for EditingMode {
-    fn default() -> EditingMode { EditingMode::NewThread }
+    fn default() -> EditingMode {
+        EditingMode::NewThread
+    }
 }
 
 /// This is the template used for rendering the new thread, edit post
@@ -215,19 +217,22 @@ pub fn index_page(threads: Vec<ThreadIndex>) -> Result<String> {
 
 // Render the page of a given thread.
 pub fn thread_page(user: i32, thread: Thread, posts: Vec<SimplePost>) -> Result<String> {
-    let posts = posts.into_iter().map(|post| {
-        let editable = user != 1 && post.user_id == user;
-
-        let comrak = ComrakOptions::default(); // TODO(tazjin): cheddar
-        RenderablePost {
-            id: post.id,
-            body: markdown_to_html(&post.body, &comrak),
-            posted: FormattedDate(post.posted),
-            author_name: post.author_name.clone(),
-            author_gravatar: md5_hex(post.author_email.as_bytes()),
-            editable,
-        }
-    }).collect();
+    let posts = posts
+        .into_iter()
+        .map(|post| {
+            let editable = user != 1 && post.user_id == user;
+
+            let comrak = ComrakOptions::default(); // TODO(tazjin): cheddar
+            RenderablePost {
+                id: post.id,
+                body: markdown_to_html(&post.body, &comrak),
+                posted: FormattedDate(post.posted),
+                author_name: post.author_name.clone(),
+                author_gravatar: md5_hex(post.author_email.as_bytes()),
+                editable,
+            }
+        })
+        .collect();
 
     let renderable = RenderableThreadPage {
         posts,
diff --git a/web/converse/src/schema.rs b/web/converse/src/schema.rs
index 7de6d13668c2..520af4342261 100644
--- a/web/converse/src/schema.rs
+++ b/web/converse/src/schema.rs
@@ -80,9 +80,4 @@ joinable!(posts -> users (user_id));
 joinable!(threads -> users (user_id));
 joinable!(simple_posts -> threads (thread_id));
 
-allow_tables_to_appear_in_same_query!(
-    posts,
-    threads,
-    users,
-    simple_posts,
-);
+allow_tables_to_appear_in_same_query!(posts, threads, users, simple_posts,);