cargo fmt

Author: Magnus Hovland Hoff, 2022-04-03 13:47:43 +02:00
parent 26fe2b64da
commit 9f80ced3ec
35 changed files with 1808 additions and 1167 deletions
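This is a pure formatting pass: the hunks below are rustfmt's canonical layout applied across the crate, with no behavioral change. A commit like this is typically produced and verified with the standard cargo subcommands sketched here; the exact invocation used for this commit is not recorded on this page.

    cargo fmt                # rewrite the package's source files in place with rustfmt (add --all to cover every workspace member)
    cargo fmt -- --check     # verification mode: modify nothing, exit non-zero if any file would be reformatted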


@@ -3,13 +3,15 @@
 // 1.3.0.
 #![allow(proc_macro_derive_resolution_fallback)]
-#[macro_use] extern crate quote;
-#[macro_use] extern crate diesel;
+#[macro_use]
+extern crate quote;
+#[macro_use]
+extern crate diesel;
 extern crate diesel_migrations;
 extern crate walkdir;
-use diesel::Connection;
 use diesel::prelude::*;
+use diesel::Connection;
 use std::env;
 use std::fs::File;
 use std::io::prelude::*;
@@ -26,48 +28,62 @@ mod sqlfunc {
fn main() { fn main() {
let out_dir = env::var("OUT_DIR").expect("cargo must set OUT_DIR"); let out_dir = env::var("OUT_DIR").expect("cargo must set OUT_DIR");
let db_path = Path::new(&out_dir).join("build.db"); let db_path = Path::new(&out_dir).join("build.db");
let db_path = db_path.to_str().expect("Will only work for Unicode-representable paths"); let db_path = db_path
.to_str()
.expect("Will only work for Unicode-representable paths");
let _ignore_failure = std::fs::remove_file(db_path); let _ignore_failure = std::fs::remove_file(db_path);
let connection = SqliteConnection::establish(db_path) let connection = SqliteConnection::establish(db_path).expect(&format!(
.expect(&format!("Error esablishing a database connection to {}", db_path)); "Error esablishing a database connection to {}",
db_path
));
// Integer is a dummy placeholder. Compiling fails when passing (). // Integer is a dummy placeholder. Compiling fails when passing ().
diesel::expression::sql_literal::sql::<(diesel::sql_types::Integer)>("PRAGMA foreign_keys = ON") diesel::expression::sql_literal::sql::<(diesel::sql_types::Integer)>(
.execute(&connection) "PRAGMA foreign_keys = ON",
.expect("Should be able to enable foreign keys"); )
.execute(&connection)
.expect("Should be able to enable foreign keys");
sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() }).unwrap(); sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() })
sqlfunc::theme_from_str_hash::register_impl(&connection, |_: String| -> String { unreachable!() }).unwrap(); .unwrap();
sqlfunc::theme_from_str_hash::register_impl(&connection, |_: String| -> String {
unreachable!()
})
.unwrap();
diesel_migrations::run_pending_migrations(&connection).unwrap(); diesel_migrations::run_pending_migrations(&connection).unwrap();
let infer_schema_path = Path::new(&out_dir).join("infer_schema.rs"); let infer_schema_path = Path::new(&out_dir).join("infer_schema.rs");
let mut file = File::create(infer_schema_path).expect("Unable to open file for writing"); let mut file = File::create(infer_schema_path).expect("Unable to open file for writing");
file.write_all(quote! { file.write_all(
mod __diesel_infer_schema_articles { quote! {
infer_table_from_schema!(#db_path, "articles"); mod __diesel_infer_schema_articles {
} infer_table_from_schema!(#db_path, "articles");
pub use self::__diesel_infer_schema_articles::*; }
pub use self::__diesel_infer_schema_articles::*;
mod __diesel_infer_schema_article_revisions { mod __diesel_infer_schema_article_revisions {
infer_table_from_schema!(#db_path, "article_revisions"); infer_table_from_schema!(#db_path, "article_revisions");
}
pub use self::__diesel_infer_schema_article_revisions::*;
} }
pub use self::__diesel_infer_schema_article_revisions::*; .as_str()
}.as_str().as_bytes()).expect("Unable to write to file"); .as_bytes(),
)
.expect("Unable to write to file");
for entry in WalkDir::new("migrations").into_iter().filter_map(|e| e.ok()) { for entry in WalkDir::new("migrations")
.into_iter()
.filter_map(|e| e.ok())
{
println!("cargo:rerun-if-changed={}", entry.path().display()); println!("cargo:rerun-if-changed={}", entry.path().display());
} }
// For build_config.rs // For build_config.rs
for env_var in &[ for env_var in &["CONTINUOUS_INTEGRATION", "TRAVIS_BRANCH", "TRAVIS_COMMIT"] {
"CONTINUOUS_INTEGRATION",
"TRAVIS_BRANCH",
"TRAVIS_COMMIT",
] {
println!("cargo:rerun-if-env-changed={}", env_var); println!("cargo:rerun-if-env-changed={}", env_var);
} }
} }


@@ -1,11 +1,13 @@
-#![recursion_limit="128"]
+#![recursion_limit = "128"]
-#[macro_use] extern crate quote;
-#[macro_use] extern crate serde_derive;
+#[macro_use]
+extern crate quote;
+#[macro_use]
+extern crate serde_derive;
 extern crate base64;
 extern crate proc_macro;
-extern crate serde_json;
 extern crate serde;
+extern crate serde_json;
 extern crate sha2;
 extern crate syn;


@@ -2,13 +2,10 @@ use std::fs::File;
 use proc_macro::TokenStream;
 use quote;
-use serde_json;
 use serde::de::IgnoredAny;
+use serde_json;
-const SOURCES: &[&str] = &[
-    "src/licenses/license-hound.json",
-    "src/licenses/other.json",
-];
+const SOURCES: &[&str] = &["src/licenses/license-hound.json", "src/licenses/other.json"];
 #[derive(Debug, Copy, Clone, Deserialize)]
 pub enum LicenseId {
@@ -56,8 +53,12 @@ struct LicenseReport {
 impl quote::ToTokens for LicenseReport {
     fn to_tokens(&self, tokens: &mut quote::Tokens) {
         let c: &LicenseDescription = self.conclusion.as_ref().unwrap();
-        let (name, link, copyright, license) =
-            (&self.package_name, &c.link, &c.copyright_notice, &c.chosen_license);
+        let (name, link, copyright, license) = (
+            &self.package_name,
+            &c.link,
+            &c.copyright_notice,
+            &c.chosen_license,
+        );
 
         let link = match link {
             &Some(ref link) => quote! { Some(#link) },
@@ -85,7 +86,10 @@ pub fn licenses(_input: TokenStream) -> TokenStream {
         .iter()
         .map(|x| -> Vec<LicenseReport> { serde_json::from_reader(File::open(x).unwrap()).unwrap() })
         .map(|x| x.into_iter().filter(|x| x.conclusion.is_ok()))
-        .fold(vec![], |mut a, b| { a.extend(b); a });
+        .fold(vec![], |mut a, b| {
+            a.extend(b);
+            a
+        });
 
     license_infos.sort_unstable_by_key(|x| x.package_name.to_lowercase());


@@ -11,29 +11,28 @@ fn user_crate_root() -> PathBuf {
 }
 
 fn find_attr<'a>(attrs: &'a Vec<syn::Attribute>, name: &str) -> Option<&'a str> {
-    attrs.iter()
+    attrs
+        .iter()
         .find(|&x| x.name() == name)
         .and_then(|ref attr| match &attr.value {
             &syn::MetaItem::NameValue(_, syn::Lit::Str(ref template, _)) => Some(template),
-            _ => None
+            _ => None,
         })
         .map(|x| x.as_ref())
 }
 
 fn buf_file<P: AsRef<Path>>(filename: P) -> Vec<u8> {
-    let mut f = File::open(filename)
-        .expect("Unable to open file for reading");
+    let mut f = File::open(filename).expect("Unable to open file for reading");
     let mut buf = Vec::new();
-    f.read_to_end(&mut buf)
-        .expect("Unable to read file");
+    f.read_to_end(&mut buf).expect("Unable to read file");
     buf
 }
 
 fn calculate_checksum<P: AsRef<Path>>(filename: P) -> String {
     use base64::*;
-    use sha2::{Sha256, Digest};
+    use sha2::{Digest, Sha256};
 
     encode_config(&Sha256::digest(&buf_file(filename)), URL_SAFE)
 }
@@ -42,23 +41,24 @@ pub fn static_resource(input: TokenStream) -> TokenStream {
let s = input.to_string(); let s = input.to_string();
let ast = syn::parse_macro_input(&s).unwrap(); let ast = syn::parse_macro_input(&s).unwrap();
let filename = find_attr(&ast.attrs, "filename") let filename =
.expect("The `filename` attribute must be specified"); find_attr(&ast.attrs, "filename").expect("The `filename` attribute must be specified");
let abs_filename = user_crate_root().join(filename); let abs_filename = user_crate_root().join(filename);
let abs_filename = abs_filename.to_str().expect("Absolute file path must be valid Unicode"); let abs_filename = abs_filename
.to_str()
.expect("Absolute file path must be valid Unicode");
let checksum = calculate_checksum(&abs_filename); let checksum = calculate_checksum(&abs_filename);
let path: &Path = filename.as_ref(); let path: &Path = filename.as_ref();
let resource_name = let resource_name = format!(
format!("{}-{}.{}", "{}-{}.{}",
path.file_stem().unwrap().to_str().unwrap(), path.file_stem().unwrap().to_str().unwrap(),
checksum, checksum,
path.extension().unwrap().to_str().unwrap() path.extension().unwrap().to_str().unwrap()
); );
let mime = find_attr(&ast.attrs, "mime") let mime = find_attr(&ast.attrs, "mime").expect("The `mime` attribute must be specified");
.expect("The `mime` attribute must be specified");
let name = &ast.ident; let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();


@@ -1,8 +1,8 @@
-#[cfg(not(feature="dynamic-assets"))]
+#[cfg(not(feature = "dynamic-assets"))]
 mod static_assets {
-    use std::collections::HashMap;
-    use futures::Future;
     use crate::web::{Resource, ResponseFuture};
+    use futures::Future;
+    use std::collections::HashMap;
 
     // The CSS should be built to a single CSS file at compile time
     #[derive(StaticResource)]
@@ -52,31 +52,39 @@ mod static_assets {
} }
} }
#[cfg(not(feature="dynamic-assets"))] #[cfg(not(feature = "dynamic-assets"))]
pub use self::static_assets::*; pub use self::static_assets::*;
#[cfg(feature="dynamic-assets")] #[cfg(feature = "dynamic-assets")]
mod dynamic_assets { mod dynamic_assets {
pub struct ThemesCss; pub struct ThemesCss;
impl ThemesCss { impl ThemesCss {
pub fn resource_name() -> &'static str { "themes.css" } pub fn resource_name() -> &'static str {
"themes.css"
}
} }
pub struct StyleCss; pub struct StyleCss;
impl StyleCss { impl StyleCss {
pub fn resource_name() -> &'static str { "style.css" } pub fn resource_name() -> &'static str {
"style.css"
}
} }
pub struct ScriptJs; pub struct ScriptJs;
impl ScriptJs { impl ScriptJs {
pub fn resource_name() -> &'static str { "script.js" } pub fn resource_name() -> &'static str {
"script.js"
}
} }
pub struct SearchJs; pub struct SearchJs;
impl SearchJs { impl SearchJs {
pub fn resource_name() -> &'static str { "search.js" } pub fn resource_name() -> &'static str {
"search.js"
}
} }
} }
#[cfg(feature="dynamic-assets")] #[cfg(feature = "dynamic-assets")]
pub use self::dynamic_assets::*; pub use self::dynamic_assets::*;


@@ -7,7 +7,7 @@ pub const PROJECT_NAME: &str = env!("CARGO_PKG_NAME");
 const SOFT_HYPHEN: &str = "\u{00AD}";
 
-#[cfg(all(not(debug_assertions), feature="dynamic-assets"))]
+#[cfg(all(not(debug_assertions), feature = "dynamic-assets"))]
 compile_error!("dynamic-assets must not be used for production");
 
 lazy_static! {
@@ -20,7 +20,7 @@ lazy_static! {
         #[cfg(test)]
         components.push("test".into());
 
-        #[cfg(feature="dynamic-assets")]
+        #[cfg(feature = "dynamic-assets")]
         components.push("dynamic-assets".into());
 
         if let None = option_env!("CONTINUOUS_INTEGRATION") {
@@ -32,14 +32,12 @@ lazy_static! {
} }
if let Some(commit) = option_env!("TRAVIS_COMMIT") { if let Some(commit) = option_env!("TRAVIS_COMMIT") {
components.push(format!("commit:{}", components.push(format!(
"commit:{}",
commit commit
.as_bytes() .as_bytes()
.chunks(4) .chunks(4)
.map(|x| .map(|x| String::from_utf8(x.to_owned()).unwrap_or_else(|_| String::new()))
String::from_utf8(x.to_owned())
.unwrap_or_else(|_| String::new())
)
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(SOFT_HYPHEN) .join(SOFT_HYPHEN)
)); ));
@@ -51,7 +49,5 @@
             env!("CARGO_PKG_VERSION").to_string()
         }
     }();
 
-    pub static ref HTTP_SERVER: String =
-        format!("{}/{}", PROJECT_NAME, VERSION.as_str());
+    pub static ref HTTP_SERVER: String = format!("{}/{}", PROJECT_NAME, VERSION.as_str());
 }


@@ -1,5 +1,5 @@
-use diesel::prelude::*;
 use diesel::expression::sql_literal::sql;
+use diesel::prelude::*;
 use diesel::sql_types::*;
 use r2d2::{CustomizeConnection, Pool};
 use r2d2_diesel::{self, ConnectionManager};
@@ -25,21 +25,23 @@ impl CustomizeConnection<SqliteConnection, r2d2_diesel::Error> for SqliteInitial
.execute(conn) .execute(conn)
.map_err(|x| r2d2_diesel::Error::QueryError(x))?; .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::markdown_to_fts::register_impl( sqlfunc::markdown_to_fts::register_impl(conn, |text: String| {
conn, rendering::render_markdown_for_fts(&text)
|text: String| rendering::render_markdown_for_fts(&text) })
).map_err(|x| r2d2_diesel::Error::QueryError(x))?; .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::theme_from_str_hash::register_impl( sqlfunc::theme_from_str_hash::register_impl(conn, |title: String| {
conn, theme::theme_from_str_hash(&title)
|title: String| theme::theme_from_str_hash(&title) })
).map_err(|x| r2d2_diesel::Error::QueryError(x))?; .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
Ok(()) Ok(())
} }
} }
pub fn create_pool<S: Into<String>>(connection_string: S) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<dyn (::std::error::Error)>> { pub fn create_pool<S: Into<String>>(
connection_string: S,
) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<dyn (::std::error::Error)>> {
let manager = ConnectionManager::<SqliteConnection>::new(connection_string); let manager = ConnectionManager::<SqliteConnection>::new(connection_string);
let pool = Pool::builder() let pool = Pool::builder()
.connection_customizer(Box::new(SqliteInitializer {})) .connection_customizer(Box::new(SqliteInitializer {}))
@@ -72,7 +74,10 @@ mod test {
         let conn = test_connection();
 
         #[derive(QueryableByName, PartialEq, Eq, Debug)]
-        struct Row { #[sql_type = "Text"] text: String }
+        struct Row {
+            #[sql_type = "Text"]
+            text: String,
+        }
 
         let res = sql_query("SELECT markdown_to_fts('[link](url)') as text")
             .load::<Row>(&conn)
@@ -88,7 +93,10 @@ mod test {
         let conn = test_connection();
 
         #[derive(QueryableByName, PartialEq, Eq, Debug)]
-        struct Row { #[sql_type = "Text"] theme: theme::Theme }
+        struct Row {
+            #[sql_type = "Text"]
+            theme: theme::Theme,
+        }
 
         let res = sql_query("SELECT theme_from_str_hash('Bartefjes') as theme")
             .load::<Row>(&conn)


@@ -1,37 +1,53 @@
#![recursion_limit="128"] // for diesel's infer_schema! #![recursion_limit = "128"]
// for diesel's infer_schema!
// Diesel causes many warnings of the following kind. I expect this to be // Diesel causes many warnings of the following kind. I expect this to be
// fixed in a future release of Diesel. Currently used version of Diesel is // fixed in a future release of Diesel. Currently used version of Diesel is
// 1.3.0. // 1.3.0.
#![allow(proc_macro_derive_resolution_fallback)] #![allow(proc_macro_derive_resolution_fallback)]
#[cfg(test)] #[macro_use] extern crate matches; #[cfg(test)]
#[cfg(test)] #[macro_use] extern crate indoc; #[macro_use]
extern crate matches;
#[cfg(test)]
#[macro_use]
extern crate indoc;
#[macro_use] extern crate bart_derive; #[macro_use]
#[macro_use] extern crate codegen; extern crate bart_derive;
#[macro_use] #[allow(deprecated)] extern crate diesel_infer_schema; #[macro_use]
#[macro_use] extern crate diesel_migrations; extern crate codegen;
#[macro_use] extern crate diesel; #[macro_use]
#[macro_use] extern crate hyper; #[allow(deprecated)]
#[macro_use] extern crate lazy_static; extern crate diesel_infer_schema;
#[macro_use] extern crate maplit; #[macro_use]
#[macro_use] extern crate serde_derive; extern crate diesel_migrations;
#[macro_use] extern crate serde_plain; #[macro_use]
extern crate diesel;
#[macro_use]
extern crate hyper;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_plain;
extern crate chrono; extern crate chrono;
extern crate diff; extern crate diff;
extern crate futures_cpupool;
extern crate futures; extern crate futures;
extern crate futures_cpupool;
extern crate percent_encoding; extern crate percent_encoding;
extern crate pulldown_cmark; extern crate pulldown_cmark;
extern crate r2d2_diesel;
extern crate r2d2; extern crate r2d2;
extern crate r2d2_diesel;
extern crate rand; extern crate rand;
extern crate seahash; extern crate seahash;
extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate serde_urlencoded; extern crate serde_urlencoded;
extern crate serde;
extern crate slug; extern crate slug;
extern crate titlecase; extern crate titlecase;
@@ -52,19 +68,22 @@ mod theme;
mod web; mod web;
mod wiki_lookup; mod wiki_lookup;
pub fn main(db_file: String, bind_host: IpAddr, bind_port: u16, trust_identity: bool) -> Result<(), Box<dyn std::error::Error>> { pub fn main(
db_file: String,
bind_host: IpAddr,
bind_port: u16,
trust_identity: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let db_pool = db::create_pool(db_file)?; let db_pool = db::create_pool(db_file)?;
let cpu_pool = futures_cpupool::CpuPool::new_num_cpus(); let cpu_pool = futures_cpupool::CpuPool::new_num_cpus();
let state = state::State::new(db_pool, cpu_pool); let state = state::State::new(db_pool, cpu_pool);
let lookup = wiki_lookup::WikiLookup::new(state, trust_identity); let lookup = wiki_lookup::WikiLookup::new(state, trust_identity);
let server = let server = hyper::server::Http::new()
hyper::server::Http::new() .bind(&SocketAddr::new(bind_host, bind_port), move || {
.bind( Ok(site::Site::new(lookup.clone(), trust_identity))
&SocketAddr::new(bind_host, bind_port), })?;
move || Ok(site::Site::new(lookup.clone(), trust_identity))
)?;
println!("Listening on http://{}", server.local_addr().unwrap()); println!("Listening on http://{}", server.local_addr().unwrap());


@@ -1,4 +1,5 @@
-#[macro_use] extern crate lazy_static;
+#[macro_use]
+extern crate lazy_static;
 extern crate clap;
 extern crate sausagewiki;
@@ -18,34 +19,44 @@ fn args<'a>() -> clap::ArgMatches<'a> {
App::new(PROJECT_NAME) App::new(PROJECT_NAME)
.version(VERSION.as_str()) .version(VERSION.as_str())
.about(env!("CARGO_PKG_DESCRIPTION")) .about(env!("CARGO_PKG_DESCRIPTION"))
.arg(Arg::with_name(DATABASE) .arg(
.help("Sets the database file to use") Arg::with_name(DATABASE)
.required(true)) .help("Sets the database file to use")
.arg(Arg::with_name(PORT) .required(true),
.help("Sets the listening port") )
.short("p") .arg(
.long(PORT) Arg::with_name(PORT)
.default_value("8080") .help("Sets the listening port")
.validator(|x| match x.parse::<u16>() { .short("p")
Ok(_) => Ok(()), .long(PORT)
Err(_) => Err("Must be an integer in the range [0, 65535]".into()) .default_value("8080")
}) .validator(|x| match x.parse::<u16>() {
.takes_value(true)) Ok(_) => Ok(()),
.arg(Arg::with_name(ADDRESS) Err(_) => Err("Must be an integer in the range [0, 65535]".into()),
.help("Sets the IP address to bind to") })
.short("a") .takes_value(true),
.long(ADDRESS) )
.default_value("127.0.0.1") .arg(
.validator(|x| match x.parse::<IpAddr>() { Arg::with_name(ADDRESS)
Ok(_) => Ok(()), .help("Sets the IP address to bind to")
Err(_) => Err("Must be a valid IP address".into()) .short("a")
}) .long(ADDRESS)
.takes_value(true)) .default_value("127.0.0.1")
.arg(Arg::with_name(TRUST_IDENTITY) .validator(|x| match x.parse::<IpAddr>() {
.help("Trust the value in the X-Identity header to be an \ Ok(_) => Ok(()),
Err(_) => Err("Must be a valid IP address".into()),
})
.takes_value(true),
)
.arg(
Arg::with_name(TRUST_IDENTITY)
.help(
"Trust the value in the X-Identity header to be an \
authenticated username. This only makes sense when Sausagewiki \ authenticated username. This only makes sense when Sausagewiki \
runs behind a reverse proxy which sets this header.") runs behind a reverse proxy which sets this header.",
.long(TRUST_IDENTITY)) )
.long(TRUST_IDENTITY),
)
.get_matches() .get_matches()
} }
@@ -55,15 +66,14 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     const CLAP: &str = "Guaranteed by clap";
     const VALIDATOR: &str = "Guaranteed by clap validator";
 
     let db_file = args.value_of(DATABASE).expect(CLAP).to_owned();
-    let bind_host = args.value_of(ADDRESS).expect(CLAP).parse().expect(VALIDATOR);
+    let bind_host = args
+        .value_of(ADDRESS)
+        .expect(CLAP)
+        .parse()
+        .expect(VALIDATOR);
     let bind_port = args.value_of(PORT).expect(CLAP).parse().expect(VALIDATOR);
     let trust_identity = args.is_present(TRUST_IDENTITY);
 
-    sausagewiki::main(
-        db_file,
-        bind_host,
-        bind_port,
-        trust_identity,
-    )
+    sausagewiki::main(db_file, bind_host, bind_port, trust_identity)
 }


@@ -1,8 +1,8 @@
-use std::fmt::Debug;
 use diff;
+use std::fmt::Debug;
 
 #[derive(Debug, PartialEq)]
 pub struct Chunk<'a, Item: 'a + Debug + PartialEq + Copy>(
     pub &'a [diff::Result<Item>],
-    pub &'a [diff::Result<Item>]
+    pub &'a [diff::Result<Item>],
 );


@@ -7,7 +7,7 @@ use super::chunk::Chunk;
 pub struct ChunkIterator<'a, Item>
 where
-    Item: 'a + Debug + PartialEq
+    Item: 'a + Debug + PartialEq,
 {
     left: &'a [diff::Result<Item>],
     right: &'a [diff::Result<Item>],
@@ -15,16 +15,19 @@ where
 impl<'a, Item> ChunkIterator<'a, Item>
 where
-    Item: 'a + Debug + PartialEq + Eq
+    Item: 'a + Debug + PartialEq + Eq,
 {
-    pub fn new(left: &'a [diff::Result<Item>], right: &'a [diff::Result<Item>]) -> ChunkIterator<'a, Item> {
+    pub fn new(
+        left: &'a [diff::Result<Item>],
+        right: &'a [diff::Result<Item>],
+    ) -> ChunkIterator<'a, Item> {
         ChunkIterator { left, right }
     }
 }
 
 impl<'a, Item> Iterator for ChunkIterator<'a, Item>
 where
-    Item: 'a + Debug + PartialEq + Copy
+    Item: 'a + Debug + PartialEq + Copy,
 {
     type Item = Chunk<'a, Item>;
@@ -46,18 +49,18 @@ where
             match (self.left.get(li), self.right.get(ri)) {
                 (Some(&Right(_)), _) => {
                     li += 1;
-                },
+                }
                 (_, Some(&Right(_))) => {
                     ri += 1;
-                },
+                }
                 (Some(&Left(_)), Some(_)) => {
                     li += 1;
                     ri += 1;
-                },
+                }
                 (Some(_), Some(&Left(_))) => {
                     li += 1;
                     ri += 1;
-                },
+                }
                 (Some(&Both(..)), Some(&Both(..))) => {
                     let chunk = Chunk(&self.left[..li], &self.right[..ri]);
                     self.left = &self.left[li..];
@@ -94,13 +97,16 @@ mod test {
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![ assert_eq!(
Chunk(&oa[0.. 3], &ob[0.. 3]), vec![
Chunk(&oa[3.. 6], &ob[3.. 3]), Chunk(&oa[0..3], &ob[0..3]),
Chunk(&oa[6.. 9], &ob[3.. 6]), Chunk(&oa[3..6], &ob[3..3]),
Chunk(&oa[9.. 9], &ob[6.. 9]), Chunk(&oa[6..9], &ob[3..6]),
Chunk(&oa[9..12], &ob[9..12]), Chunk(&oa[9..9], &ob[6..9]),
], chunks); Chunk(&oa[9..12], &ob[9..12]),
],
chunks
);
} }
#[test] #[test]
@@ -113,11 +119,14 @@
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![ assert_eq!(
Chunk(&oa[0.. 3], &ob[0.. 3]), vec![
Chunk(&oa[3.. 9], &ob[3.. 9]), Chunk(&oa[0..3], &ob[0..3]),
Chunk(&oa[9..12], &ob[9..12]), Chunk(&oa[3..9], &ob[3..9]),
], chunks); Chunk(&oa[9..12], &ob[9..12]),
],
chunks
);
} }
#[test] #[test]
@@ -130,10 +139,10 @@
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![ assert_eq!(
Chunk(&oa[0..9], &ob[0.. 9]), vec![Chunk(&oa[0..9], &ob[0..9]), Chunk(&oa[9..9], &ob[9..12]),],
Chunk(&oa[9..9], &ob[9..12]), chunks
], chunks); );
} }
#[test] #[test]
@@ -146,10 +155,10 @@
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![ assert_eq!(
Chunk(&oa[0..6], &ob[0.. 6]), vec![Chunk(&oa[0..6], &ob[0..6]), Chunk(&oa[6..9], &ob[6..12]),],
Chunk(&oa[6..9], &ob[6..12]), chunks
], chunks); );
} }
#[test] #[test]
@@ -162,8 +171,6 @@
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![ assert_eq!(vec![Chunk(&oa[0..6], &ob[0..6]),], chunks);
Chunk(&oa[0..6], &ob[0..6]),
], chunks);
} }
} }


@@ -1,5 +1,5 @@
-mod chunk_iterator;
 mod chunk;
+mod chunk_iterator;
 mod output;
 
 use std::fmt::Debug;
@@ -7,8 +7,8 @@ use std::fmt::Debug;
 use diff;
 
 use self::chunk_iterator::ChunkIterator;
-use self::output::*;
 use self::output::Output::Resolved;
+use self::output::*;
 
 pub use self::output::Output;
@@ -22,9 +22,9 @@ impl<'a> MergeResult<&'a str> {
     pub fn to_strings(self) -> MergeResult<String> {
         match self {
             MergeResult::Clean(x) => MergeResult::Clean(x),
-            MergeResult::Conflicted(x) => MergeResult::Conflicted(
-                x.into_iter().map(Output::to_strings).collect()
-            )
+            MergeResult::Conflicted(x) => {
+                MergeResult::Conflicted(x.into_iter().map(Output::to_strings).collect())
+            }
         }
     }
 }
@@ -33,23 +33,21 @@ impl MergeResult<String> {
pub fn flatten(self) -> String { pub fn flatten(self) -> String {
match self { match self {
MergeResult::Clean(x) => x, MergeResult::Clean(x) => x,
MergeResult::Conflicted(x) => { MergeResult::Conflicted(x) => x
x.into_iter() .into_iter()
.flat_map(|out| match out { .flat_map(|out| match out {
Output::Conflict(a, _o, b) => { Output::Conflict(a, _o, b) => {
let mut x: Vec<String> = vec![]; let mut x: Vec<String> = vec![];
x.push("<<<<<<< Your changes:\n".into()); x.push("<<<<<<< Your changes:\n".into());
x.extend(a.into_iter().map(|x| format!("{}\n", x))); x.extend(a.into_iter().map(|x| format!("{}\n", x)));
x.push("======= Their changes:\n".into()); x.push("======= Their changes:\n".into());
x.extend(b.into_iter().map(|x| format!("{}\n", x))); x.extend(b.into_iter().map(|x| format!("{}\n", x)));
x.push(">>>>>>> Conflict ends here\n".into()); x.push(">>>>>>> Conflict ends here\n".into());
x x
}, }
Output::Resolved(x) => Output::Resolved(x) => x.into_iter().map(|x| format!("{}\n", x)).collect(),
x.into_iter().map(|x| format!("{}\n", x)).collect(), })
}) .collect(),
.collect()
}
} }
} }
} }
@@ -58,22 +56,21 @@ impl MergeResult<char> {
pub fn flatten(self) -> String { pub fn flatten(self) -> String {
match self { match self {
MergeResult::Clean(x) => x, MergeResult::Clean(x) => x,
MergeResult::Conflicted(x) => { MergeResult::Conflicted(x) => x
x.into_iter() .into_iter()
.flat_map(|out| match out { .flat_map(|out| match out {
Output::Conflict(a, _o, b) => { Output::Conflict(a, _o, b) => {
let mut x: Vec<char> = vec![]; let mut x: Vec<char> = vec![];
x.push('<'); x.push('<');
x.extend(a); x.extend(a);
x.push('|'); x.push('|');
x.extend(b); x.extend(b);
x.push('>'); x.push('>');
x x
}, }
Output::Resolved(x) => x, Output::Resolved(x) => x,
}) })
.collect() .collect(),
}
} }
} }
} }
@@ -85,7 +82,10 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
     let chunks = ChunkIterator::new(&oa, &ob);
     let hunks: Vec<_> = chunks.map(resolve).collect();
-    let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
+    let clean = hunks.iter().all(|x| match x {
+        &Resolved(..) => true,
+        _ => false,
+    });
 
     if clean {
         MergeResult::Clean(
@@ -93,10 +93,10 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
.into_iter() .into_iter()
.flat_map(|x| match x { .flat_map(|x| match x {
Resolved(y) => y.into_iter(), Resolved(y) => y.into_iter(),
_ => unreachable!() _ => unreachable!(),
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join("\n") .join("\n"),
) )
} else { } else {
MergeResult::Conflicted(hunks) MergeResult::Conflicted(hunks)
@@ -110,7 +110,10 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
     let chunks = ChunkIterator::new(&oa, &ob);
     let hunks: Vec<_> = chunks.map(resolve).collect();
-    let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
+    let clean = hunks.iter().all(|x| match x {
+        &Resolved(..) => true,
+        _ => false,
+    });
 
     if clean {
         MergeResult::Clean(
@@ -118,9 +121,9 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
.into_iter() .into_iter()
.flat_map(|x| match x { .flat_map(|x| match x {
Resolved(y) => y.into_iter(), Resolved(y) => y.into_iter(),
_ => unreachable!() _ => unreachable!(),
}) })
.collect() .collect(),
) )
} else { } else {
MergeResult::Conflicted(hunks) MergeResult::Conflicted(hunks)
@@ -131,9 +134,9 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
 mod test {
     use diff;
 
-    use super::*;
-    use super::output::*;
     use super::output::Output::*;
+    use super::output::*;
+    use super::*;
 
     #[test]
     fn simple_case() {
@@ -145,106 +148,141 @@ mod test {
chunks.map(resolve).collect() chunks.map(resolve).collect()
} }
assert_eq!(vec![ assert_eq!(
Resolved("aaa".chars().collect()), vec![
Resolved("xxx".chars().collect()), Resolved("aaa".chars().collect()),
Resolved("bbb".chars().collect()), Resolved("xxx".chars().collect()),
Resolved("yyy".chars().collect()), Resolved("bbb".chars().collect()),
Resolved("ccc".chars().collect()), Resolved("yyy".chars().collect()),
], merge_chars( Resolved("ccc".chars().collect()),
"aaaxxxbbbccc", ],
"aaabbbccc", merge_chars("aaaxxxbbbccc", "aaabbbccc", "aaabbbyyyccc",)
"aaabbbyyyccc", );
));
} }
#[test] #[test]
fn clean_case() { fn clean_case() {
assert_eq!(MergeResult::Clean(indoc!(" assert_eq!(
MergeResult::Clean(
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
yyy yyy
ccc ccc
").into()), merge_lines( "
indoc!(" )
.into()
),
merge_lines(
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
bbb bbb
yyy yyy
ccc ccc
"), "
)); ),
)
);
} }
#[test] #[test]
fn clean_case_chars() { fn clean_case_chars() {
assert_eq!(MergeResult::Clean("Title".into()), merge_chars( assert_eq!(
"Titlle", MergeResult::Clean("Title".into()),
"titlle", merge_chars("Titlle", "titlle", "title",)
"title", );
));
} }
#[test] #[test]
fn false_conflict() { fn false_conflict() {
assert_eq!(MergeResult::Clean(indoc!(" assert_eq!(
MergeResult::Clean(
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
").into()), merge_lines( "
indoc!(" )
.into()
),
merge_lines(
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
"), "
)); ),
)
);
} }
#[test] #[test]
fn true_conflict() { fn true_conflict() {
assert_eq!(MergeResult::Conflicted(vec![ assert_eq!(
Resolved(vec!["aaa"]), MergeResult::Conflicted(vec![
Conflict(vec!["xxx"], vec![], vec!["yyy"]), Resolved(vec!["aaa"]),
Resolved(vec!["bbb", "ccc", ""]), Conflict(vec!["xxx"], vec![], vec!["yyy"]),
]), merge_lines( Resolved(vec!["bbb", "ccc", ""]),
indoc!(" ]),
merge_lines(
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
"), "
indoc!(" ),
indoc!(
"
aaa aaa
yyy yyy
bbb bbb
ccc ccc
"), "
)); ),
)
);
} }
} }


@@ -47,12 +47,10 @@ fn choose_right<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> {
 }
 
 fn no_change<Item>(operations: &[diff::Result<Item>]) -> bool {
-    operations
-        .iter()
-        .all(|x| match x {
-            &Both(..) => true,
-            _ => false,
-        })
+    operations.iter().all(|x| match x {
+        &Both(..) => true,
+        _ => false,
+    })
 }
@@ -78,83 +76,43 @@ pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>)
@ -78,83 +76,43 @@ pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>)
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use diff::Result::*;
use super::*; use super::*;
use diff::Result::*;
#[test] #[test]
fn empty() { fn empty() {
assert_eq!( assert_eq!(Output::Resolved(vec![]), resolve::<i32>(Chunk(&[], &[])));
Output::Resolved(vec![]),
resolve::<i32>(Chunk(&[], &[]))
);
} }
#[test] #[test]
fn same() { fn same() {
assert_eq!( assert_eq!(
Output::Resolved(vec![ Output::Resolved(vec![1]),
1 resolve::<i32>(Chunk(&[Both(1, 1)], &[Both(1, 1)]))
]),
resolve::<i32>(Chunk(
&[Both(1, 1)],
&[Both(1, 1)]
))
); );
} }
#[test] #[test]
fn only_left() { fn only_left() {
assert_eq!( assert_eq!(
Output::Resolved(vec![ Output::Resolved(vec![2]),
2 resolve::<i32>(Chunk(&[Left(1), Right(2)], &[]))
]),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[]
))
); );
} }
#[test] #[test]
fn false_conflict() { fn false_conflict() {
assert_eq!( assert_eq!(
Output::Resolved(vec![ Output::Resolved(vec![2]),
2 resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(2)],))
]),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[
Left(1),
Right(2)
],
))
); );
} }
#[test] #[test]
fn real_conflict() { fn real_conflict() {
assert_eq!( assert_eq!(
Output::Conflict( Output::Conflict(vec![2], vec![1], vec![3],),
vec![2], resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(3)],))
vec![1],
vec![3],
),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[
Left(1),
Right(3)
],
))
); );
} }
} }


@@ -30,7 +30,9 @@ pub struct ArticleRevision {
 }
 
 impl ArticleRevision {
-    pub fn link(&self) -> &str { slug_link(&self.slug) }
+    pub fn link(&self) -> &str {
+        slug_link(&self.slug)
+    }
 }
 
 #[derive(Debug, PartialEq, Queryable)]
@@ -52,7 +54,9 @@ pub struct ArticleRevisionStub {
 }
 
 impl ArticleRevisionStub {
-    pub fn link(&self) -> &str { slug_link(&self.slug) }
+    pub fn link(&self) -> &str {
+        slug_link(&self.slug)
+    }
 }
 
 use diesel::sql_types::Text;
@@ -69,5 +73,7 @@ pub struct SearchResult {
 }
 
 impl SearchResult {
-    pub fn link(&self) -> &str { slug_link(&self.slug) }
+    pub fn link(&self) -> &str {
+        slug_link(&self.slug)
+    }
 }


@@ -1,6 +1,6 @@
+use pulldown_cmark::Event::{End, Text};
+use pulldown_cmark::{html, Parser, Tag, OPTION_DISABLE_HTML, OPTION_ENABLE_TABLES};
 use slug::slugify;
-use pulldown_cmark::{Parser, Tag, html, OPTION_ENABLE_TABLES, OPTION_DISABLE_HTML};
-use pulldown_cmark::Event::{Text, End};
 
 fn slugify_link(text: &str, title: &str) -> Option<(String, String)> {
     Some((slugify(text), title.to_owned()))
@@ -28,8 +28,7 @@ pub fn render_markdown_for_fts(src: &str) -> String {
     for event in p {
         match event {
-            Text(text) =>
-                buf.push_str(&text.replace(is_html_special, " ")),
+            Text(text) => buf.push_str(&text.replace(is_html_special, " ")),
             End(Tag::Link(uri, _title)) => {
                 buf.push_str(" (");
                 buf.push_str(&uri.replace(is_html_special, " "));


@@ -54,13 +54,15 @@ struct LicenseInfo {
 }
 
 #[derive(BartDisplay)]
-#[template="templates/about.html"]
+#[template = "templates/about.html"]
 struct Template<'a> {
-    deps: &'a [LicenseInfo]
+    deps: &'a [LicenseInfo],
 }
 
 impl<'a> Template<'a> {
-    fn version(&self) -> &str { &build_config::VERSION }
+    fn version(&self) -> &str {
+        &build_config::VERSION
+    }
 }
 
 impl Resource for AboutResource {
@@ -70,24 +72,27 @@ impl Resource for AboutResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let head = self.head(); let head = self.head();
Box::new(head Box::new(head.and_then(move |head| {
.and_then(move |head| { Ok(head.with_body(
Ok(head.with_body(system_page( system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"About Sausagewiki", "About Sausagewiki",
Template { Template {
deps: &*LICENSE_INFOS deps: &*LICENSE_INFOS,
}, },
).to_string())) )
})) .to_string(),
))
}))
} }
} }


@@ -1,4 +1,4 @@
-use chrono::{TimeZone, DateTime, Local};
+use chrono::{DateTime, Local, TimeZone};
 use futures::{self, Future};
 use hyper;
 use hyper::header::{ContentType, Location};
@@ -10,7 +10,7 @@ use crate::assets::ScriptJs;
 use crate::mimes::*;
 use crate::rendering::render_markdown;
 use crate::site::Layout;
-use crate::state::{State, UpdateResult, RebaseConflict};
+use crate::state::{RebaseConflict, State, UpdateResult};
 use crate::theme::{self, Theme};
 use crate::web::{Resource, ResponseFuture};
@@ -22,7 +22,7 @@ struct SelectableTheme {
 }
 
 #[derive(BartDisplay)]
-#[template="templates/article.html"]
+#[template = "templates/article.html"]
 struct Template<'a> {
     revision: i32,
     last_updated: Option<&'a str>,
@@ -58,7 +58,12 @@ pub struct ArticleResource {
 impl ArticleResource {
     pub fn new(state: State, article_id: i32, revision: i32, edit: bool) -> Self {
-        Self { state, article_id, revision, edit }
+        Self {
+            state,
+            article_id,
+            revision,
+            edit,
+        }
     }
 }
@@ -78,12 +83,23 @@ pub fn last_updated(article_id: i32, created: &DateTime<Local>, author: Option<&
Template { Template {
created: &created.to_rfc2822(), created: &created.to_rfc2822(),
article_history: &format!("_changes{}", QueryParameters::default().article_id(Some(article_id)).into_link()), article_history: &format!(
"_changes{}",
QueryParameters::default()
.article_id(Some(article_id))
.into_link()
),
author: author.map(|author| Author { author: author.map(|author| Author {
author: &author, author: &author,
history: format!("_changes{}", QueryParameters::default().author(Some(author.to_owned())).into_link()), history: format!(
"_changes{}",
QueryParameters::default()
.author(Some(author.to_owned()))
.into_link()
),
}), }),
}.to_string() }
.to_string()
} }
impl Resource for ArticleResource { impl Resource for ArticleResource {
@@ -93,43 +109,50 @@ impl Resource for ArticleResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let data = self.state.get_article_revision(self.article_id, self.revision) let data = self
.state
.get_article_revision(self.article_id, self.revision)
.map(|x| x.expect("Data model guarantees that this exists")); .map(|x| x.expect("Data model guarantees that this exists"));
let head = self.head(); let head = self.head();
Box::new(data.join(head) Box::new(data.join(head).and_then(move |(data, head)| {
.and_then(move |(data, head)| { Ok(head.with_body(
Ok(head Layout {
.with_body(Layout { base: None, // Hmm, should perhaps accept `base` as argument
base: None, // Hmm, should perhaps accept `base` as argument title: &data.title,
theme: data.theme,
body: &Template {
revision: data.revision,
last_updated: Some(&last_updated(
data.article_id,
&Local.from_utc_datetime(&data.created),
data.author.as_ref().map(|x| &**x),
)),
edit: self.edit,
cancel_url: Some(data.link()),
title: &data.title, title: &data.title,
theme: data.theme, raw: &data.body,
body: &Template { rendered: render_markdown(&data.body),
revision: data.revision, themes: &theme::THEMES
last_updated: Some(&last_updated( .iter()
data.article_id, .map(|&x| SelectableTheme {
&Local.from_utc_datetime(&data.created),
data.author.as_ref().map(|x| &**x)
)),
edit: self.edit,
cancel_url: Some(data.link()),
title: &data.title,
raw: &data.body,
rendered: render_markdown(&data.body),
themes: &theme::THEMES.iter().map(|&x| SelectableTheme {
theme: x, theme: x,
selected: x == data.theme, selected: x == data.theme,
}).collect::<Vec<_>>(), })
}, .collect::<Vec<_>>(),
}.to_string())) },
})) }
.to_string(),
))
}))
} }
fn put(self: Box<Self>, body: hyper::Body, identity: Option<String>) -> ResponseFuture { fn put(self: Box<Self>, body: hyper::Body, identity: Option<String>) -> ResponseFuture {
@@ -138,7 +161,7 @@ impl Resource for ArticleResource {
         use futures::Stream;
 
         #[derive(BartDisplay)]
-        #[template="templates/article_contents.html"]
+        #[template = "templates/article_contents.html"]
         struct Template<'a> {
             title: &'a str,
             rendered: String,
@@ -156,67 +179,79 @@ impl Resource for ArticleResource {
last_updated: &'a str, last_updated: &'a str,
} }
Box::new(body Box::new(
.concat2() body.concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| { .and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
serde_urlencoded::from_bytes(&body) .and_then(move |update: UpdateArticle| {
.map_err(Into::into) self.state.update_article(
}) self.article_id,
.and_then(move |update: UpdateArticle| { update.base_revision,
self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity, update.theme) update.title,
}) update.body,
.and_then(|updated| match updated { identity,
UpdateResult::Success(updated) => update.theme,
Ok(Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone()))
.with_body(serde_json::to_string(&PutResponse {
conflict: false,
slug: &updated.slug,
revision: updated.revision,
title: &updated.title,
body: &updated.body,
theme: updated.theme,
rendered: &Template {
title: &updated.title,
rendered: render_markdown(&updated.body),
}.to_string(),
last_updated: &last_updated(
updated.article_id,
&Local.from_utc_datetime(&updated.created),
updated.author.as_ref().map(|x| &**x)
),
}).expect("Should never fail"))
),
UpdateResult::RebaseConflict(RebaseConflict {
base_article, title, body, theme
}) => {
let title = title.flatten();
let body = body.flatten();
Ok(Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone()))
.with_body(serde_json::to_string(&PutResponse {
conflict: true,
slug: &base_article.slug,
revision: base_article.revision,
title: &title,
body: &body,
theme,
rendered: &Template {
title: &title,
rendered: render_markdown(&body),
}.to_string(),
last_updated: &last_updated(
base_article.article_id,
&Local.from_utc_datetime(&base_article.created),
base_article.author.as_ref().map(|x| &**x)
),
}).expect("Should never fail"))
) )
} })
}) .and_then(|updated| match updated {
UpdateResult::Success(updated) => Ok(Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone()))
.with_body(
serde_json::to_string(&PutResponse {
conflict: false,
slug: &updated.slug,
revision: updated.revision,
title: &updated.title,
body: &updated.body,
theme: updated.theme,
rendered: &Template {
title: &updated.title,
rendered: render_markdown(&updated.body),
}
.to_string(),
last_updated: &last_updated(
updated.article_id,
&Local.from_utc_datetime(&updated.created),
updated.author.as_ref().map(|x| &**x),
),
})
.expect("Should never fail"),
)),
UpdateResult::RebaseConflict(RebaseConflict {
base_article,
title,
body,
theme,
}) => {
let title = title.flatten();
let body = body.flatten();
Ok(Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone()))
.with_body(
serde_json::to_string(&PutResponse {
conflict: true,
slug: &base_article.slug,
revision: base_article.revision,
title: &title,
body: &body,
theme,
rendered: &Template {
title: &title,
rendered: render_markdown(&body),
}
.to_string(),
last_updated: &last_updated(
base_article.article_id,
&Local.from_utc_datetime(&base_article.created),
base_article.author.as_ref().map(|x| &**x),
),
})
.expect("Should never fail"),
))
}
}),
) )
} }
@@ -225,58 +260,67 @@ impl Resource for ArticleResource {
use futures::Stream; use futures::Stream;
Box::new(body Box::new(
.concat2() body.concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| { .and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
serde_urlencoded::from_bytes(&body) .and_then(move |update: UpdateArticle| {
.map_err(Into::into) self.state.update_article(
}) self.article_id,
.and_then(move |update: UpdateArticle| { update.base_revision,
self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity, update.theme) update.title,
}) update.body,
.and_then(|updated| { identity,
match updated { update.theme,
)
})
.and_then(|updated| match updated {
UpdateResult::Success(updated) => Ok(Response::new() UpdateResult::Success(updated) => Ok(Response::new()
.with_status(hyper::StatusCode::SeeOther) .with_status(hyper::StatusCode::SeeOther)
.with_header(ContentType(TEXT_PLAIN.clone())) .with_header(ContentType(TEXT_PLAIN.clone()))
.with_header(Location::new(updated.link().to_owned())) .with_header(Location::new(updated.link().to_owned()))
.with_body("See other") .with_body("See other")),
),
UpdateResult::RebaseConflict(RebaseConflict { UpdateResult::RebaseConflict(RebaseConflict {
base_article, title, body, theme base_article,
title,
body,
theme,
}) => { }) => {
let title = title.flatten(); let title = title.flatten();
let body = body.flatten(); let body = body.flatten();
Ok(Response::new() Ok(Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body(Layout { .with_body(
base: None, Layout {
title: &title, base: None,
theme,
body: &Template {
revision: base_article.revision,
last_updated: Some(&last_updated(
base_article.article_id,
&Local.from_utc_datetime(&base_article.created),
base_article.author.as_ref().map(|x| &**x)
)),
edit: true,
cancel_url: Some(base_article.link()),
title: &title, title: &title,
raw: &body, theme,
rendered: render_markdown(&body), body: &Template {
themes: &theme::THEMES.iter().map(|&x| SelectableTheme { revision: base_article.revision,
theme: x, last_updated: Some(&last_updated(
selected: x == theme, base_article.article_id,
}).collect::<Vec<_>>(), &Local.from_utc_datetime(&base_article.created),
}, base_article.author.as_ref().map(|x| &**x),
}.to_string()) )),
) edit: true,
cancel_url: Some(base_article.link()),
title: &title,
raw: &body,
rendered: render_markdown(&body),
themes: &theme::THEMES
.iter()
.map(|&x| SelectableTheme {
theme: x,
selected: x == theme,
})
.collect::<Vec<_>>(),
},
}
.to_string(),
))
} }
} }),
})
) )
} }
} }


@@ -1,4 +1,4 @@
-use chrono::{TimeZone, DateTime, Local};
+use chrono::{DateTime, Local, TimeZone};
 use futures::{self, Future};
 use hyper;
 use hyper::header::ContentType;
@@ -24,7 +24,12 @@ impl ArticleRevisionResource {
     }
 }
 
-pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &DateTime<Local>, author: Option<&str>) -> String {
+pub fn timestamp_and_author(
+    sequence_number: i32,
+    article_id: i32,
+    created: &DateTime<Local>,
+    author: Option<&str>,
+) -> String {
     struct Author<'a> {
         author: &'a str,
         history: String,
@@ -42,7 +47,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
     Template {
         created: &created.to_rfc2822(),
-        article_history: &format!("_changes{}",
+        article_history: &format!(
+            "_changes{}",
             QueryParameters::default()
                 .pagination(pagination)
                 .article_id(Some(article_id))
@@ -50,7 +56,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
         ),
         author: author.map(|author| Author {
             author: &author,
-            history: format!("_changes{}",
+            history: format!(
+                "_changes{}",
                 QueryParameters::default()
                     .pagination(pagination)
                     .article_id(Some(article_id))
@@ -58,7 +65,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
                     .into_link()
             ),
         }),
-    }.to_string()
+    }
+    .to_string()
 }
 
 impl Resource for ArticleRevisionResource {
@@ -68,15 +76,16 @@ impl Resource for ArticleRevisionResource {
     }
 
     fn head(&self) -> ResponseFuture {
-        Box::new(futures::finished(Response::new()
-            .with_status(hyper::StatusCode::Ok)
-            .with_header(ContentType(TEXT_HTML.clone()))
+        Box::new(futures::finished(
+            Response::new()
+                .with_status(hyper::StatusCode::Ok)
+                .with_header(ContentType(TEXT_HTML.clone())),
         ))
     }
 
     fn get(self: Box<Self>) -> ResponseFuture {
         #[derive(BartDisplay)]
-        #[template="templates/article_revision.html"]
+        #[template = "templates/article_revision.html"]
         struct Template<'a> {
             link_current: &'a str,
             timestamp_and_author: &'a str,
@@ -87,9 +96,9 @@ impl Resource for ArticleRevisionResource {
let head = self.head(); let head = self.head();
let data = self.data; let data = self.data;
Box::new(head Box::new(head.and_then(move |head| {
.and_then(move |head| Ok(head.with_body(
Ok(head.with_body(system_page( system_page(
Some("../../"), // Hmm, should perhaps accept `base` as argument Some("../../"), // Hmm, should perhaps accept `base` as argument
&data.title, &data.title,
&Template { &Template {
@ -98,23 +107,25 @@ impl Resource for ArticleRevisionResource {
data.sequence_number, data.sequence_number,
data.article_id, data.article_id,
&Local.from_utc_datetime(&data.created), &Local.from_utc_datetime(&data.created),
data.author.as_ref().map(|x| &**x) data.author.as_ref().map(|x| &**x),
), ),
diff_link: diff_link: if data.revision > 1 {
if data.revision > 1 { Some(format!(
Some(format!("_diff/{}?{}", "_diff/{}?{}",
data.article_id, data.article_id,
diff_resource::QueryParameters::new( diff_resource::QueryParameters::new(
data.revision as u32 - 1, data.revision as u32 - 1,
data.revision as u32, data.revision as u32,
) )
)) ))
} else { } else {
None None
}, },
rendered: render_markdown(&data.body), rendered: render_markdown(&data.body),
}, },
).to_string())) )
.to_string(),
)) ))
}))
} }
} }


@@ -1,6 +1,6 @@
 use diesel;
-use futures::{self, Future};
 use futures::future::{done, finished};
+use futures::{self, Future};
 use hyper;
 use hyper::header::ContentType;
 use hyper::server::*;
@@ -40,8 +40,16 @@ pub struct QueryParameters {
 impl QueryParameters {
     pub fn pagination(self, pagination: Pagination<i32>) -> Self {
         Self {
-            after: if let Pagination::After(x) = pagination { Some(x) } else { None },
-            before: if let Pagination::Before(x) = pagination { Some(x) } else { None },
+            after: if let Pagination::After(x) = pagination {
+                Some(x)
+            } else {
+                None
+            },
+            before: if let Pagination::Before(x) = pagination {
+                Some(x)
+            } else {
+                None
+            },
             ..self
         }
     }
@@ -56,7 +64,11 @@ impl QueryParameters {
     pub fn limit(self, limit: i32) -> Self {
         Self {
-            limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None },
+            limit: if limit != DEFAULT_LIMIT {
+                Some(limit)
+            } else {
+                None
+            },
             ..self
         }
     }
@@ -76,9 +88,7 @@ fn apply_query_config<'a>(
     article_id: Option<i32>,
     author: Option<String>,
     limit: i32,
-)
-    -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>
-{
+) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite> {
     use diesel::prelude::*;
 
     if let Some(article_id) = article_id {
impl ChangesLookup { impl ChangesLookup {
pub fn new(state: State, show_authors: bool) -> ChangesLookup { pub fn new(state: State, show_authors: bool) -> ChangesLookup {
Self { state, show_authors } Self {
state,
show_authors,
}
} }
pub fn lookup(&self, query: Option<&str>) -> Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>> { pub fn lookup(
&self,
query: Option<&str>,
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
use super::pagination; use super::pagination;
let state = self.state.clone(); let state = self.state.clone();
@ -117,48 +133,74 @@ impl ChangesLookup {
Ok((pagination, params.article_id, params.author, limit)) Ok((pagination, params.article_id, params.author, limit))
})()) })())
.and_then(move |(pagination, article_id, author, limit)| match pagination { .and_then(move |(pagination, article_id, author, limit)| {
Pagination::After(x) => { match pagination {
let author2 = author.clone(); Pagination::After(x) => {
let author2 = author.clone();
Box::new(state.query_article_revision_stubs(move |query| { Box::new(
use diesel::prelude::*; state
.query_article_revision_stubs(move |query| {
use diesel::prelude::*;
apply_query_config(query, article_id, author2, limit) apply_query_config(query, article_id, author2, limit)
.filter(article_revisions::sequence_number.gt(x)) .filter(article_revisions::sequence_number.gt(x))
.order(article_revisions::sequence_number.asc()) .order(article_revisions::sequence_number.asc())
}).and_then(move |mut data| { })
let extra_element = if data.len() > limit as usize { .and_then(move |mut data| {
data.pop() let extra_element = if data.len() > limit as usize {
} else { data.pop()
None } else {
}; None
};
let args = let args = QueryParameters {
QueryParameters { after: None,
after: None, before: None,
before: None, article_id,
article_id, author,
author, limit: None,
limit: None, }
} .limit(limit);
.limit(limit);
Ok(Some(match extra_element { Ok(Some(match extra_element {
Some(x) => Box::new(TemporaryRedirectResource::new( Some(x) => Box::new(TemporaryRedirectResource::new(
args args.pagination(Pagination::Before(x.sequence_number))
.pagination(Pagination::Before(x.sequence_number)) .into_link(),
.into_link() ))
)) as BoxResource, as BoxResource,
None => Box::new(TemporaryRedirectResource::new( None => Box::new(TemporaryRedirectResource::new(
args.into_link() args.into_link(),
)) as BoxResource, ))
})) as BoxResource,
})) as Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>> }))
}, }),
Pagination::Before(x) => Box::new(finished(Some(Box::new(ChangesResource::new(state, show_authors, Some(x), article_id, author, limit)) as BoxResource))), )
Pagination::None => Box::new(finished(Some(Box::new(ChangesResource::new(state, show_authors, None, article_id, author, limit)) as BoxResource))), as Box<
}) dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>,
>
}
Pagination::Before(x) => {
Box::new(finished(Some(Box::new(ChangesResource::new(
state,
show_authors,
Some(x),
article_id,
author,
limit,
)) as BoxResource)))
}
Pagination::None => Box::new(finished(Some(Box::new(ChangesResource::new(
state,
show_authors,
None,
article_id,
author,
limit,
))
as BoxResource))),
}
}),
) )
} }
} }
@ -173,8 +215,22 @@ pub struct ChangesResource {
} }
impl ChangesResource { impl ChangesResource {
pub fn new(state: State, show_authors: bool, before: Option<i32>, article_id: Option<i32>, author: Option<String>, limit: i32) -> Self { pub fn new(
Self { state, show_authors, before, article_id, author, limit } state: State,
show_authors: bool,
before: Option<i32>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
) -> Self {
Self {
state,
show_authors,
before,
article_id,
author,
limit,
}
} }
fn query_args(&self) -> QueryParameters { fn query_args(&self) -> QueryParameters {
@ -196,14 +252,15 @@ impl Resource for ChangesResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
use chrono::{TimeZone, Local}; use chrono::{Local, TimeZone};
struct Row<'a> { struct Row<'a> {
resource: &'a ChangesResource, resource: &'a ChangesResource,
@ -224,7 +281,8 @@ impl Resource for ChangesResource {
impl<'a> Row<'a> { impl<'a> Row<'a> {
fn author_link(&self) -> String { fn author_link(&self) -> String {
self.resource.query_args() self.resource
.query_args()
.pagination(Pagination::After(self.sequence_number)) .pagination(Pagination::After(self.sequence_number))
.author(self.author.clone()) .author(self.author.clone())
.into_link() .into_link()
@ -237,7 +295,7 @@ impl Resource for ChangesResource {
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/changes.html"] #[template = "templates/changes.html"]
struct Template<'a> { struct Template<'a> {
resource: &'a ChangesResource, resource: &'a ChangesResource,
@ -260,24 +318,25 @@ impl Resource for ChangesResource {
} }
fn all_articles_link(&self) -> Option<String> { fn all_articles_link(&self) -> Option<String> {
self.resource.article_id.map(|_| { self.resource
self.resource.query_args() .article_id
.article_id(None) .map(|_| self.resource.query_args().article_id(None).into_link())
.into_link()
})
} }
fn all_authors_link(&self) -> Option<String> { fn all_authors_link(&self) -> Option<String> {
self.resource.author.as_ref().map(|_| { self.resource
self.resource.query_args() .author
.author(None) .as_ref()
.into_link() .map(|_| self.resource.query_args().author(None).into_link())
})
} }
} }
let (before, article_id, author, limit) = let (before, article_id, author, limit) = (
(self.before.clone(), self.article_id.clone(), self.author.clone(), self.limit); self.before.clone(),
self.article_id.clone(),
self.author.clone(),
self.limit,
);
let data = self.state.query_article_revision_stubs(move |query| { let data = self.state.query_article_revision_stubs(move |query| {
use diesel::prelude::*; use diesel::prelude::*;
@ -292,67 +351,79 @@ impl Resource for ChangesResource {
let head = self.head(); let head = self.head();
Box::new(data.join(head) Box::new(data.join(head).and_then(move |(mut data, head)| {
.and_then(move |(mut data, head)| { use std::iter::Iterator;
use std::iter::Iterator;
let extra_element = if data.len() > self.limit as usize { let extra_element = if data.len() > self.limit as usize {
data.pop() data.pop()
} else { } else {
None None
}; };
let (newer, older) = match self.before { let (newer, older) = match self.before {
Some(x) => ( Some(x) => (
Some(NavLinks { Some(NavLinks {
more: self.query_args().pagination(Pagination::After(x-1)).into_link(), more: self
end: self.query_args().pagination(Pagination::None).into_link(), .query_args()
}), .pagination(Pagination::After(x - 1))
extra_element.map(|_| NavLinks { .into_link(),
more: self.query_args() end: self.query_args().pagination(Pagination::None).into_link(),
.pagination(Pagination::Before(data.last().unwrap().sequence_number)) }),
.into_link(), extra_element.map(|_| NavLinks {
end: self.query_args().pagination(Pagination::After(0)).into_link(), more: self
}) .query_args()
), .pagination(Pagination::Before(data.last().unwrap().sequence_number))
None => ( .into_link(),
None, end: self
extra_element.map(|_| NavLinks { .query_args()
more: self.query_args() .pagination(Pagination::After(0))
.pagination(Pagination::Before(data.last().unwrap().sequence_number)) .into_link(),
.into_link(), }),
end: self.query_args().pagination(Pagination::After(0)).into_link(), ),
}), None => (
), None,
}; extra_element.map(|_| NavLinks {
more: self
.query_args()
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
.into_link(),
end: self
.query_args()
.pagination(Pagination::After(0))
.into_link(),
}),
),
};
let changes = &data.into_iter().map(|x| { let changes = &data
Row { .into_iter()
resource: &self, .map(|x| Row {
sequence_number: x.sequence_number, resource: &self,
article_id: x.article_id, sequence_number: x.sequence_number,
revision: x.revision, article_id: x.article_id,
created: Local.from_utc_datetime(&x.created).to_rfc2822(), revision: x.revision,
author: x.author, created: Local.from_utc_datetime(&x.created).to_rfc2822(),
_slug: x.slug, author: x.author,
title: x.title, _slug: x.slug,
_latest: x.latest, title: x.title,
diff_link: _latest: x.latest,
if x.revision > 1 { diff_link: if x.revision > 1 {
Some(format!("_diff/{}?{}", Some(format!(
x.article_id, "_diff/{}?{}",
diff_resource::QueryParameters::new( x.article_id,
x.revision as u32 - 1, diff_resource::QueryParameters::new(
x.revision as u32, x.revision as u32 - 1,
) x.revision as u32,
)) )
} else { ))
None } else {
}, None
} },
}).collect::<Vec<_>>(); })
.collect::<Vec<_>>();
Ok(head.with_body(system_page( Ok(head.with_body(
system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"Changes", "Changes",
Template { Template {
@ -360,9 +431,11 @@ impl Resource for ChangesResource {
show_authors: self.show_authors, show_authors: self.show_authors,
newer, newer,
older, older,
changes changes,
} },
).to_string())) )
})) .to_string(),
))
}))
} }
} }
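The ChangesLookup::lookup and ChangesResource::new rewrites above show the signature style applied throughout this commit: once a signature no longer fits on one line, each parameter moves to its own line and the return type follows the closing parenthesis. A small, std-only sketch with hypothetical names:

// Hypothetical free function with the same parameter shape as ChangesResource::new.
fn changes_page(
    show_authors: bool,
    before: Option<i32>,
    article_id: Option<i32>,
    author: Option<String>,
    limit: i32,
) -> String {
    format!(
        "authors={} before={:?} article={:?} author={:?} limit={}",
        show_authors, before, article_id, author, limit
    )
}

fn main() {
    println!("{}", changes_page(true, None, Some(1), None, 30));
}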
View file
@ -1,8 +1,8 @@
use std::fmt; use std::fmt;
use diff; use diff;
use futures::{self, Future};
use futures::future::done; use futures::future::done;
use futures::{self, Future};
use hyper; use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
@ -48,25 +48,28 @@ impl DiffLookup {
Self { state } Self { state }
} }
pub fn lookup(&self, article_id: u32, query: Option<&str>) -> Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>> { pub fn lookup(
&self,
article_id: u32,
query: Option<&str>,
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
let state = self.state.clone(); let state = self.state.clone();
Box::new(done( Box::new(
serde_urlencoded::from_str(query.unwrap_or("")) done(serde_urlencoded::from_str(query.unwrap_or("")).map_err(Into::into))
.map_err(Into::into) .and_then(move |params: QueryParameters| {
).and_then(move |params: QueryParameters| { let from = state.get_article_revision(article_id as i32, params.from as i32);
let from = state.get_article_revision(article_id as i32, params.from as i32); let to = state.get_article_revision(article_id as i32, params.to as i32);
let to = state.get_article_revision(article_id as i32, params.to as i32);
from.join(to) from.join(to)
}).and_then(move |(from, to)| { })
match (from, to) { .and_then(move |(from, to)| match (from, to) {
(Some(from), Some(to)) => (Some(from), Some(to)) => {
Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource)), Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource))
_ => }
Ok(None), _ => Ok(None),
} }),
})) )
} }
} }
@ -89,9 +92,10 @@ impl Resource for DiffResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
@ -119,64 +123,88 @@ impl Resource for DiffResource {
let head = self.head(); let head = self.head();
Box::new(head Box::new(head.and_then(move |head| {
.and_then(move |head| { let consecutive = self.to.revision - self.from.revision == 1;
let consecutive = self.to.revision - self.from.revision == 1;
let author = match consecutive { let author = match consecutive {
true => self.to.author.as_ref().map(|x| &**x), true => self.to.author.as_ref().map(|x| &**x),
false => None, false => None,
}; };
let author_link = &format!("_changes{}", let author_link = &format!(
changes_resource::QueryParameters::default() "_changes{}",
.author(author.map(|x| x.to_owned())) changes_resource::QueryParameters::default()
.pagination(Pagination::After(self.from.sequence_number)) .author(author.map(|x| x.to_owned()))
.into_link() .pagination(Pagination::After(self.from.sequence_number))
); .into_link()
);
let article_history_link = &format!("_changes{}", let article_history_link = &format!(
changes_resource::QueryParameters::default() "_changes{}",
.article_id(Some(self.from.article_id)) changes_resource::QueryParameters::default()
.pagination(Pagination::After(self.from.sequence_number)) .article_id(Some(self.from.article_id))
.into_link() .pagination(Pagination::After(self.from.sequence_number))
); .into_link()
);
let title = &diff::chars(&self.from.title, &self.to.title) let title = &diff::chars(&self.from.title, &self.to.title)
.into_iter() .into_iter()
.map(|x| match x { .map(|x| match x {
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() }, diff::Result::Left(x) => Diff {
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() }, removed: Some(x),
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() }, ..Default::default()
}) },
.collect::<Vec<_>>(); diff::Result::Both(x, _) => Diff {
same: Some(x),
..Default::default()
},
diff::Result::Right(x) => Diff {
added: Some(x),
..Default::default()
},
})
.collect::<Vec<_>>();
let lines = &diff::lines(&self.from.body, &self.to.body) let lines = &diff::lines(&self.from.body, &self.to.body)
.into_iter() .into_iter()
.map(|x| match x { .map(|x| match x {
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() }, diff::Result::Left(x) => Diff {
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() }, removed: Some(x),
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() }, ..Default::default()
}) },
.collect::<Vec<_>>(); diff::Result::Both(x, _) => Diff {
same: Some(x),
..Default::default()
},
diff::Result::Right(x) => Diff {
added: Some(x),
..Default::default()
},
})
.collect::<Vec<_>>();
Ok(head Ok(head.with_body(
.with_body(Layout { Layout {
base: Some("../"), // Hmm, should perhaps accept `base` as argument base: Some("../"), // Hmm, should perhaps accept `base` as argument
title: "Difference", title: "Difference",
theme: theme::theme_from_str_hash("Difference"), theme: theme::theme_from_str_hash("Difference"),
body: &Template { body: &Template {
consecutive, consecutive,
article_id: self.from.article_id as u32, article_id: self.from.article_id as u32,
author, author,
author_link, author_link,
article_history_link, article_history_link,
from_link: &format!("_revisions/{}/{}", self.from.article_id, self.from.revision), from_link: &format!(
to_link: &format!("_revisions/{}/{}", self.to.article_id, self.to.revision), "_revisions/{}/{}",
title, self.from.article_id, self.from.revision
lines, ),
}, to_link: &format!("_revisions/{}/{}", self.to.article_id, self.to.revision),
}.to_string())) title,
})) lines,
},
}
.to_string(),
))
}))
} }
} }
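The title and lines mappings above turn one-line match arms like Diff { removed: Some(x), ..Default::default() } into multi-line struct literals. The same pattern in a standalone, compilable form, with hypothetical stand-ins for diff::Result and the template's Diff type:

// Stand-ins: Chunk plays the role of diff::Result, Diff the template struct.
enum Chunk<T> {
    Left(T),
    Both(T, T),
    Right(T),
}

#[derive(Default, Debug)]
struct Diff {
    removed: Option<char>,
    same: Option<char>,
    added: Option<char>,
}

fn render(chunks: Vec<Chunk<char>>) -> Vec<Diff> {
    chunks
        .into_iter()
        .map(|x| match x {
            Chunk::Left(x) => Diff {
                removed: Some(x),
                ..Default::default()
            },
            Chunk::Both(x, _) => Diff {
                same: Some(x),
                ..Default::default()
            },
            Chunk::Right(x) => Diff {
                added: Some(x),
                ..Default::default()
            },
        })
        .collect()
}

fn main() {
    let out = render(vec![Chunk::Left('a'), Chunk::Both('b', 'b'), Chunk::Right('c')]);
    println!("{:?}", out);
}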
View file
@ -15,7 +15,11 @@ pub struct HtmlResource {
impl HtmlResource { impl HtmlResource {
pub fn new(base: Option<&'static str>, title: &'static str, html_body: &'static str) -> Self { pub fn new(base: Option<&'static str>, title: &'static str, html_body: &'static str) -> Self {
HtmlResource { base, title, html_body } HtmlResource {
base,
title,
html_body,
}
} }
} }
@ -26,22 +30,18 @@ impl Resource for HtmlResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let head = self.head(); let head = self.head();
Box::new(head Box::new(head.and_then(move |head| {
.and_then(move |head| { Ok(head.with_body(system_page(self.base, self.title, self.html_body).to_string()))
Ok(head.with_body(system_page( }))
self.base,
self.title,
self.html_body
).to_string()))
}))
} }
} }
View file
@ -1,8 +1,8 @@
pub mod pagination; pub mod pagination;
mod about_resource; mod about_resource;
mod article_revision_resource;
mod article_resource; mod article_resource;
mod article_revision_resource;
mod changes_resource; mod changes_resource;
mod diff_resource; mod diff_resource;
mod html_resource; mod html_resource;
@ -13,8 +13,8 @@ mod sitemap_resource;
mod temporary_redirect_resource; mod temporary_redirect_resource;
pub use self::about_resource::AboutResource; pub use self::about_resource::AboutResource;
pub use self::article_revision_resource::ArticleRevisionResource;
pub use self::article_resource::ArticleResource; pub use self::article_resource::ArticleResource;
pub use self::article_revision_resource::ArticleRevisionResource;
pub use self::changes_resource::{ChangesLookup, ChangesResource}; pub use self::changes_resource::{ChangesLookup, ChangesResource};
pub use self::diff_resource::{DiffLookup, DiffResource}; pub use self::diff_resource::{DiffLookup, DiffResource};
pub use self::html_resource::HtmlResource; pub use self::html_resource::HtmlResource;
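The only change in this module file is ordering: article_resource now sorts before article_revision_resource in both the mod list and the pub use list, matching the alphabetical reordering rustfmt applies by default to modules and imports. The same effect on plain use items, sketched with std paths so it compiles on its own:

// After formatting, grouped imports end up in alphabetical order.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt::Write;

fn main() {
    let _maps = (BTreeMap::<u8, u8>::new(), HashMap::<u8, u8>::new());
    let mut s = String::new();
    write!(s, "imports stay sorted").unwrap();
    println!("{}", s);
}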
View file
@ -52,9 +52,10 @@ impl Resource for NewArticleResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::NotFound) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::NotFound)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
@ -66,7 +67,7 @@ impl Resource for NewArticleResource {
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/article.html"] #[template = "templates/article.html"]
struct Template<'a> { struct Template<'a> {
revision: &'a str, revision: &'a str,
last_updated: Option<&'a str>, last_updated: Option<&'a str>,
@ -84,42 +85,48 @@ impl Resource for NewArticleResource {
} }
} }
let title = self.slug.as_ref() let title = self
.slug
.as_ref()
.map_or("".to_owned(), |x| title_from_slug(x)); .map_or("".to_owned(), |x| title_from_slug(x));
Box::new(self.head() Box::new(self.head().and_then(move |head| {
.and_then(move |head| { Ok(head.with_body(
Ok(head Layout {
.with_body(Layout { base: None, // Hmm, should perhaps accept `base` as argument
base: None, // Hmm, should perhaps accept `base` as argument title: &title,
theme: theme::Theme::Gray,
body: &Template {
revision: NEW,
last_updated: None,
edit: self.edit,
cancel_url: self.slug.as_ref().map(|x| &**x),
title: &title, title: &title,
theme: theme::Theme::Gray, raw: "",
body: &Template { rendered: EMPTY_ARTICLE_MESSAGE,
revision: NEW, themes: &theme::THEMES
last_updated: None, .iter()
edit: self.edit, .map(|&x| SelectableTheme {
cancel_url: self.slug.as_ref().map(|x| &**x),
title: &title,
raw: "",
rendered: EMPTY_ARTICLE_MESSAGE,
themes: &theme::THEMES.iter().map(|&x| SelectableTheme {
theme: x, theme: x,
selected: false, selected: false,
}).collect::<Vec<_>>(), })
}, .collect::<Vec<_>>(),
}.to_string())) },
})) }
.to_string(),
))
}))
} }
fn put(self: Box<Self>, body: hyper::Body, identity: Option<String>) -> ResponseFuture { fn put(self: Box<Self>, body: hyper::Body, identity: Option<String>) -> ResponseFuture {
// TODO Check incoming Content-Type // TODO Check incoming Content-Type
// TODO Refactor? Reduce duplication with ArticleResource::put? // TODO Refactor? Reduce duplication with ArticleResource::put?
use chrono::{TimeZone, Local}; use chrono::{Local, TimeZone};
use futures::Stream; use futures::Stream;
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/article_contents.html"] #[template = "templates/article_contents.html"]
struct Template<'a> { struct Template<'a> {
title: &'a str, title: &'a str,
rendered: String, rendered: String,
@ -137,43 +144,51 @@ impl Resource for NewArticleResource {
last_updated: &'a str, last_updated: &'a str,
} }
Box::new(body Box::new(
.concat2() body.concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| { .and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
serde_urlencoded::from_bytes(&body) .and_then(move |arg: CreateArticle| {
.map_err(Into::into) if arg.base_revision != NEW {
}) unimplemented!("Version update conflict");
.and_then(move |arg: CreateArticle| { }
if arg.base_revision != NEW { let theme = arg.theme.unwrap_or_else(theme::random);
unimplemented!("Version update conflict"); self.state.create_article(
} self.slug.clone(),
let theme = arg.theme.unwrap_or_else(theme::random); arg.title,
self.state.create_article(self.slug.clone(), arg.title, arg.body, identity, theme) arg.body,
}) identity,
.and_then(|updated| { theme,
futures::finished(Response::new() )
.with_status(hyper::StatusCode::Ok) })
.with_header(ContentType(APPLICATION_JSON.clone())) .and_then(|updated| {
.with_body(serde_json::to_string(&PutResponse { futures::finished(
slug: &updated.slug, Response::new()
article_id: updated.article_id, .with_status(hyper::StatusCode::Ok)
revision: updated.revision, .with_header(ContentType(APPLICATION_JSON.clone()))
title: &updated.title, .with_body(
body: &updated.body, serde_json::to_string(&PutResponse {
theme: updated.theme, slug: &updated.slug,
rendered: &Template { article_id: updated.article_id,
title: &updated.title, revision: updated.revision,
rendered: render_markdown(&updated.body), title: &updated.title,
}.to_string(), body: &updated.body,
last_updated: &super::article_resource::last_updated( theme: updated.theme,
updated.article_id, rendered: &Template {
&Local.from_utc_datetime(&updated.created), title: &updated.title,
updated.author.as_ref().map(|x| &**x) rendered: render_markdown(&updated.body),
), }
}).expect("Should never fail")) .to_string(),
) last_updated: &super::article_resource::last_updated(
}) updated.article_id,
&Local.from_utc_datetime(&updated.created),
updated.author.as_ref().map(|x| &**x),
),
})
.expect("Should never fail"),
),
)
}),
) )
} }
@ -183,28 +198,32 @@ impl Resource for NewArticleResource {
use futures::Stream; use futures::Stream;
Box::new(body Box::new(
.concat2() body.concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| { .and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
serde_urlencoded::from_bytes(&body) .and_then(move |arg: CreateArticle| {
.map_err(Into::into) if arg.base_revision != NEW {
}) unimplemented!("Version update conflict");
.and_then(move |arg: CreateArticle| { }
if arg.base_revision != NEW { let theme = arg.theme.unwrap_or_else(theme::random);
unimplemented!("Version update conflict"); self.state.create_article(
} self.slug.clone(),
let theme = arg.theme.unwrap_or_else(theme::random); arg.title,
self.state.create_article(self.slug.clone(), arg.title, arg.body, identity, theme) arg.body,
}) identity,
.and_then(|updated| { theme,
futures::finished(Response::new() )
.with_status(hyper::StatusCode::SeeOther) })
.with_header(ContentType(TEXT_PLAIN.clone())) .and_then(|updated| {
.with_header(Location::new(updated.link().to_owned())) futures::finished(
.with_body("See other") Response::new()
) .with_status(hyper::StatusCode::SeeOther)
}) .with_header(ContentType(TEXT_PLAIN.clone()))
.with_header(Location::new(updated.link().to_owned()))
.with_body("See other"),
)
}),
) )
} }
} }
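Both the put and post handlers above become a single long receiver (body.concat2()) followed by a chain of combinators, with each .map_err/.and_then on its own indented line. The same chained layout in a tiny, std-only sketch (hypothetical parse_numbers helper, no futures involved):

fn parse_numbers(input: &str) -> Result<Vec<i32>, std::num::ParseIntError> {
    input
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(str::parse::<i32>)
        .collect()
}

fn main() {
    let total = parse_numbers("1, 2, 3")
        .map(|v| v.iter().sum::<i32>())
        .unwrap_or_default();
    println!("{}", total);
}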
View file
@ -37,7 +37,7 @@ impl<T> PaginationStruct<T> {
(Some(x), None) => Ok(Pagination::After(x)), (Some(x), None) => Ok(Pagination::After(x)),
(None, Some(x)) => Ok(Pagination::Before(x)), (None, Some(x)) => Ok(Pagination::Before(x)),
(None, None) => Ok(Pagination::None), (None, None) => Ok(Pagination::None),
_ => Err(Error) _ => Err(Error),
} }
} }
} }
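The only change here is the trailing comma after the final Err(Error) arm; the last expression arm of a multi-line match gets one too. A one-function sketch:

fn sign(n: i32) -> &'static str {
    match n {
        x if x > 0 => "positive",
        0 => "zero",
        _ => "negative",
    }
}

fn main() {
    println!("{}", sign(-3));
}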
View file
@ -1,5 +1,5 @@
use futures::Future; use futures::Future;
use hyper::header::{ContentType, ContentLength, CacheControl, CacheDirective}; use hyper::header::{CacheControl, CacheDirective, ContentLength, ContentType};
use hyper::server::*; use hyper::server::*;
use hyper::StatusCode; use hyper::StatusCode;
@ -18,21 +18,21 @@ impl Resource for ReadOnlyResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(::futures::finished(Response::new() Box::new(::futures::finished(
.with_status(StatusCode::Ok) Response::new()
.with_header(ContentType(self.content_type.clone())) .with_status(StatusCode::Ok)
.with_header(CacheControl(vec![ .with_header(ContentType(self.content_type.clone()))
CacheDirective::MustRevalidate, .with_header(CacheControl(vec![
CacheDirective::NoStore, CacheDirective::MustRevalidate,
])) CacheDirective::NoStore,
])),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head().map(move |head| Box::new(self.head().map(move |head| {
head head.with_header(ContentLength(self.body.len() as u64))
.with_header(ContentLength(self.body.len() as u64))
.with_body(self.body.clone()) .with_body(self.body.clone())
)) }))
} }
} }
View file
@ -34,14 +34,22 @@ impl QueryParameters {
pub fn limit(self, limit: u32) -> Self { pub fn limit(self, limit: u32) -> Self {
Self { Self {
limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None }, limit: if limit != DEFAULT_LIMIT {
Some(limit)
} else {
None
},
..self ..self
} }
} }
pub fn snippet_size(self, snippet_size: u32) -> Self { pub fn snippet_size(self, snippet_size: u32) -> Self {
Self { Self {
snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE { Some(snippet_size) } else { None }, snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE {
Some(snippet_size)
} else {
None
},
..self ..self
} }
} }
@ -69,15 +77,13 @@ impl SearchLookup {
pub fn lookup(&self, query: Option<&str>) -> Result<Option<BoxResource>, crate::web::Error> { pub fn lookup(&self, query: Option<&str>) -> Result<Option<BoxResource>, crate::web::Error> {
let args: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?; let args: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?;
Ok(Some(Box::new( Ok(Some(Box::new(SearchResource::new(
SearchResource::new( self.state.clone(),
self.state.clone(), args.q,
args.q, args.limit.unwrap_or(DEFAULT_LIMIT),
args.limit.unwrap_or(DEFAULT_LIMIT), args.offset.unwrap_or(0),
args.offset.unwrap_or(0), args.snippet_size.unwrap_or(DEFAULT_SNIPPET_SIZE),
args.snippet_size.unwrap_or(DEFAULT_SNIPPET_SIZE), ))))
)
)))
} }
} }
@ -98,8 +104,21 @@ pub enum ResponseType {
} }
impl SearchResource { impl SearchResource {
pub fn new(state: State, query: Option<String>, limit: u32, offset: u32, snippet_size: u32) -> Self { pub fn new(
Self { state, response_type: ResponseType::Html, query, limit, offset, snippet_size } state: State,
query: Option<String>,
limit: u32,
offset: u32,
snippet_size: u32,
) -> Self {
Self {
state,
response_type: ResponseType::Html,
query,
limit,
offset,
snippet_size,
}
} }
fn query_args(&self) -> QueryParameters { fn query_args(&self) -> QueryParameters {
@ -107,9 +126,9 @@ impl SearchResource {
q: self.query.clone(), q: self.query.clone(),
..QueryParameters::default() ..QueryParameters::default()
} }
.offset(self.offset) .offset(self.offset)
.limit(self.limit) .limit(self.limit)
.snippet_size(self.snippet_size) .snippet_size(self.snippet_size)
} }
} }
@ -126,8 +145,10 @@ impl Resource for SearchResource {
self.response_type = match accept.first() { self.response_type = match accept.first() {
Some(&QualityItem { item: ref mime, .. }) Some(&QualityItem { item: ref mime, .. })
if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON =>
=> ResponseType::Json, {
ResponseType::Json
}
_ => ResponseType::Html, _ => ResponseType::Html,
}; };
} }
@ -138,9 +159,10 @@ impl Resource for SearchResource {
&ResponseType::Html => ContentType(TEXT_HTML.clone()), &ResponseType::Html => ContentType(TEXT_HTML.clone()),
}; };
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(content_type) .with_status(hyper::StatusCode::Ok)
.with_header(content_type),
)) ))
} }
@ -154,7 +176,7 @@ impl Resource for SearchResource {
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/search.html"] #[template = "templates/search.html"]
struct Template<'a> { struct Template<'a> {
query: &'a str, query: &'a str,
hits: &'a [(usize, &'a SearchResult)], hits: &'a [(usize, &'a SearchResult)],
@ -163,54 +185,66 @@ impl Resource for SearchResource {
} }
// TODO: Show a search "front page" when no query is given: // TODO: Show a search "front page" when no query is given:
let query = self.query.as_ref().map(|x| x.clone()).unwrap_or("".to_owned()); let query = self
.query
.as_ref()
.map(|x| x.clone())
.unwrap_or("".to_owned());
let data = self.state.search_query(query, (self.limit + 1) as i32, self.offset as i32, self.snippet_size as i32); let data = self.state.search_query(
query,
(self.limit + 1) as i32,
self.offset as i32,
self.snippet_size as i32,
);
let head = self.head(); let head = self.head();
Box::new(data.join(head) Box::new(data.join(head).and_then(move |(mut data, head)| {
.and_then(move |(mut data, head)| { let prev = if self.offset > 0 {
let prev = if self.offset > 0 { Some(
Some(self.query_args() self.query_args()
.offset(self.offset.saturating_sub(self.limit)) .offset(self.offset.saturating_sub(self.limit))
.into_link() .into_link(),
) )
} else { } else {
None None
}; };
let next = if data.len() > self.limit as usize { let next = if data.len() > self.limit as usize {
data.pop(); data.pop();
Some(self.query_args() Some(
self.query_args()
.offset(self.offset + self.limit) .offset(self.offset + self.limit)
.into_link() .into_link(),
) )
} else { } else {
None None
}; };
match &self.response_type { match &self.response_type {
&ResponseType::Json => Ok(head &ResponseType::Json => Ok(head.with_body(
.with_body(serde_json::to_string(&JsonResponse { serde_json::to_string(&JsonResponse {
query: self.query.as_ref().map(|x| &**x).unwrap_or(""), query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
hits: &data, hits: &data,
prev, prev,
next, next,
}).expect("Should never fail")) })
), .expect("Should never fail"),
&ResponseType::Html => Ok(head.with_body(system_page( )),
&ResponseType::Html => Ok(head.with_body(
system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"Search", "Search",
&Template { &Template {
query: self.query.as_ref().map(|x| &**x).unwrap_or(""), query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
hits: &data.iter() hits: &data.iter().enumerate().collect::<Vec<_>>(),
.enumerate()
.collect::<Vec<_>>(),
prev, prev,
next, next,
}, },
).to_string())), )
} .to_string(),
})) )),
}
}))
} }
} }
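In the Accept-header negotiation above, the arm with a pattern guard is now split: the guard drops to its own line and the body moves into a braced block under the =>. A compilable miniature with a hypothetical string-based negotiate function standing in for hyper's QualityItem handling:

#[derive(Debug, PartialEq)]
enum ResponseType {
    Json,
    Html,
}

fn negotiate(accept: Option<&str>) -> ResponseType {
    match accept {
        Some(value)
            if value.contains("application/json") && !value.contains("text/html") =>
        {
            ResponseType::Json
        }
        _ => ResponseType::Html,
    }
}

fn main() {
    assert_eq!(negotiate(Some("application/json")), ResponseType::Json);
    assert_eq!(negotiate(None), ResponseType::Html);
    println!("ok");
}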
View file
@ -26,15 +26,16 @@ impl Resource for SitemapResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::Ok) Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/sitemap.html"] #[template = "templates/sitemap.html"]
struct Template<'a> { struct Template<'a> {
articles: &'a [ArticleRevisionStub], articles: &'a [ArticleRevisionStub],
} }
@ -42,15 +43,17 @@ impl Resource for SitemapResource {
let data = self.state.get_latest_article_revision_stubs(); let data = self.state.get_latest_article_revision_stubs();
let head = self.head(); let head = self.head();
Box::new(data.join(head) Box::new(data.join(head).and_then(move |(articles, head)| {
.and_then(move |(articles, head)| { Ok(head.with_body(
Ok(head.with_body(system_page( system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"Sitemap", "Sitemap",
Template { Template {
articles: &articles, articles: &articles,
}, },
).to_string())) )
})) .to_string(),
))
}))
} }
} }
View file
@ -15,17 +15,16 @@ impl TemporaryRedirectResource {
} }
pub fn from_slug<S: AsRef<str>>(slug: S, edit: bool) -> Self { pub fn from_slug<S: AsRef<str>>(slug: S, edit: bool) -> Self {
let base = let base = if slug.as_ref().is_empty() {
if slug.as_ref().is_empty() { "."
"." } else {
} else { slug.as_ref()
slug.as_ref() };
};
let tail = if edit { "?edit" } else { "" }; let tail = if edit { "?edit" } else { "" };
Self { Self {
location: format!("{}{}", base, tail) location: format!("{}{}", base, tail),
} }
} }
} }
@ -37,18 +36,18 @@ impl Resource for TemporaryRedirectResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new() Box::new(futures::finished(
.with_status(hyper::StatusCode::TemporaryRedirect) Response::new()
.with_header(Location::new(self.location.clone())) .with_status(hyper::StatusCode::TemporaryRedirect)
.with_header(Location::new(self.location.clone())),
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head() Box::new(
.and_then(move |head| { self.head()
Ok(head .and_then(move |head| Ok(head.with_body(format!("Moved to {}", self.location)))),
.with_body(format!("Moved to {}", self.location))) )
}))
} }
fn put(self: Box<Self>, _body: hyper::Body, _identity: Option<String>) -> ResponseFuture { fn put(self: Box<Self>, _body: hyper::Body, _identity: Option<String>) -> ResponseFuture {
View file
@ -4,12 +4,12 @@
use std::fmt; use std::fmt;
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::{Accept, ContentType, Server}; use hyper::header::{Accept, ContentType, Server};
use hyper::mime; use hyper::mime;
use hyper::server::*; use hyper::server::*;
use hyper;
use crate::assets::{ThemesCss, StyleCss, SearchJs}; use crate::assets::{SearchJs, StyleCss, ThemesCss};
use crate::build_config; use crate::build_config;
use crate::theme; use crate::theme;
use crate::web::Lookup; use crate::web::Lookup;
@ -17,8 +17,7 @@ use crate::wiki_lookup::WikiLookup;
lazy_static! { lazy_static! {
static ref TEXT_HTML: mime::Mime = "text/html;charset=utf-8".parse().unwrap(); static ref TEXT_HTML: mime::Mime = "text/html;charset=utf-8".parse().unwrap();
static ref SERVER: Server = static ref SERVER: Server = Server::new(build_config::HTTP_SERVER.as_str());
Server::new(build_config::HTTP_SERVER.as_str());
} }
header! { (XIdentity, "X-Identity") => [String] } header! { (XIdentity, "X-Identity") => [String] }
@ -33,25 +32,38 @@ pub struct Layout<'a, T: 'a + fmt::Display> {
} }
impl<'a, T: 'a + fmt::Display> Layout<'a, T> { impl<'a, T: 'a + fmt::Display> Layout<'a, T> {
pub fn themes_css(&self) -> &str { ThemesCss::resource_name() } pub fn themes_css(&self) -> &str {
pub fn style_css(&self) -> &str { StyleCss::resource_name() } ThemesCss::resource_name()
pub fn search_js(&self) -> &str { SearchJs::resource_name() } }
pub fn style_css(&self) -> &str {
StyleCss::resource_name()
}
pub fn search_js(&self) -> &str {
SearchJs::resource_name()
}
pub fn project_name(&self) -> &str { build_config::PROJECT_NAME } pub fn project_name(&self) -> &str {
pub fn version(&self) -> &str { build_config::VERSION.as_str() } build_config::PROJECT_NAME
}
pub fn version(&self) -> &str {
build_config::VERSION.as_str()
}
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template="templates/system_page_layout.html"] #[template = "templates/system_page_layout.html"]
pub struct SystemPageLayout<'a, T: 'a + fmt::Display> { pub struct SystemPageLayout<'a, T: 'a + fmt::Display> {
title: &'a str, title: &'a str,
html_body: T, html_body: T,
} }
pub fn system_page<'a, T>(base: Option<&'a str>, title: &'a str, body: T) pub fn system_page<'a, T>(
-> Layout<'a, SystemPageLayout<'a, T>> base: Option<&'a str>,
title: &'a str,
body: T,
) -> Layout<'a, SystemPageLayout<'a, T>>
where where
T: 'a + fmt::Display T: 'a + fmt::Display,
{ {
Layout { Layout {
base, base,
@ -79,30 +91,28 @@ pub struct Site {
impl Site { impl Site {
pub fn new(root: WikiLookup, trust_identity: bool) -> Site { pub fn new(root: WikiLookup, trust_identity: bool) -> Site {
Site { root, trust_identity } Site {
root,
trust_identity,
}
} }
fn not_found(base: Option<&str>) -> Response { fn not_found(base: Option<&str>) -> Response {
Response::new() Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body(system_page( .with_body(system_page(base, "Not found", NotFound).to_string())
base,
"Not found",
NotFound,
).to_string())
.with_status(hyper::StatusCode::NotFound) .with_status(hyper::StatusCode::NotFound)
} }
fn internal_server_error(base: Option<&str>, err: Box<dyn ::std::error::Error + Send + Sync>) -> Response { fn internal_server_error(
base: Option<&str>,
err: Box<dyn ::std::error::Error + Send + Sync>,
) -> Response {
eprintln!("Internal Server Error:\n{:#?}", err); eprintln!("Internal Server Error:\n{:#?}", err);
Response::new() Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body(system_page( .with_body(system_page(base, "Internal server error", InternalServerError).to_string())
base,
"Internal server error",
InternalServerError,
).to_string())
.with_status(hyper::StatusCode::InternalServerError) .with_status(hyper::StatusCode::InternalServerError)
} }
} }
@ -113,7 +123,7 @@ fn root_base_from_request_uri(path: &str) -> Option<String> {
match slashes { match slashes {
0 => None, 0 => None,
n => Some(::std::iter::repeat("../").take(n).collect()) n => Some(::std::iter::repeat("../").take(n).collect()),
} }
} }
@ -133,29 +143,41 @@ impl Service for Site {
false => None, false => None,
}; };
let accept_header = headers.get().map(|x: &Accept| x.clone()).unwrap_or(Accept(vec![])); let accept_header = headers
.get()
.map(|x: &Accept| x.clone())
.unwrap_or(Accept(vec![]));
let base = root_base_from_request_uri(uri.path()); let base = root_base_from_request_uri(uri.path());
let base2 = base.clone(); // Bah, stupid clone let base2 = base.clone(); // Bah, stupid clone
Box::new(self.root.lookup(uri.path(), uri.query()) Box::new(
.and_then(move |resource| match resource { self.root
Some(mut resource) => { .lookup(uri.path(), uri.query())
use hyper::Method::*; .and_then(move |resource| match resource {
resource.hacky_inject_accept_header(accept_header); Some(mut resource) => {
match method { use hyper::Method::*;
Options => Box::new(futures::finished(resource.options())), resource.hacky_inject_accept_header(accept_header);
Head => resource.head(), match method {
Get => resource.get(), Options => Box::new(futures::finished(resource.options())),
Put => resource.put(body, identity), Head => resource.head(),
Post => resource.post(body, identity), Get => resource.get(),
_ => Box::new(futures::finished(resource.method_not_allowed())) Put => resource.put(body, identity),
Post => resource.post(body, identity),
_ => Box::new(futures::finished(resource.method_not_allowed())),
}
} }
}, None => Box::new(futures::finished(Self::not_found(
None => Box::new(futures::finished(Self::not_found(base.as_ref().map(|x| &**x)))) base.as_ref().map(|x| &**x),
}) ))),
.or_else(move |err| Ok(Self::internal_server_error(base2.as_ref().map(|x| &**x), err))) })
.map(|response| response.with_header(SERVER.clone())) .or_else(move |err| {
Ok(Self::internal_server_error(
base2.as_ref().map(|x| &**x),
err,
))
})
.map(|response| response.with_header(SERVER.clone())),
) )
} }
} }
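The Layout impl above trades its one-line getters (pub fn style_css(&self) -> &str { StyleCss::resource_name() }) for the expanded brace-per-line form. The same expansion on a self-contained stand-in with hard-coded asset names instead of the real resource_name() calls:

struct Layout;

impl Layout {
    fn themes_css(&self) -> &str {
        "themes.css"
    }

    fn style_css(&self) -> &str {
        "style.css"
    }

    fn search_js(&self) -> &str {
        "search.js"
    }
}

fn main() {
    let layout = Layout;
    println!("{} {} {}", layout.themes_css(), layout.style_css(), layout.search_js());
}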
View file
@ -1,8 +1,8 @@
use std; use std;
use diesel; use diesel;
use diesel::sqlite::SqliteConnection;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use futures_cpupool::{self, CpuFuture}; use futures_cpupool::{self, CpuFuture};
use r2d2::Pool; use r2d2::Pool;
use r2d2_diesel::ConnectionManager; use r2d2_diesel::ConnectionManager;
@ -22,15 +22,12 @@ pub type Error = Box<dyn std::error::Error + Send + Sync>;
pub enum SlugLookup { pub enum SlugLookup {
Miss, Miss,
Hit { Hit { article_id: i32, revision: i32 },
article_id: i32,
revision: i32,
},
Redirect(String), Redirect(String),
} }
#[derive(Insertable)] #[derive(Insertable)]
#[table_name="article_revisions"] #[table_name = "article_revisions"]
struct NewRevision<'a> { struct NewRevision<'a> {
article_id: i32, article_id: i32,
revision: i32, revision: i32,
@ -52,7 +49,11 @@ pub struct RebaseConflict {
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
enum RebaseResult { enum RebaseResult {
Clean { title: String, body: String, theme: Theme }, Clean {
title: String,
body: String,
theme: Theme,
},
Conflict(RebaseConflict), Conflict(RebaseConflict),
} }
@ -61,7 +62,13 @@ pub enum UpdateResult {
RebaseConflict(RebaseConflict), RebaseConflict(RebaseConflict),
} }
fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> { fn decide_slug(
conn: &SqliteConnection,
article_id: i32,
prev_title: &str,
title: &str,
prev_slug: Option<&str>,
) -> Result<String, Error> {
let base_slug = ::slug::slugify(title); let base_slug = ::slug::slugify(title);
if let Some(prev_slug) = prev_slug { if let Some(prev_slug) = prev_slug {
@ -79,7 +86,11 @@ fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title
} }
} }
let base_slug = if base_slug.is_empty() { "article" } else { &base_slug }; let base_slug = if base_slug.is_empty() {
"article"
} else {
&base_slug
};
use crate::schema::article_revisions; use crate::schema::article_revisions;
@ -92,7 +103,8 @@ fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title
.filter(article_revisions::slug.eq(&slug)) .filter(article_revisions::slug.eq(&slug))
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
.count() .count()
.first::<i64>(conn)? != 0; .first::<i64>(conn)?
!= 0;
if !slug_in_use { if !slug_in_use {
break Ok(slug); break Ok(slug);
@ -123,7 +135,11 @@ impl<'a> SyncState<'a> {
.optional()?) .optional()?)
} }
pub fn get_article_revision(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevision>, Error> { pub fn get_article_revision(
&self,
article_id: i32,
revision: i32,
) -> Result<Option<models::ArticleRevision>, Error> {
use crate::schema::article_revisions; use crate::schema::article_revisions;
Ok(article_revisions::table Ok(article_revisions::table
@ -133,12 +149,15 @@ impl<'a> SyncState<'a> {
.optional()?) .optional()?)
} }
pub fn query_article_revision_stubs<F>(&self, f: F) -> Result<Vec<models::ArticleRevisionStub>, Error> pub fn query_article_revision_stubs<F>(
&self,
f: F,
) -> Result<Vec<models::ArticleRevisionStub>, Error>
where where
F: 'static + Send + Sync, F: 'static + Send + Sync,
for <'x> F: for<'x> F: FnOnce(
FnOnce(article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>) -> article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>, ) -> article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
{ {
use crate::schema::article_revisions::dsl::*; use crate::schema::article_revisions::dsl::*;
@ -154,19 +173,24 @@ impl<'a> SyncState<'a> {
author, author,
theme, theme,
)) ))
.load(self.db_connection)? .load(self.db_connection)?)
)
} }
fn get_article_revision_stub(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevisionStub>, Error> { fn get_article_revision_stub(
&self,
article_id: i32,
revision: i32,
) -> Result<Option<models::ArticleRevisionStub>, Error> {
use crate::schema::article_revisions; use crate::schema::article_revisions;
Ok(self.query_article_revision_stubs(move |query| { Ok(self
query .query_article_revision_stubs(move |query| {
.filter(article_revisions::article_id.eq(article_id)) query
.filter(article_revisions::revision.eq(revision)) .filter(article_revisions::article_id.eq(article_id))
.limit(1) .filter(article_revisions::revision.eq(revision))
})?.pop()) .limit(1)
})?
.pop())
} }
pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> { pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> {
@ -180,36 +204,44 @@ impl<'a> SyncState<'a> {
self.db_connection.transaction(|| { self.db_connection.transaction(|| {
use crate::schema::article_revisions; use crate::schema::article_revisions;
Ok(match article_revisions::table Ok(
.filter(article_revisions::slug.eq(slug)) match article_revisions::table
.order(article_revisions::sequence_number.desc()) .filter(article_revisions::slug.eq(slug))
.select(( .order(article_revisions::sequence_number.desc())
article_revisions::article_id, .select((
article_revisions::revision, article_revisions::article_id,
article_revisions::latest, article_revisions::revision,
)) article_revisions::latest,
.first::<ArticleRevisionStub>(self.db_connection) ))
.optional()? .first::<ArticleRevisionStub>(self.db_connection)
{ .optional()?
None => SlugLookup::Miss, {
Some(ref stub) if stub.latest => SlugLookup::Hit { None => SlugLookup::Miss,
article_id: stub.article_id, Some(ref stub) if stub.latest => SlugLookup::Hit {
revision: stub.revision, article_id: stub.article_id,
revision: stub.revision,
},
Some(stub) => SlugLookup::Redirect(
article_revisions::table
.filter(article_revisions::latest.eq(true))
.filter(article_revisions::article_id.eq(stub.article_id))
.select(article_revisions::slug)
.first::<String>(self.db_connection)?,
),
}, },
Some(stub) => SlugLookup::Redirect( )
article_revisions::table
.filter(article_revisions::latest.eq(true))
.filter(article_revisions::article_id.eq(stub.article_id))
.select(article_revisions::slug)
.first::<String>(self.db_connection)?
)
})
}) })
} }
fn rebase_update(&self, article_id: i32, target_base_revision: i32, existing_base_revision: i32, title: String, body: String, theme: Theme) fn rebase_update(
-> Result<RebaseResult, Error> &self,
{ article_id: i32,
target_base_revision: i32,
existing_base_revision: i32,
title: String,
body: String,
theme: Theme,
) -> Result<RebaseResult, Error> {
let mut title_a = title; let mut title_a = title;
let mut body_a = body; let mut body_a = body;
let mut theme_a = theme; let mut theme_a = theme;
@ -226,7 +258,7 @@ impl<'a> SyncState<'a> {
let mut stored = article_revisions::table let mut stored = article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.ge(revision)) .filter(article_revisions::revision.ge(revision))
.filter(article_revisions::revision.le(revision+1)) .filter(article_revisions::revision.le(revision + 1))
.order(article_revisions::revision.asc()) .order(article_revisions::revision.asc())
.select(( .select((
article_revisions::title, article_revisions::title,
@ -242,7 +274,11 @@ impl<'a> SyncState<'a> {
fn merge_themes(a: Theme, o: Theme, b: Theme) -> Theme { fn merge_themes(a: Theme, o: Theme, b: Theme) -> Theme {
// Last change wins // Last change wins
if a != o { a } else { b } if a != o {
a
} else {
b
}
} }
let update = { let update = {
@ -254,12 +290,14 @@ impl<'a> SyncState<'a> {
(Clean(title), Clean(body)) => (title, body, theme), (Clean(title), Clean(body)) => (title, body, theme),
(title_merge, body_merge) => { (title_merge, body_merge) => {
return Ok(RebaseResult::Conflict(RebaseConflict { return Ok(RebaseResult::Conflict(RebaseConflict {
base_article: self.get_article_revision_stub(article_id, revision+1)?.expect("Application layer guarantee"), base_article: self
.get_article_revision_stub(article_id, revision + 1)?
.expect("Application layer guarantee"),
title: title_merge, title: title_merge,
body: body_merge.to_strings(), body: body_merge.to_strings(),
theme, theme,
})); }));
}, }
} }
}; };
@ -268,12 +306,22 @@ impl<'a> SyncState<'a> {
theme_a = update.2; theme_a = update.2;
} }
Ok(RebaseResult::Clean { title: title_a, body: body_a, theme: theme_a }) Ok(RebaseResult::Clean {
title: title_a,
body: body_a,
theme: theme_a,
})
} }
pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>, theme: Option<Theme>) pub fn update_article(
-> Result<UpdateResult, Error> &self,
{ article_id: i32,
base_revision: i32,
title: String,
body: String,
author: Option<String>,
theme: Option<Theme>,
) -> Result<UpdateResult, Error> {
if title.is_empty() { if title.is_empty() {
Err("title cannot be empty")?; Err("title cannot be empty")?;
} }
@ -300,7 +348,14 @@ impl<'a> SyncState<'a> {
} }
let theme = theme.unwrap_or(prev_theme); let theme = theme.unwrap_or(prev_theme);
let rebase_result = self.rebase_update(article_id, latest_revision, base_revision, title, body, theme)?; let rebase_result = self.rebase_update(
article_id,
latest_revision,
base_revision,
title,
body,
theme,
)?;
let (title, body, theme) = match rebase_result { let (title, body, theme) = match rebase_result {
RebaseResult::Clean { title, body, theme } => (title, body, theme), RebaseResult::Clean { title, body, theme } => (title, body, theme),
@ -309,15 +364,21 @@ impl<'a> SyncState<'a> {
let new_revision = latest_revision + 1; let new_revision = latest_revision + 1;
let slug = decide_slug(self.db_connection, article_id, &prev_title, &title, Some(&prev_slug))?; let slug = decide_slug(
self.db_connection,
article_id,
&prev_title,
&title,
Some(&prev_slug),
)?;
diesel::update( diesel::update(
article_revisions::table article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(latest_revision)) .filter(article_revisions::revision.eq(latest_revision)),
) )
.set(article_revisions::latest.eq(false)) .set(article_revisions::latest.eq(false))
.execute(self.db_connection)?; .execute(self.db_connection)?;
diesel::insert_into(article_revisions::table) diesel::insert_into(article_revisions::table)
.values(&NewRevision { .values(&NewRevision {
@ -332,26 +393,32 @@ impl<'a> SyncState<'a> {
}) })
.execute(self.db_connection)?; .execute(self.db_connection)?;
Ok(UpdateResult::Success(article_revisions::table Ok(UpdateResult::Success(
.filter(article_revisions::article_id.eq(article_id)) article_revisions::table
.filter(article_revisions::revision.eq(new_revision)) .filter(article_revisions::article_id.eq(article_id))
.first::<models::ArticleRevision>(self.db_connection)? .filter(article_revisions::revision.eq(new_revision))
.first::<models::ArticleRevision>(self.db_connection)?,
)) ))
}) })
} }
pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>, theme: Theme) pub fn create_article(
-> Result<models::ArticleRevision, Error> &self,
{ target_slug: Option<String>,
title: String,
body: String,
author: Option<String>,
theme: Theme,
) -> Result<models::ArticleRevision, Error> {
if title.is_empty() { if title.is_empty() {
Err("title cannot be empty")?; Err("title cannot be empty")?;
} }
self.db_connection.transaction(|| { self.db_connection.transaction(|| {
#[derive(Insertable)] #[derive(Insertable)]
#[table_name="articles"] #[table_name = "articles"]
struct NewArticle { struct NewArticle {
id: Option<i32> id: Option<i32>,
} }
let article_id = { let article_id = {
@ -361,10 +428,17 @@ impl<'a> SyncState<'a> {
.execute(self.db_connection)?; .execute(self.db_connection)?;
sql::<(diesel::sql_types::Integer)>("SELECT LAST_INSERT_ROWID()") sql::<(diesel::sql_types::Integer)>("SELECT LAST_INSERT_ROWID()")
.load::<i32>(self.db_connection)? .load::<i32>(self.db_connection)?
.pop().expect("Statement must evaluate to an integer") .pop()
.expect("Statement must evaluate to an integer")
}; };
let slug = decide_slug(self.db_connection, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?; let slug = decide_slug(
self.db_connection,
article_id,
"",
&title,
target_slug.as_ref().map(|x| &**x),
)?;
let new_revision = 1; let new_revision = 1;
@ -384,12 +458,17 @@ impl<'a> SyncState<'a> {
Ok(article_revisions::table Ok(article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(new_revision)) .filter(article_revisions::revision.eq(new_revision))
.first::<models::ArticleRevision>(self.db_connection)? .first::<models::ArticleRevision>(self.db_connection)?)
)
}) })
} }
pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> Result<Vec<models::SearchResult>, Error> { pub fn search_query(
&self,
query_string: String,
limit: i32,
offset: i32,
snippet_size: i32,
) -> Result<Vec<models::SearchResult>, Error> {
use diesel::sql_query; use diesel::sql_query;
use diesel::sql_types::{Integer, Text}; use diesel::sql_types::{Integer, Text};
@ -427,7 +506,10 @@ impl<'a> SyncState<'a> {
} }
impl State { impl State {
pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State { pub fn new(
connection_pool: Pool<ConnectionManager<SqliteConnection>>,
cpu_pool: futures_cpupool::CpuPool,
) -> State {
State { State {
connection_pool, connection_pool,
cpu_pool, cpu_pool,
@ -437,7 +519,7 @@ impl State {
fn execute<F, T>(&self, f: F) -> CpuFuture<T, Error> fn execute<F, T>(&self, f: F) -> CpuFuture<T, Error>
where where
F: 'static + Sync + Send, F: 'static + Sync + Send,
for <'a> F: FnOnce(SyncState<'a>) -> Result<T, Error>, for<'a> F: FnOnce(SyncState<'a>) -> Result<T, Error>,
T: 'static + Send, T: 'static + Send,
{ {
let connection_pool = self.connection_pool.clone(); let connection_pool = self.connection_pool.clone();
@ -453,21 +535,30 @@ impl State {
self.execute(move |state| state.get_article_slug(article_id)) self.execute(move |state| state.get_article_slug(article_id))
} }
pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> { pub fn get_article_revision(
&self,
article_id: i32,
revision: i32,
) -> CpuFuture<Option<models::ArticleRevision>, Error> {
self.execute(move |state| state.get_article_revision(article_id, revision)) self.execute(move |state| state.get_article_revision(article_id, revision))
} }
pub fn query_article_revision_stubs<F>(&self, f: F) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> pub fn query_article_revision_stubs<F>(
&self,
f: F,
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
where where
F: 'static + Send + Sync, F: 'static + Send + Sync,
for <'a> F: for<'a> F: FnOnce(
FnOnce(article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>, ) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
{ {
self.execute(move |state| state.query_article_revision_stubs(f)) self.execute(move |state| state.query_article_revision_stubs(f))
} }
pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> { pub fn get_latest_article_revision_stubs(
&self,
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
self.query_article_revision_stubs(|query| { self.query_article_revision_stubs(|query| {
query query
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
@ -479,19 +570,38 @@ impl State {
self.execute(move |state| state.lookup_slug(slug)) self.execute(move |state| state.lookup_slug(slug))
} }
pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>, theme: Option<Theme>) pub fn update_article(
-> CpuFuture<UpdateResult, Error> &self,
{ article_id: i32,
self.execute(move |state| state.update_article(article_id, base_revision, title, body, author, theme)) base_revision: i32,
title: String,
body: String,
author: Option<String>,
theme: Option<Theme>,
) -> CpuFuture<UpdateResult, Error> {
self.execute(move |state| {
state.update_article(article_id, base_revision, title, body, author, theme)
})
} }
pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>, theme: Theme) pub fn create_article(
-> CpuFuture<models::ArticleRevision, Error> &self,
{ target_slug: Option<String>,
title: String,
body: String,
author: Option<String>,
theme: Theme,
) -> CpuFuture<models::ArticleRevision, Error> {
self.execute(move |state| state.create_article(target_slug, title, body, author, theme)) self.execute(move |state| state.create_article(target_slug, title, body, author, theme))
} }
pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> CpuFuture<Vec<models::SearchResult>, Error> { pub fn search_query(
&self,
query_string: String,
limit: i32,
offset: i32,
snippet_size: i32,
) -> CpuFuture<Vec<models::SearchResult>, Error> {
self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size)) self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size))
} }
} }
@ -505,7 +615,7 @@ mod test {
pub fn unwrap(self) -> models::ArticleRevision { pub fn unwrap(self) -> models::ArticleRevision {
match self { match self {
UpdateResult::Success(x) => x, UpdateResult::Success(x) => x,
_ => panic!("Expected success") _ => panic!("Expected success"),
} }
} }
} }
@ -514,7 +624,7 @@ mod test {
($state:ident) => { ($state:ident) => {
let db = db::test_connection(); let db = db::test_connection();
let $state = SyncState::new(&db); let $state = SyncState::new(&db);
} };
} }
#[test] #[test]
@ -526,7 +636,9 @@ mod test {
#[test] #[test]
fn create_article() { fn create_article() {
init!(state); init!(state);
let article_revision = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap(); let article_revision = state
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
assert_eq!("title", article_revision.slug); assert_eq!("title", article_revision.slug);
assert_eq!(true, article_revision.latest); assert_eq!(true, article_revision.latest);
assert_eq!(Theme::Cyan, article_revision.theme); assert_eq!(Theme::Cyan, article_revision.theme);
@ -536,7 +648,15 @@ mod test {
fn create_article_when_empty_slug_then_empty_slug() { fn create_article_when_empty_slug_then_empty_slug() {
// Front page gets to keep its empty slug // Front page gets to keep its empty slug
init!(state); init!(state);
let article_revision = state.create_article(Some("".into()), "Title".into(), "Body".into(), None, Theme::Cyan).unwrap(); let article_revision = state
.create_article(
Some("".into()),
"Title".into(),
"Body".into(),
None,
Theme::Cyan,
)
.unwrap();
assert_eq!("", article_revision.slug); assert_eq!("", article_revision.slug);
} }
@ -544,9 +664,21 @@ mod test {
fn update_article() { fn update_article() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
let new_revision = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None, Some(Theme::BlueGray)).unwrap().unwrap(); let new_revision = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"New body".into(),
None,
Some(Theme::BlueGray),
)
.unwrap()
.unwrap();
assert_eq!(article.article_id, new_revision.article_id); assert_eq!(article.article_id, new_revision.article_id);
@ -566,10 +698,32 @@ mod test {
fn update_article_when_sequential_edits_then_last_wins() { fn update_article_when_sequential_edits_then_last_wins() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None, Some(Theme::Blue)).unwrap().unwrap(); let first_edit = state
let second_edit = state.update_article(article.article_id, first_edit.revision, article.title.clone(), "Newer body".into(), None, Some(Theme::Amber)).unwrap().unwrap(); .update_article(
article.article_id,
article.revision,
article.title.clone(),
"New body".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
first_edit.revision,
article.title.clone(),
"Newer body".into(),
None,
Some(Theme::Amber),
)
.unwrap()
.unwrap();
assert_eq!("Newer body", second_edit.body); assert_eq!("Newer body", second_edit.body);
assert_eq!(Theme::Amber, second_edit.theme); assert_eq!(Theme::Amber, second_edit.theme);
@ -579,10 +733,32 @@ mod test {
fn update_article_when_edit_conflict_then_merge() { fn update_article_when_edit_conflict_then_merge() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None, Some(Theme::Blue)).unwrap().unwrap(); let first_edit = state
let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(Theme::Amber)).unwrap().unwrap(); .update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nx\nb\nc\n".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nb\ny\nc\n".into(),
None,
Some(Theme::Amber),
)
.unwrap()
.unwrap();
assert!(article.revision < first_edit.revision); assert!(article.revision < first_edit.revision);
assert!(first_edit.revision < second_edit.revision); assert!(first_edit.revision < second_edit.revision);
@ -595,13 +771,55 @@ mod test {
fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() { fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx1\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap(); let edit = state
let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap(); .update_article(
let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nx3\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap(); article.article_id,
article.revision,
article.title.clone(),
"a\nx1\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let edit = state
.update_article(
article.article_id,
edit.revision,
article.title.clone(),
"a\nx1\nx2\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let edit = state
.update_article(
article.article_id,
edit.revision,
article.title.clone(),
"a\nx1\nx2\nx3\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let rebase_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(article.theme)).unwrap().unwrap(); let rebase_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nb\ny\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
assert!(article.revision < edit.revision); assert!(article.revision < edit.revision);
assert!(edit.revision < rebase_edit.revision); assert!(edit.revision < rebase_edit.revision);
@ -613,10 +831,32 @@ mod test {
fn update_article_when_title_edit_conflict_then_merge_title() { fn update_article_when_title_edit_conflict_then_merge_title() {
init!(state); init!(state);
let article = state.create_article(None, "titlle".into(), "".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "titlle".into(), "".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state.update_article(article.article_id, article.revision, "Titlle".into(), article.body.clone(), None, Some(article.theme)).unwrap().unwrap(); let first_edit = state
let second_edit = state.update_article(article.article_id, article.revision, "title".into(), article.body.clone(), None, Some(article.theme)).unwrap().unwrap(); .update_article(
article.article_id,
article.revision,
"Titlle".into(),
article.body.clone(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
"title".into(),
article.body.clone(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
assert!(article.revision < first_edit.revision); assert!(article.revision < first_edit.revision);
assert!(first_edit.revision < second_edit.revision); assert!(first_edit.revision < second_edit.revision);
@ -628,19 +868,51 @@ mod test {
fn update_article_when_merge_conflict() { fn update_article_when_merge_conflict() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "a".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "a".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "b".into(), None, Some(Theme::Blue)).unwrap().unwrap(); let first_edit = state
let conflict_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "c".into(), None, Some(Theme::Amber)).unwrap(); .update_article(
article.article_id,
article.revision,
article.title.clone(),
"b".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let conflict_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"c".into(),
None,
Some(Theme::Amber),
)
.unwrap();
match conflict_edit { match conflict_edit {
UpdateResult::Success(..) => panic!("Expected conflict"), UpdateResult::Success(..) => panic!("Expected conflict"),
UpdateResult::RebaseConflict(RebaseConflict { base_article, title, body, theme }) => { UpdateResult::RebaseConflict(RebaseConflict {
base_article,
title,
body,
theme,
}) => {
assert_eq!(first_edit.revision, base_article.revision); assert_eq!(first_edit.revision, base_article.revision);
assert_eq!(title, merge::MergeResult::Clean(article.title.clone())); assert_eq!(title, merge::MergeResult::Clean(article.title.clone()));
assert_eq!(body, merge::MergeResult::Conflicted(vec![ assert_eq!(
merge::Output::Conflict(vec!["c"], vec!["a"], vec!["b"]), body,
]).to_strings()); merge::MergeResult::Conflicted(vec![merge::Output::Conflict(
vec!["c"],
vec!["a"],
vec!["b"]
),])
.to_strings()
);
assert_eq!(Theme::Amber, theme); assert_eq!(Theme::Amber, theme);
} }
}; };
@ -650,10 +922,32 @@ mod test {
fn update_article_when_theme_conflict_then_ignore_unchanged() { fn update_article_when_theme_conflict_then_ignore_unchanged() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let _first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None, Some(Theme::Blue)).unwrap().unwrap(); let _first_edit = state
let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(Theme::Cyan)).unwrap().unwrap(); .update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nx\nb\nc\n".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nb\ny\nc\n".into(),
None,
Some(Theme::Cyan),
)
.unwrap()
.unwrap();
assert_eq!(Theme::Blue, second_edit.theme); assert_eq!(Theme::Blue, second_edit.theme);
} }
@ -662,9 +956,21 @@ mod test {
fn update_article_with_no_given_theme_then_theme_unchanged() { fn update_article_with_no_given_theme_then_theme_unchanged() {
init!(state); init!(state);
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap(); let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let edit = state.update_article(article.article_id, article.revision, article.title, article.body, None, None).unwrap().unwrap(); let edit = state
.update_article(
article.article_id,
article.revision,
article.title,
article.body,
None,
None,
)
.unwrap()
.unwrap();
assert_eq!(Theme::Cyan, edit.theme); assert_eq!(Theme::Cyan, edit.theme);
} }
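
Taken together, the conflict tests above pin down the merge behaviour: edits to different lines merge cleanly and rebase across several intervening revisions, both sides changing the same content yields a RebaseConflict, and a field that one side merely resubmitted at its base value (the theme in update_article_when_theme_conflict_then_ignore_unchanged) does not override the side that actually changed it. The following is a toy, self-contained illustration of that last per-field rule only, not the crate's merge module; Merged and merge_scalar are made-up names.

#[derive(Debug, PartialEq)]
enum Merged<T> {
    Clean(T),
    Conflict { ours: T, theirs: T },
}

fn merge_scalar<T: PartialEq + Clone>(base: &T, ours: &T, theirs: &T) -> Merged<T> {
    if ours == theirs || theirs == base {
        // Either both sides agree, or the other side left the field at its
        // base value: our change wins cleanly.
        Merged::Clean(ours.clone())
    } else if ours == base {
        Merged::Clean(theirs.clone())
    } else {
        Merged::Conflict {
            ours: ours.clone(),
            theirs: theirs.clone(),
        }
    }
}

fn main() {
    // Mirrors update_article_when_theme_conflict_then_ignore_unchanged:
    // the second editor resubmitted the base value, so the first edit survives.
    assert_eq!(merge_scalar(&"cyan", &"blue", &"cyan"), Merged::Clean("blue"));
    // Mirrors update_article_when_merge_conflict: both sides changed the field.
    assert_eq!(
        merge_scalar(&"a", &"b", &"c"),
        Merged::Conflict {
            ours: "b",
            theirs: "c"
        }
    );
}
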


@ -8,9 +8,8 @@ use diesel::sqlite::Sqlite;
use rand; use rand;
use seahash; use seahash;
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)] // Serde
#[derive(Serialize, Deserialize)] // Serde #[serde(rename_all = "kebab-case")]
#[serde(rename_all="kebab-case")]
#[derive(AsExpression, FromSqlRow)] // Diesel #[derive(AsExpression, FromSqlRow)] // Diesel
#[sql_type = "Text"] #[sql_type = "Text"]
pub enum Theme { pub enum Theme {
@ -40,9 +39,10 @@ use self::Theme::*;
forward_display_to_serde!(Theme); forward_display_to_serde!(Theme);
forward_from_str_to_serde!(Theme); forward_from_str_to_serde!(Theme);
pub const THEMES: [Theme; 19] = [Red, Pink, Purple, DeepPurple, Indigo, Blue, pub const THEMES: [Theme; 19] = [
LightBlue, Cyan, Teal, Green, LightGreen, Lime, Yellow, Amber, Orange, Red, Pink, Purple, DeepPurple, Indigo, Blue, LightBlue, Cyan, Teal, Green, LightGreen, Lime,
DeepOrange, Brown, Gray, BlueGray]; Yellow, Amber, Orange, DeepOrange, Brown, Gray, BlueGray,
];
pub fn theme_from_str_hash(x: &str) -> Theme { pub fn theme_from_str_hash(x: &str) -> Theme {
let hash = seahash::hash(x.as_bytes()) as usize; let hash = seahash::hash(x.as_bytes()) as usize;
@ -52,7 +52,8 @@ pub fn theme_from_str_hash(x: &str) -> Theme {
pub fn random() -> Theme { pub fn random() -> Theme {
use rand::Rng; use rand::Rng;
*rand::thread_rng().choose(&THEMES) *rand::thread_rng()
.choose(&THEMES)
.expect("Could only fail for an empty slice") .expect("Could only fail for an empty slice")
} }
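
theme_from_str_hash above combines the THEMES constant with a seahash of its input; the visible code suggests the hash is simply reduced modulo the array length, so a given string always maps to the same theme. A small sketch under that assumption (the four-entry THEMES below stands in for the real nineteen-entry array, and the return type is a plain &str rather than the Theme enum):

extern crate seahash;

const THEMES: [&str; 4] = ["red", "green", "blue", "amber"];

fn theme_from_str_hash(x: &str) -> &'static str {
    // Hash, then index: unlike random(), this is deterministic per input.
    let hash = seahash::hash(x.as_bytes()) as usize;
    THEMES[hash % THEMES.len()]
}

fn main() {
    assert_eq!(
        theme_from_str_hash("front-page"),
        theme_from_str_hash("front-page")
    );
}
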
@ -73,7 +74,6 @@ impl FromSql<Text, Sqlite> for Theme {
} }
} }
pub struct CssClass(Theme); pub struct CssClass(Theme);
impl Theme { impl Theme {
@ -90,7 +90,6 @@ impl Display for CssClass {
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::error::Error; use std::error::Error;
@ -111,13 +110,18 @@ mod test {
#[test] #[test]
fn serialize_kebab_case() { fn serialize_kebab_case() {
assert_eq!(serde_plain::to_string(&Theme::LightGreen).unwrap(), "light-green"); assert_eq!(
serde_plain::to_string(&Theme::LightGreen).unwrap(),
"light-green"
);
} }
#[test] #[test]
fn serialize_json() { fn serialize_json() {
#[derive(Serialize)] #[derive(Serialize)]
struct Test { x: Theme } struct Test {
x: Theme,
}
assert_eq!( assert_eq!(
serde_json::to_string(&Test { x: Theme::Red }).unwrap(), serde_json::to_string(&Test { x: Theme::Red }).unwrap(),
"{\"x\":\"red\"}" "{\"x\":\"red\"}"
@ -127,7 +131,9 @@ mod test {
#[test] #[test]
fn deserialize_json() { fn deserialize_json() {
#[derive(Deserialize, Debug, PartialEq, Eq)] #[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test { x: Theme } struct Test {
x: Theme,
}
assert_eq!( assert_eq!(
serde_json::from_str::<Test>("{\"x\":\"red\"}").unwrap(), serde_json::from_str::<Test>("{\"x\":\"red\"}").unwrap(),
Test { x: Theme::Red } Test { x: Theme::Red }
@ -137,7 +143,9 @@ mod test {
#[test] #[test]
fn serialize_urlencoded() { fn serialize_urlencoded() {
#[derive(Serialize)] #[derive(Serialize)]
struct Test { x: Theme } struct Test {
x: Theme,
}
assert_eq!( assert_eq!(
serde_urlencoded::to_string(&Test { x: Theme::Red }).unwrap(), serde_urlencoded::to_string(&Test { x: Theme::Red }).unwrap(),
"x=red" "x=red"
@ -147,7 +155,9 @@ mod test {
#[test] #[test]
fn deserialize_urlencoded() { fn deserialize_urlencoded() {
#[derive(Deserialize, Debug, PartialEq, Eq)] #[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test { x: Theme } struct Test {
x: Theme,
}
assert_eq!( assert_eq!(
serde_urlencoded::from_str::<Test>("x=red").unwrap(), serde_urlencoded::from_str::<Test>("x=red").unwrap(),
Test { x: Theme::Red } Test { x: Theme::Red }
@ -192,13 +202,16 @@ mod test {
let conn = SqliteConnection::establish(":memory:")?; let conn = SqliteConnection::establish(":memory:")?;
#[derive(QueryableByName, PartialEq, Eq, Debug)] #[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] theme: Theme } struct Row {
#[sql_type = "Text"]
theme: Theme,
}
let res = sql_query("SELECT ? as theme") let res = sql_query("SELECT ? as theme")
.bind::<Text, _>(DeepPurple) .bind::<Text, _>(DeepPurple)
.load::<Row>(&conn)?; .load::<Row>(&conn)?;
assert_eq!(&[ Row { theme: DeepPurple } ], res.as_slice()); assert_eq!(&[Row { theme: DeepPurple }], res.as_slice());
Ok(()) Ok(())
} }
@ -208,14 +221,15 @@ mod test {
let conn = SqliteConnection::establish(":memory:")?; let conn = SqliteConnection::establish(":memory:")?;
#[derive(QueryableByName, PartialEq, Eq, Debug)] #[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] theme: Theme } struct Row {
#[sql_type = "Text"]
theme: Theme,
}
let res = sql_query("SELECT 'green' as theme") let res = sql_query("SELECT 'green' as theme").load::<Row>(&conn);
.load::<Row>(&conn);
assert!(res.is_ok()); assert!(res.is_ok());
let res = sql_query("SELECT 'blueish-yellow' as theme") let res = sql_query("SELECT 'blueish-yellow' as theme").load::<Row>(&conn);
.load::<Row>(&conn);
assert!(res.is_err()); assert!(res.is_err());
Ok(()) Ok(())


@ -3,7 +3,7 @@ use futures;
pub trait Lookup { pub trait Lookup {
type Resource; type Resource;
type Error; type Error;
type Future: futures::Future<Item=Option<Self::Resource>, Error=Self::Error>; type Future: futures::Future<Item = Option<Self::Resource>, Error = Self::Error>;
fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future; fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future;
} }
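
The Lookup trait above is the routing abstraction: given a path and an optional query string, an implementor resolves asynchronously to Some(resource) or None, in the futures 0.1 Item/Error style this codebase targets. A hedged sketch of an implementor follows; StaticLookup is hypothetical, and the trait is restated only to keep the example self-contained.

use futures::future::{self, FutureResult};

pub trait Lookup {
    type Resource;
    type Error;
    type Future: futures::Future<Item = Option<Self::Resource>, Error = Self::Error>;
    fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future;
}

// A resolver that can answer immediately gets to pick a concrete,
// allocation-free type for the associated Future.
struct StaticLookup;

impl Lookup for StaticLookup {
    type Resource = &'static str;
    type Error = ();
    type Future = FutureResult<Option<Self::Resource>, Self::Error>;

    fn lookup(&self, path: &str, _query: Option<&str>) -> Self::Future {
        future::ok(match path {
            "" => Some("front page"),
            _ => None,
        })
    }
}

Because Future is an associated type, trivial implementors can use FutureResult and resolve on the spot, while WikiLookup below returns boxed futures that wait on database queries.
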


@ -1,5 +1,5 @@
mod resource;
mod lookup; mod lookup;
mod resource;
pub use self::resource::*;
pub use self::lookup::*; pub use self::lookup::*;
pub use self::resource::*;


@ -1,7 +1,7 @@
use futures; use futures;
use futures::{Future, Stream}; use futures::{Future, Stream};
use hyper::{self, header, mime, server};
use hyper::server::Response; use hyper::server::Response;
use hyper::{self, header, mime, server};
use std; use std;
lazy_static! { lazy_static! {
@ -23,22 +23,24 @@ pub trait Resource {
} }
fn put(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture fn put(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
where Self: 'static where
Self: 'static,
{ {
Box::new(body Box::new(
.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) }) body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
.map_err(Into::into) .map_err(Into::into)
.and_then(move |_| futures::finished(self.method_not_allowed())) .and_then(move |_| futures::finished(self.method_not_allowed())),
) )
} }
fn post(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture fn post(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
where Self: 'static where
Self: 'static,
{ {
Box::new(body Box::new(
.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) }) body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
.map_err(Into::into) .map_err(Into::into)
.and_then(move |_| futures::finished(self.method_not_allowed())) .and_then(move |_| futures::finished(self.method_not_allowed())),
) )
} }
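
The default put and post above share one pattern: fold the request body to drain it completely, then answer with method_not_allowed(), presumably so the whole request has been consumed before the 405 goes out. A generic sketch of that drain-then-reply shape in futures 0.1 terms; drain_then is a made-up helper and the concrete hyper types are left out.

use futures::{Future, Stream};

fn drain_then<S, R, F>(body: S, reply: F) -> impl Future<Item = R, Error = S::Error>
where
    S: Stream,
    F: FnOnce() -> R,
{
    // Consume every chunk, discarding it, then produce the reply.
    body.fold((), |_, _| Ok::<(), S::Error>(()))
        .and_then(move |_| Ok(reply()))
}

In the crate the stream is hyper::Body, the error is converted with map_err(Into::into), and the closure would build the 405 Response.
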


@ -2,8 +2,8 @@ use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::str::Utf8Error; use std::str::Utf8Error;
use futures::{Future, finished, failed, done};
use futures::future::FutureResult; use futures::future::FutureResult;
use futures::{done, failed, finished, Future};
use percent_encoding::percent_decode; use percent_encoding::percent_decode;
use slug::slugify; use slug::slugify;
@ -18,7 +18,7 @@ type BoxResource = Box<dyn Resource + Sync + Send>;
type ResourceFn = Box<dyn Fn() -> BoxResource + Sync + Send>; type ResourceFn = Box<dyn Fn() -> BoxResource + Sync + Send>;
lazy_static! { lazy_static! {
static ref LICENSES_MAP: HashMap<&'static str, ResourceFn> = hashmap!{ static ref LICENSES_MAP: HashMap<&'static str, ResourceFn> = hashmap! {
"bsd-3-clause" => Box::new(|| Box::new( "bsd-3-clause" => Box::new(|| Box::new(
HtmlResource::new(Some("../"), "The 3-Clause BSD License", include_str!("licenses/bsd-3-clause.html")) HtmlResource::new(Some("../"), "The 3-Clause BSD License", include_str!("licenses/bsd-3-clause.html"))
) as BoxResource) as ResourceFn, ) as BoxResource) as ResourceFn,
@ -54,9 +54,10 @@ fn split_one(path: &str) -> Result<(Cow<str>, Option<&str>), Utf8Error> {
Ok((head, tail)) Ok((head, tail))
} }
fn map_lookup(map: &HashMap<&str, ResourceFn>, path: &str) -> fn map_lookup(
FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> map: &HashMap<&str, ResourceFn>,
{ path: &str,
) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
let (head, tail) = match split_one(path) { let (head, tail) = match split_one(path) {
Ok(x) => x, Ok(x) => x,
Err(x) => return failed(x.into()), Err(x) => return failed(x.into()),
@ -73,9 +74,10 @@ fn map_lookup(map: &HashMap<&str, ResourceFn>, path: &str) ->
} }
#[allow(unused)] #[allow(unused)]
fn fs_lookup(root: &str, path: &str) -> fn fs_lookup(
FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> root: &str,
{ path: &str,
) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
use std::fs::File; use std::fs::File;
use std::io::prelude::*; use std::io::prelude::*;
@ -87,17 +89,17 @@ fn fs_lookup(root: &str, path: &str) ->
Some("js") => "application/javascript", Some("js") => "application/javascript",
Some("woff") => "application/font-woff", Some("woff") => "application/font-woff",
_ => "application/binary", _ => "application/binary",
}.parse().unwrap(); }
.parse()
.unwrap();
let mut filename = root.to_string(); let mut filename = root.to_string();
filename.push_str(path); filename.push_str(path);
let mut f = File::open(&filename) let mut f = File::open(&filename).unwrap_or_else(|_| panic!("Not found: {}", filename));
.unwrap_or_else(|_| panic!("Not found: {}", filename));
let mut body = Vec::new(); let mut body = Vec::new();
f.read_to_end(&mut body) f.read_to_end(&mut body).expect("Unable to read file");
.expect("Unable to read file");
finished(Some(Box::new(ReadOnlyResource { content_type, body }))) finished(Some(Box::new(ReadOnlyResource { content_type, body })))
} }
@ -108,7 +110,12 @@ impl WikiLookup {
let diff_lookup = DiffLookup::new(state.clone()); let diff_lookup = DiffLookup::new(state.clone());
let search_lookup = SearchLookup::new(state.clone()); let search_lookup = SearchLookup::new(state.clone());
WikiLookup { state, changes_lookup, diff_lookup, search_lookup } WikiLookup {
state,
changes_lookup,
diff_lookup,
search_lookup,
}
} }
fn revisions_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Lookup>::Future { fn revisions_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Lookup>::Future {
@ -126,12 +133,12 @@ impl WikiLookup {
}; };
Box::new( Box::new(
self.state.get_article_revision(article_id, revision) self.state
.and_then(|article_revision| .get_article_revision(article_id, revision)
Ok(article_revision.map(move |x| Box::new( .and_then(|article_revision| {
ArticleRevisionResource::new(x) Ok(article_revision
) as BoxResource)) .map(move |x| Box::new(ArticleRevisionResource::new(x)) as BoxResource))
) }),
) )
} }
@ -148,14 +155,11 @@ impl WikiLookup {
Err(_) => return Box::new(finished(None)), Err(_) => return Box::new(finished(None)),
}; };
Box::new( Box::new(self.state.get_article_slug(article_id).and_then(|slug| {
self.state.get_article_slug(article_id) Ok(slug.map(|slug| {
.and_then(|slug| Box::new(TemporaryRedirectResource::new(format!("../{}", slug))) as BoxResource
Ok(slug.map(|slug| Box::new( }))
TemporaryRedirectResource::new(format!("../{}", slug)) }))
) as BoxResource))
)
)
} }
fn diff_lookup_f(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future { fn diff_lookup_f(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future {
@ -181,30 +185,30 @@ impl WikiLookup {
}; };
match (head.as_ref(), tail) { match (head.as_ref(), tail) {
("_about", None) => ("_about", None) => Box::new(finished(Some(
Box::new(finished(Some(Box::new(AboutResource::new()) as BoxResource))), Box::new(AboutResource::new()) as BoxResource
("_about", Some(license)) => ))),
Box::new(map_lookup(&LICENSES_MAP, license)), ("_about", Some(license)) => Box::new(map_lookup(&LICENSES_MAP, license)),
#[cfg(feature="dynamic-assets")] #[cfg(feature = "dynamic-assets")]
("_assets", Some(asset)) => ("_assets", Some(asset)) => Box::new(fs_lookup(
Box::new(fs_lookup(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"), asset)), concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"),
#[cfg(not(feature="dynamic-assets"))] asset,
("_assets", Some(asset)) => )),
Box::new(map_lookup(&ASSETS_MAP, asset)), #[cfg(not(feature = "dynamic-assets"))]
("_by_id", Some(tail)) => ("_assets", Some(asset)) => Box::new(map_lookup(&ASSETS_MAP, asset)),
self.by_id_lookup(tail, query), ("_by_id", Some(tail)) => self.by_id_lookup(tail, query),
("_changes", None) => ("_changes", None) => Box::new(self.changes_lookup.lookup(query)),
Box::new(self.changes_lookup.lookup(query)), ("_diff", Some(tail)) => self.diff_lookup_f(tail, query),
("_diff", Some(tail)) => ("_new", None) => Box::new(finished(Some(Box::new(NewArticleResource::new(
self.diff_lookup_f(tail, query), self.state.clone(),
("_new", None) => None,
Box::new(finished(Some(Box::new(NewArticleResource::new(self.state.clone(), None, true)) as BoxResource))), true,
("_revisions", Some(tail)) => )) as BoxResource))),
self.revisions_lookup(tail, query), ("_revisions", Some(tail)) => self.revisions_lookup(tail, query),
("_search", None) => ("_search", None) => Box::new(done(self.search_lookup.lookup(query))),
Box::new(done(self.search_lookup.lookup(query))), ("_sitemap", None) => Box::new(finished(Some(Box::new(SitemapResource::new(
("_sitemap", None) => self.state.clone(),
Box::new(finished(Some(Box::new(SitemapResource::new(self.state.clone())) as BoxResource))), )) as BoxResource))),
_ => Box::new(finished(None)), _ => Box::new(finished(None)),
} }
} }
@ -226,7 +230,7 @@ impl WikiLookup {
let slugified_slug = slugify(&slug); let slugified_slug = slugify(&slug);
if slugified_slug != slug { if slugified_slug != slug {
return Box::new(finished(Some( return Box::new(finished(Some(
Box::new(TemporaryRedirectResource::from_slug(slugified_slug, edit)) as BoxResource Box::new(TemporaryRedirectResource::from_slug(slugified_slug, edit)) as BoxResource,
))); )));
} }
@ -234,16 +238,22 @@ impl WikiLookup {
let slug = slug.into_owned(); let slug = slug.into_owned();
use crate::state::SlugLookup; use crate::state::SlugLookup;
Box::new(self.state.lookup_slug(slug.clone()) Box::new(self.state.lookup_slug(slug.clone()).and_then(move |x| {
.and_then(move |x| Ok(Some(match x { Ok(Some(match x {
SlugLookup::Miss => SlugLookup::Miss => {
Box::new(NewArticleResource::new(state, Some(slug), edit)) as BoxResource, Box::new(NewArticleResource::new(state, Some(slug), edit)) as BoxResource
SlugLookup::Hit { article_id, revision } => }
Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource, SlugLookup::Hit {
SlugLookup::Redirect(slug) => article_id,
Box::new(TemporaryRedirectResource::from_slug(slug, edit)) as BoxResource, revision,
}))) } => {
) Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource
}
SlugLookup::Redirect(slug) => {
Box::new(TemporaryRedirectResource::from_slug(slug, edit)) as BoxResource
}
}))
}))
} }
} }
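
WikiLookup::lookup, reformatted above, dispatches by peeling one segment off the front of the path and matching on (head, tail): reserved underscore-prefixed heads such as _about, _assets, _by_id and _revisions map to internal resources, and everything else falls through to slug lookup with slugification and redirects. A stripped-down sketch of that dispatch shape, with no percent decoding, Cow or futures; every name below is illustrative.

fn split_one(path: &str) -> (&str, Option<&str>) {
    // First segment plus the remainder, if any.
    match path.find('/') {
        Some(idx) => (&path[..idx], Some(&path[idx + 1..])),
        None => (path, None),
    }
}

fn route(path: &str) -> String {
    match split_one(path) {
        ("_about", None) => "about page".to_string(),
        ("_assets", Some(asset)) => format!("static asset {}", asset),
        ("_revisions", Some(tail)) => format!("revision listing for {}", tail),
        // Anything that is not a reserved head is treated as an article slug.
        (slug, None) => format!("article lookup for slug {:?}", slug),
        _ => "not found".to_string(),
    }
}

fn main() {
    assert_eq!(route("_assets/style.css"), "static asset style.css");
    assert_eq!(route("front-page"), "article lookup for slug \"front-page\"");
}
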