cargo fmt
parent 26fe2b64da
commit 9f80ced3ec
35 changed files with 1808 additions and 1167 deletions
build.rs (50 changed lines)
@@ -3,13 +3,15 @@
// 1.3.0.
#![allow(proc_macro_derive_resolution_fallback)]

#[macro_use] extern crate quote;
#[macro_use] extern crate diesel;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate diesel;
extern crate diesel_migrations;
extern crate walkdir;

use diesel::Connection;
use diesel::prelude::*;
use diesel::Connection;
use std::env;
use std::fs::File;
use std::io::prelude::*;
@@ -26,27 +28,38 @@ mod sqlfunc {
fn main() {
let out_dir = env::var("OUT_DIR").expect("cargo must set OUT_DIR");
let db_path = Path::new(&out_dir).join("build.db");
let db_path = db_path.to_str().expect("Will only work for Unicode-representable paths");
let db_path = db_path
.to_str()
.expect("Will only work for Unicode-representable paths");

let _ignore_failure = std::fs::remove_file(db_path);

let connection = SqliteConnection::establish(db_path)
.expect(&format!("Error esablishing a database connection to {}", db_path));
let connection = SqliteConnection::establish(db_path).expect(&format!(
"Error esablishing a database connection to {}",
db_path
));

// Integer is a dummy placeholder. Compiling fails when passing ().
diesel::expression::sql_literal::sql::<(diesel::sql_types::Integer)>("PRAGMA foreign_keys = ON")
diesel::expression::sql_literal::sql::<(diesel::sql_types::Integer)>(
"PRAGMA foreign_keys = ON",
)
.execute(&connection)
.expect("Should be able to enable foreign keys");

sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() }).unwrap();
sqlfunc::theme_from_str_hash::register_impl(&connection, |_: String| -> String { unreachable!() }).unwrap();
sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() })
.unwrap();
sqlfunc::theme_from_str_hash::register_impl(&connection, |_: String| -> String {
unreachable!()
})
.unwrap();

diesel_migrations::run_pending_migrations(&connection).unwrap();

let infer_schema_path = Path::new(&out_dir).join("infer_schema.rs");
let mut file = File::create(infer_schema_path).expect("Unable to open file for writing");

file.write_all(quote! {
file.write_all(
quote! {
mod __diesel_infer_schema_articles {
infer_table_from_schema!(#db_path, "articles");
}
@@ -56,18 +69,21 @@ fn main() {
infer_table_from_schema!(#db_path, "article_revisions");
}
pub use self::__diesel_infer_schema_article_revisions::*;
}.as_str().as_bytes()).expect("Unable to write to file");
}
.as_str()
.as_bytes(),
)
.expect("Unable to write to file");

for entry in WalkDir::new("migrations").into_iter().filter_map(|e| e.ok()) {
for entry in WalkDir::new("migrations")
.into_iter()
.filter_map(|e| e.ok())
{
println!("cargo:rerun-if-changed={}", entry.path().display());
}

// For build_config.rs
for env_var in &[
"CONTINUOUS_INTEGRATION",
"TRAVIS_BRANCH",
"TRAVIS_COMMIT",
] {
for env_var in &["CONTINUOUS_INTEGRATION", "TRAVIS_BRANCH", "TRAVIS_COMMIT"] {
println!("cargo:rerun-if-env-changed={}", env_var);
}
}
@@ -1,11 +1,13 @@
#![recursion_limit = "128"]

#[macro_use] extern crate quote;
#[macro_use] extern crate serde_derive;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate serde_derive;
extern crate base64;
extern crate proc_macro;
extern crate serde_json;
extern crate serde;
extern crate serde_json;
extern crate sha2;
extern crate syn;

@@ -2,13 +2,10 @@ use std::fs::File;

use proc_macro::TokenStream;
use quote;
use serde_json;
use serde::de::IgnoredAny;
use serde_json;

const SOURCES: &[&str] = &[
"src/licenses/license-hound.json",
"src/licenses/other.json",
];
const SOURCES: &[&str] = &["src/licenses/license-hound.json", "src/licenses/other.json"];

#[derive(Debug, Copy, Clone, Deserialize)]
pub enum LicenseId {
@@ -56,8 +53,12 @@ struct LicenseReport {
impl quote::ToTokens for LicenseReport {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
let c: &LicenseDescription = self.conclusion.as_ref().unwrap();
let (name, link, copyright, license) =
(&self.package_name, &c.link, &c.copyright_notice, &c.chosen_license);
let (name, link, copyright, license) = (
&self.package_name,
&c.link,
&c.copyright_notice,
&c.chosen_license,
);

let link = match link {
&Some(ref link) => quote! { Some(#link) },
@@ -85,7 +86,10 @@ pub fn licenses(_input: TokenStream) -> TokenStream {
.iter()
.map(|x| -> Vec<LicenseReport> { serde_json::from_reader(File::open(x).unwrap()).unwrap() })
.map(|x| x.into_iter().filter(|x| x.conclusion.is_ok()))
.fold(vec![], |mut a, b| { a.extend(b); a });
.fold(vec![], |mut a, b| {
a.extend(b);
a
});

license_infos.sort_unstable_by_key(|x| x.package_name.to_lowercase());

@@ -11,29 +11,28 @@ fn user_crate_root() -> PathBuf {
}

fn find_attr<'a>(attrs: &'a Vec<syn::Attribute>, name: &str) -> Option<&'a str> {
attrs.iter()
attrs
.iter()
.find(|&x| x.name() == name)
.and_then(|ref attr| match &attr.value {
&syn::MetaItem::NameValue(_, syn::Lit::Str(ref template, _)) => Some(template),
_ => None
_ => None,
})
.map(|x| x.as_ref())
}

fn buf_file<P: AsRef<Path>>(filename: P) -> Vec<u8> {
let mut f = File::open(filename)
.expect("Unable to open file for reading");
let mut f = File::open(filename).expect("Unable to open file for reading");

let mut buf = Vec::new();
f.read_to_end(&mut buf)
.expect("Unable to read file");
f.read_to_end(&mut buf).expect("Unable to read file");

buf
}

fn calculate_checksum<P: AsRef<Path>>(filename: P) -> String {
use base64::*;
use sha2::{Sha256, Digest};
use sha2::{Digest, Sha256};

encode_config(&Sha256::digest(&buf_file(filename)), URL_SAFE)
}
@@ -42,23 +41,24 @@ pub fn static_resource(input: TokenStream) -> TokenStream {
let s = input.to_string();
let ast = syn::parse_macro_input(&s).unwrap();

let filename = find_attr(&ast.attrs, "filename")
.expect("The `filename` attribute must be specified");
let filename =
find_attr(&ast.attrs, "filename").expect("The `filename` attribute must be specified");
let abs_filename = user_crate_root().join(filename);
let abs_filename = abs_filename.to_str().expect("Absolute file path must be valid Unicode");
let abs_filename = abs_filename
.to_str()
.expect("Absolute file path must be valid Unicode");

let checksum = calculate_checksum(&abs_filename);

let path: &Path = filename.as_ref();
let resource_name =
format!("{}-{}.{}",
let resource_name = format!(
"{}-{}.{}",
path.file_stem().unwrap().to_str().unwrap(),
checksum,
path.extension().unwrap().to_str().unwrap()
);

let mime = find_attr(&ast.attrs, "mime")
.expect("The `mime` attribute must be specified");
let mime = find_attr(&ast.attrs, "mime").expect("The `mime` attribute must be specified");

let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
@@ -1,8 +1,8 @@
#[cfg(not(feature = "dynamic-assets"))]
mod static_assets {
use std::collections::HashMap;
use futures::Future;
use crate::web::{Resource, ResponseFuture};
use futures::Future;
use std::collections::HashMap;

// The CSS should be built to a single CSS file at compile time
#[derive(StaticResource)]
@@ -59,22 +59,30 @@ pub use self::static_assets::*;
mod dynamic_assets {
pub struct ThemesCss;
impl ThemesCss {
pub fn resource_name() -> &'static str { "themes.css" }
pub fn resource_name() -> &'static str {
"themes.css"
}
}

pub struct StyleCss;
impl StyleCss {
pub fn resource_name() -> &'static str { "style.css" }
pub fn resource_name() -> &'static str {
"style.css"
}
}

pub struct ScriptJs;
impl ScriptJs {
pub fn resource_name() -> &'static str { "script.js" }
pub fn resource_name() -> &'static str {
"script.js"
}
}

pub struct SearchJs;
impl SearchJs {
pub fn resource_name() -> &'static str { "search.js" }
pub fn resource_name() -> &'static str {
"search.js"
}
}
}

@@ -32,14 +32,12 @@ lazy_static! {
}

if let Some(commit) = option_env!("TRAVIS_COMMIT") {
components.push(format!("commit:{}",
components.push(format!(
"commit:{}",
commit
.as_bytes()
.chunks(4)
.map(|x|
String::from_utf8(x.to_owned())
.unwrap_or_else(|_| String::new())
)
.map(|x| String::from_utf8(x.to_owned()).unwrap_or_else(|_| String::new()))
.collect::<Vec<_>>()
.join(SOFT_HYPHEN)
));
@@ -51,7 +49,5 @@ lazy_static! {
env!("CARGO_PKG_VERSION").to_string()
}
}();

pub static ref HTTP_SERVER: String =
format!("{}/{}", PROJECT_NAME, VERSION.as_str());
pub static ref HTTP_SERVER: String = format!("{}/{}", PROJECT_NAME, VERSION.as_str());
}
src/db.rs (32 changed lines)
@@ -1,5 +1,5 @@
use diesel::prelude::*;
use diesel::expression::sql_literal::sql;
use diesel::prelude::*;
use diesel::sql_types::*;
use r2d2::{CustomizeConnection, Pool};
use r2d2_diesel::{self, ConnectionManager};
@@ -25,21 +25,23 @@ impl CustomizeConnection<SqliteConnection, r2d2_diesel::Error> for SqliteInitial
.execute(conn)
.map_err(|x| r2d2_diesel::Error::QueryError(x))?;

sqlfunc::markdown_to_fts::register_impl(
conn,
|text: String| rendering::render_markdown_for_fts(&text)
).map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::markdown_to_fts::register_impl(conn, |text: String| {
rendering::render_markdown_for_fts(&text)
})
.map_err(|x| r2d2_diesel::Error::QueryError(x))?;

sqlfunc::theme_from_str_hash::register_impl(
conn,
|title: String| theme::theme_from_str_hash(&title)
).map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::theme_from_str_hash::register_impl(conn, |title: String| {
theme::theme_from_str_hash(&title)
})
.map_err(|x| r2d2_diesel::Error::QueryError(x))?;

Ok(())
}
}

pub fn create_pool<S: Into<String>>(connection_string: S) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<dyn (::std::error::Error)>> {
pub fn create_pool<S: Into<String>>(
connection_string: S,
) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<dyn (::std::error::Error)>> {
let manager = ConnectionManager::<SqliteConnection>::new(connection_string);
let pool = Pool::builder()
.connection_customizer(Box::new(SqliteInitializer {}))
@@ -72,7 +74,10 @@ mod test {
let conn = test_connection();

#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] text: String }
struct Row {
#[sql_type = "Text"]
text: String,
}

let res = sql_query("SELECT markdown_to_fts('[link](url)') as text")
.load::<Row>(&conn)
@@ -88,7 +93,10 @@ mod test {
let conn = test_connection();

#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] theme: theme::Theme }
struct Row {
#[sql_type = "Text"]
theme: theme::Theme,
}

let res = sql_query("SELECT theme_from_str_hash('Bartefjes') as theme")
.load::<Row>(&conn)
src/lib.rs (65 changed lines)
|
@ -1,37 +1,53 @@
|
|||
#![recursion_limit="128"] // for diesel's infer_schema!
|
||||
#![recursion_limit = "128"]
|
||||
// for diesel's infer_schema!
|
||||
|
||||
// Diesel causes many warnings of the following kind. I expect this to be
|
||||
// fixed in a future release of Diesel. Currently used version of Diesel is
|
||||
// 1.3.0.
|
||||
#![allow(proc_macro_derive_resolution_fallback)]
|
||||
|
||||
#[cfg(test)] #[macro_use] extern crate matches;
|
||||
#[cfg(test)] #[macro_use] extern crate indoc;
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate matches;
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate indoc;
|
||||
|
||||
#[macro_use] extern crate bart_derive;
|
||||
#[macro_use] extern crate codegen;
|
||||
#[macro_use] #[allow(deprecated)] extern crate diesel_infer_schema;
|
||||
#[macro_use] extern crate diesel_migrations;
|
||||
#[macro_use] extern crate diesel;
|
||||
#[macro_use] extern crate hyper;
|
||||
#[macro_use] extern crate lazy_static;
|
||||
#[macro_use] extern crate maplit;
|
||||
#[macro_use] extern crate serde_derive;
|
||||
#[macro_use] extern crate serde_plain;
|
||||
#[macro_use]
|
||||
extern crate bart_derive;
|
||||
#[macro_use]
|
||||
extern crate codegen;
|
||||
#[macro_use]
|
||||
#[allow(deprecated)]
|
||||
extern crate diesel_infer_schema;
|
||||
#[macro_use]
|
||||
extern crate diesel_migrations;
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate hyper;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate maplit;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
#[macro_use]
|
||||
extern crate serde_plain;
|
||||
|
||||
extern crate chrono;
|
||||
extern crate diff;
|
||||
extern crate futures_cpupool;
|
||||
extern crate futures;
|
||||
extern crate futures_cpupool;
|
||||
extern crate percent_encoding;
|
||||
extern crate pulldown_cmark;
|
||||
extern crate r2d2_diesel;
|
||||
extern crate r2d2;
|
||||
extern crate r2d2_diesel;
|
||||
extern crate rand;
|
||||
extern crate seahash;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
extern crate serde_urlencoded;
|
||||
extern crate serde;
|
||||
extern crate slug;
|
||||
extern crate titlecase;
|
||||
|
||||
|
@ -52,19 +68,22 @@ mod theme;
|
|||
mod web;
|
||||
mod wiki_lookup;
|
||||
|
||||
pub fn main(db_file: String, bind_host: IpAddr, bind_port: u16, trust_identity: bool) -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub fn main(
|
||||
db_file: String,
|
||||
bind_host: IpAddr,
|
||||
bind_port: u16,
|
||||
trust_identity: bool,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let db_pool = db::create_pool(db_file)?;
|
||||
let cpu_pool = futures_cpupool::CpuPool::new_num_cpus();
|
||||
|
||||
let state = state::State::new(db_pool, cpu_pool);
|
||||
let lookup = wiki_lookup::WikiLookup::new(state, trust_identity);
|
||||
|
||||
let server =
|
||||
hyper::server::Http::new()
|
||||
.bind(
|
||||
&SocketAddr::new(bind_host, bind_port),
|
||||
move || Ok(site::Site::new(lookup.clone(), trust_identity))
|
||||
)?;
|
||||
let server = hyper::server::Http::new()
|
||||
.bind(&SocketAddr::new(bind_host, bind_port), move || {
|
||||
Ok(site::Site::new(lookup.clone(), trust_identity))
|
||||
})?;
|
||||
|
||||
println!("Listening on http://{}", server.local_addr().unwrap());
|
||||
|
||||
|
|
src/main.rs (50 changed lines)
|
@ -1,4 +1,5 @@
|
|||
#[macro_use] extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
extern crate clap;
|
||||
extern crate sausagewiki;
|
||||
|
||||
|
@ -18,34 +19,44 @@ fn args<'a>() -> clap::ArgMatches<'a> {
|
|||
App::new(PROJECT_NAME)
|
||||
.version(VERSION.as_str())
|
||||
.about(env!("CARGO_PKG_DESCRIPTION"))
|
||||
.arg(Arg::with_name(DATABASE)
|
||||
.arg(
|
||||
Arg::with_name(DATABASE)
|
||||
.help("Sets the database file to use")
|
||||
.required(true))
|
||||
.arg(Arg::with_name(PORT)
|
||||
.required(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(PORT)
|
||||
.help("Sets the listening port")
|
||||
.short("p")
|
||||
.long(PORT)
|
||||
.default_value("8080")
|
||||
.validator(|x| match x.parse::<u16>() {
|
||||
Ok(_) => Ok(()),
|
||||
Err(_) => Err("Must be an integer in the range [0, 65535]".into())
|
||||
Err(_) => Err("Must be an integer in the range [0, 65535]".into()),
|
||||
})
|
||||
.takes_value(true))
|
||||
.arg(Arg::with_name(ADDRESS)
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(ADDRESS)
|
||||
.help("Sets the IP address to bind to")
|
||||
.short("a")
|
||||
.long(ADDRESS)
|
||||
.default_value("127.0.0.1")
|
||||
.validator(|x| match x.parse::<IpAddr>() {
|
||||
Ok(_) => Ok(()),
|
||||
Err(_) => Err("Must be a valid IP address".into())
|
||||
Err(_) => Err("Must be a valid IP address".into()),
|
||||
})
|
||||
.takes_value(true))
|
||||
.arg(Arg::with_name(TRUST_IDENTITY)
|
||||
.help("Trust the value in the X-Identity header to be an \
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(TRUST_IDENTITY)
|
||||
.help(
|
||||
"Trust the value in the X-Identity header to be an \
|
||||
authenticated username. This only makes sense when Sausagewiki \
|
||||
runs behind a reverse proxy which sets this header.")
|
||||
.long(TRUST_IDENTITY))
|
||||
runs behind a reverse proxy which sets this header.",
|
||||
)
|
||||
.long(TRUST_IDENTITY),
|
||||
)
|
||||
.get_matches()
|
||||
}
|
||||
|
||||
|
@ -55,15 +66,14 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
const CLAP: &str = "Guaranteed by clap";
|
||||
const VALIDATOR: &str = "Guaranteed by clap validator";
|
||||
let db_file = args.value_of(DATABASE).expect(CLAP).to_owned();
|
||||
let bind_host = args.value_of(ADDRESS).expect(CLAP).parse().expect(VALIDATOR);
|
||||
let bind_host = args
|
||||
.value_of(ADDRESS)
|
||||
.expect(CLAP)
|
||||
.parse()
|
||||
.expect(VALIDATOR);
|
||||
let bind_port = args.value_of(PORT).expect(CLAP).parse().expect(VALIDATOR);
|
||||
|
||||
let trust_identity = args.is_present(TRUST_IDENTITY);
|
||||
|
||||
sausagewiki::main(
|
||||
db_file,
|
||||
bind_host,
|
||||
bind_port,
|
||||
trust_identity,
|
||||
)
|
||||
sausagewiki::main(db_file, bind_host, bind_port, trust_identity)
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
use std::fmt::Debug;
|
||||
use diff;
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct Chunk<'a, Item: 'a + Debug + PartialEq + Copy>(
|
||||
pub &'a [diff::Result<Item>],
|
||||
pub &'a [diff::Result<Item>]
|
||||
pub &'a [diff::Result<Item>],
|
||||
);
|
||||
|
|
|
@ -7,7 +7,7 @@ use super::chunk::Chunk;
|
|||
|
||||
pub struct ChunkIterator<'a, Item>
|
||||
where
|
||||
Item: 'a + Debug + PartialEq
|
||||
Item: 'a + Debug + PartialEq,
|
||||
{
|
||||
left: &'a [diff::Result<Item>],
|
||||
right: &'a [diff::Result<Item>],
|
||||
|
@ -15,16 +15,19 @@ where
|
|||
|
||||
impl<'a, Item> ChunkIterator<'a, Item>
|
||||
where
|
||||
Item: 'a + Debug + PartialEq + Eq
|
||||
Item: 'a + Debug + PartialEq + Eq,
|
||||
{
|
||||
pub fn new(left: &'a [diff::Result<Item>], right: &'a [diff::Result<Item>]) -> ChunkIterator<'a, Item> {
|
||||
pub fn new(
|
||||
left: &'a [diff::Result<Item>],
|
||||
right: &'a [diff::Result<Item>],
|
||||
) -> ChunkIterator<'a, Item> {
|
||||
ChunkIterator { left, right }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, Item> Iterator for ChunkIterator<'a, Item>
|
||||
where
|
||||
Item: 'a + Debug + PartialEq + Copy
|
||||
Item: 'a + Debug + PartialEq + Copy,
|
||||
{
|
||||
type Item = Chunk<'a, Item>;
|
||||
|
||||
|
@ -46,18 +49,18 @@ where
|
|||
match (self.left.get(li), self.right.get(ri)) {
|
||||
(Some(&Right(_)), _) => {
|
||||
li += 1;
|
||||
},
|
||||
}
|
||||
(_, Some(&Right(_))) => {
|
||||
ri += 1;
|
||||
},
|
||||
}
|
||||
(Some(&Left(_)), Some(_)) => {
|
||||
li += 1;
|
||||
ri += 1;
|
||||
},
|
||||
}
|
||||
(Some(_), Some(&Left(_))) => {
|
||||
li += 1;
|
||||
ri += 1;
|
||||
},
|
||||
}
|
||||
(Some(&Both(..)), Some(&Both(..))) => {
|
||||
let chunk = Chunk(&self.left[..li], &self.right[..ri]);
|
||||
self.left = &self.left[li..];
|
||||
|
@ -94,13 +97,16 @@ mod test {
|
|||
|
||||
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(vec![
|
||||
assert_eq!(
|
||||
vec![
|
||||
Chunk(&oa[0..3], &ob[0..3]),
|
||||
Chunk(&oa[3..6], &ob[3..3]),
|
||||
Chunk(&oa[6..9], &ob[3..6]),
|
||||
Chunk(&oa[9..9], &ob[6..9]),
|
||||
Chunk(&oa[9..12], &ob[9..12]),
|
||||
], chunks);
|
||||
],
|
||||
chunks
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -113,11 +119,14 @@ mod test {
|
|||
let ob = diff::chars(o, b);
|
||||
|
||||
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
|
||||
assert_eq!(vec![
|
||||
assert_eq!(
|
||||
vec![
|
||||
Chunk(&oa[0..3], &ob[0..3]),
|
||||
Chunk(&oa[3..9], &ob[3..9]),
|
||||
Chunk(&oa[9..12], &ob[9..12]),
|
||||
], chunks);
|
||||
],
|
||||
chunks
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -130,10 +139,10 @@ mod test {
|
|||
let ob = diff::chars(o, b);
|
||||
|
||||
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
|
||||
assert_eq!(vec![
|
||||
Chunk(&oa[0..9], &ob[0.. 9]),
|
||||
Chunk(&oa[9..9], &ob[9..12]),
|
||||
], chunks);
|
||||
assert_eq!(
|
||||
vec![Chunk(&oa[0..9], &ob[0..9]), Chunk(&oa[9..9], &ob[9..12]),],
|
||||
chunks
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -146,10 +155,10 @@ mod test {
|
|||
let ob = diff::chars(o, b);
|
||||
|
||||
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
|
||||
assert_eq!(vec![
|
||||
Chunk(&oa[0..6], &ob[0.. 6]),
|
||||
Chunk(&oa[6..9], &ob[6..12]),
|
||||
], chunks);
|
||||
assert_eq!(
|
||||
vec![Chunk(&oa[0..6], &ob[0..6]), Chunk(&oa[6..9], &ob[6..12]),],
|
||||
chunks
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -162,8 +171,6 @@ mod test {
|
|||
let ob = diff::chars(o, b);
|
||||
|
||||
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
|
||||
assert_eq!(vec![
|
||||
Chunk(&oa[0..6], &ob[0..6]),
|
||||
], chunks);
|
||||
assert_eq!(vec![Chunk(&oa[0..6], &ob[0..6]),], chunks);
|
||||
}
|
||||
}
|
||||
|
|
src/merge/mod.rs (164 changed lines)
|
@ -1,5 +1,5 @@
|
|||
mod chunk_iterator;
|
||||
mod chunk;
|
||||
mod chunk_iterator;
|
||||
mod output;
|
||||
|
||||
use std::fmt::Debug;
|
||||
|
@ -7,8 +7,8 @@ use std::fmt::Debug;
|
|||
use diff;
|
||||
|
||||
use self::chunk_iterator::ChunkIterator;
|
||||
use self::output::*;
|
||||
use self::output::Output::Resolved;
|
||||
use self::output::*;
|
||||
|
||||
pub use self::output::Output;
|
||||
|
||||
|
@ -22,9 +22,9 @@ impl<'a> MergeResult<&'a str> {
|
|||
pub fn to_strings(self) -> MergeResult<String> {
|
||||
match self {
|
||||
MergeResult::Clean(x) => MergeResult::Clean(x),
|
||||
MergeResult::Conflicted(x) => MergeResult::Conflicted(
|
||||
x.into_iter().map(Output::to_strings).collect()
|
||||
)
|
||||
MergeResult::Conflicted(x) => {
|
||||
MergeResult::Conflicted(x.into_iter().map(Output::to_strings).collect())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -33,8 +33,8 @@ impl MergeResult<String> {
|
|||
pub fn flatten(self) -> String {
|
||||
match self {
|
||||
MergeResult::Clean(x) => x,
|
||||
MergeResult::Conflicted(x) => {
|
||||
x.into_iter()
|
||||
MergeResult::Conflicted(x) => x
|
||||
.into_iter()
|
||||
.flat_map(|out| match out {
|
||||
Output::Conflict(a, _o, b) => {
|
||||
let mut x: Vec<String> = vec![];
|
||||
|
@ -44,12 +44,10 @@ impl MergeResult<String> {
|
|||
x.extend(b.into_iter().map(|x| format!("{}\n", x)));
|
||||
x.push(">>>>>>> Conflict ends here\n".into());
|
||||
x
|
||||
},
|
||||
Output::Resolved(x) =>
|
||||
x.into_iter().map(|x| format!("{}\n", x)).collect(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
Output::Resolved(x) => x.into_iter().map(|x| format!("{}\n", x)).collect(),
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -58,8 +56,8 @@ impl MergeResult<char> {
|
|||
pub fn flatten(self) -> String {
|
||||
match self {
|
||||
MergeResult::Clean(x) => x,
|
||||
MergeResult::Conflicted(x) => {
|
||||
x.into_iter()
|
||||
MergeResult::Conflicted(x) => x
|
||||
.into_iter()
|
||||
.flat_map(|out| match out {
|
||||
Output::Conflict(a, _o, b) => {
|
||||
let mut x: Vec<char> = vec![];
|
||||
|
@ -69,11 +67,10 @@ impl MergeResult<char> {
|
|||
x.extend(b);
|
||||
x.push('>');
|
||||
x
|
||||
},
|
||||
}
|
||||
Output::Resolved(x) => x,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -85,7 +82,10 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
|
|||
let chunks = ChunkIterator::new(&oa, &ob);
|
||||
let hunks: Vec<_> = chunks.map(resolve).collect();
|
||||
|
||||
let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
|
||||
let clean = hunks.iter().all(|x| match x {
|
||||
&Resolved(..) => true,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
if clean {
|
||||
MergeResult::Clean(
|
||||
|
@ -93,10 +93,10 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
|
|||
.into_iter()
|
||||
.flat_map(|x| match x {
|
||||
Resolved(y) => y.into_iter(),
|
||||
_ => unreachable!()
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
.join("\n"),
|
||||
)
|
||||
} else {
|
||||
MergeResult::Conflicted(hunks)
|
||||
|
@ -110,7 +110,10 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
|
|||
let chunks = ChunkIterator::new(&oa, &ob);
|
||||
let hunks: Vec<_> = chunks.map(resolve).collect();
|
||||
|
||||
let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
|
||||
let clean = hunks.iter().all(|x| match x {
|
||||
&Resolved(..) => true,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
if clean {
|
||||
MergeResult::Clean(
|
||||
|
@ -118,9 +121,9 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
|
|||
.into_iter()
|
||||
.flat_map(|x| match x {
|
||||
Resolved(y) => y.into_iter(),
|
||||
_ => unreachable!()
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.collect()
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
MergeResult::Conflicted(hunks)
|
||||
|
@ -131,9 +134,9 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
|
|||
mod test {
|
||||
use diff;
|
||||
|
||||
use super::*;
|
||||
use super::output::*;
|
||||
use super::output::Output::*;
|
||||
use super::output::*;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn simple_case() {
|
||||
|
@ -145,106 +148,141 @@ mod test {
|
|||
chunks.map(resolve).collect()
|
||||
}
|
||||
|
||||
assert_eq!(vec![
|
||||
assert_eq!(
|
||||
vec![
|
||||
Resolved("aaa".chars().collect()),
|
||||
Resolved("xxx".chars().collect()),
|
||||
Resolved("bbb".chars().collect()),
|
||||
Resolved("yyy".chars().collect()),
|
||||
Resolved("ccc".chars().collect()),
|
||||
], merge_chars(
|
||||
"aaaxxxbbbccc",
|
||||
"aaabbbccc",
|
||||
"aaabbbyyyccc",
|
||||
));
|
||||
],
|
||||
merge_chars("aaaxxxbbbccc", "aaabbbccc", "aaabbbyyyccc",)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clean_case() {
|
||||
assert_eq!(MergeResult::Clean(indoc!("
|
||||
assert_eq!(
|
||||
MergeResult::Clean(
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
bbb
|
||||
yyy
|
||||
ccc
|
||||
").into()), merge_lines(
|
||||
indoc!("
|
||||
"
|
||||
)
|
||||
.into()
|
||||
),
|
||||
merge_lines(
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
bbb
|
||||
yyy
|
||||
ccc
|
||||
"),
|
||||
));
|
||||
"
|
||||
),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clean_case_chars() {
|
||||
assert_eq!(MergeResult::Clean("Title".into()), merge_chars(
|
||||
"Titlle",
|
||||
"titlle",
|
||||
"title",
|
||||
));
|
||||
assert_eq!(
|
||||
MergeResult::Clean("Title".into()),
|
||||
merge_chars("Titlle", "titlle", "title",)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn false_conflict() {
|
||||
assert_eq!(MergeResult::Clean(indoc!("
|
||||
assert_eq!(
|
||||
MergeResult::Clean(
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
ccc
|
||||
").into()), merge_lines(
|
||||
indoc!("
|
||||
"
|
||||
)
|
||||
.into()
|
||||
),
|
||||
merge_lines(
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
ccc
|
||||
"),
|
||||
));
|
||||
"
|
||||
),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn true_conflict() {
|
||||
assert_eq!(MergeResult::Conflicted(vec![
|
||||
assert_eq!(
|
||||
MergeResult::Conflicted(vec![
|
||||
Resolved(vec!["aaa"]),
|
||||
Conflict(vec!["xxx"], vec![], vec!["yyy"]),
|
||||
Resolved(vec!["bbb", "ccc", ""]),
|
||||
]), merge_lines(
|
||||
indoc!("
|
||||
]),
|
||||
merge_lines(
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
xxx
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
indoc!("
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
"
|
||||
aaa
|
||||
yyy
|
||||
bbb
|
||||
ccc
|
||||
"),
|
||||
));
|
||||
"
|
||||
),
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -47,9 +47,7 @@ fn choose_right<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> {
|
|||
}
|
||||
|
||||
fn no_change<Item>(operations: &[diff::Result<Item>]) -> bool {
|
||||
operations
|
||||
.iter()
|
||||
.all(|x| match x {
|
||||
operations.iter().all(|x| match x {
|
||||
&Both(..) => true,
|
||||
_ => false,
|
||||
})
|
||||
|
@ -78,83 +76,43 @@ pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>)
|
|||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use diff::Result::*;
|
||||
use super::*;
|
||||
use diff::Result::*;
|
||||
|
||||
#[test]
|
||||
fn empty() {
|
||||
assert_eq!(
|
||||
Output::Resolved(vec![]),
|
||||
resolve::<i32>(Chunk(&[], &[]))
|
||||
);
|
||||
assert_eq!(Output::Resolved(vec![]), resolve::<i32>(Chunk(&[], &[])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn same() {
|
||||
assert_eq!(
|
||||
Output::Resolved(vec![
|
||||
1
|
||||
]),
|
||||
resolve::<i32>(Chunk(
|
||||
&[Both(1, 1)],
|
||||
&[Both(1, 1)]
|
||||
))
|
||||
Output::Resolved(vec![1]),
|
||||
resolve::<i32>(Chunk(&[Both(1, 1)], &[Both(1, 1)]))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn only_left() {
|
||||
assert_eq!(
|
||||
Output::Resolved(vec![
|
||||
2
|
||||
]),
|
||||
resolve::<i32>(Chunk(
|
||||
&[
|
||||
Left(1),
|
||||
Right(2)
|
||||
],
|
||||
&[]
|
||||
))
|
||||
Output::Resolved(vec![2]),
|
||||
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[]))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn false_conflict() {
|
||||
assert_eq!(
|
||||
Output::Resolved(vec![
|
||||
2
|
||||
]),
|
||||
resolve::<i32>(Chunk(
|
||||
&[
|
||||
Left(1),
|
||||
Right(2)
|
||||
],
|
||||
&[
|
||||
Left(1),
|
||||
Right(2)
|
||||
],
|
||||
))
|
||||
Output::Resolved(vec![2]),
|
||||
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(2)],))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn real_conflict() {
|
||||
assert_eq!(
|
||||
Output::Conflict(
|
||||
vec![2],
|
||||
vec![1],
|
||||
vec![3],
|
||||
),
|
||||
resolve::<i32>(Chunk(
|
||||
&[
|
||||
Left(1),
|
||||
Right(2)
|
||||
],
|
||||
&[
|
||||
Left(1),
|
||||
Right(3)
|
||||
],
|
||||
))
|
||||
Output::Conflict(vec![2], vec![1], vec![3],),
|
||||
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(3)],))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,9 @@ pub struct ArticleRevision {
|
|||
}
|
||||
|
||||
impl ArticleRevision {
|
||||
pub fn link(&self) -> &str { slug_link(&self.slug) }
|
||||
pub fn link(&self) -> &str {
|
||||
slug_link(&self.slug)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Queryable)]
|
||||
|
@ -52,7 +54,9 @@ pub struct ArticleRevisionStub {
|
|||
}
|
||||
|
||||
impl ArticleRevisionStub {
|
||||
pub fn link(&self) -> &str { slug_link(&self.slug) }
|
||||
pub fn link(&self) -> &str {
|
||||
slug_link(&self.slug)
|
||||
}
|
||||
}
|
||||
|
||||
use diesel::sql_types::Text;
|
||||
|
@ -69,5 +73,7 @@ pub struct SearchResult {
|
|||
}
|
||||
|
||||
impl SearchResult {
|
||||
pub fn link(&self) -> &str { slug_link(&self.slug) }
|
||||
pub fn link(&self) -> &str {
|
||||
slug_link(&self.slug)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use pulldown_cmark::Event::{End, Text};
|
||||
use pulldown_cmark::{html, Parser, Tag, OPTION_DISABLE_HTML, OPTION_ENABLE_TABLES};
|
||||
use slug::slugify;
|
||||
use pulldown_cmark::{Parser, Tag, html, OPTION_ENABLE_TABLES, OPTION_DISABLE_HTML};
|
||||
use pulldown_cmark::Event::{Text, End};
|
||||
|
||||
fn slugify_link(text: &str, title: &str) -> Option<(String, String)> {
|
||||
Some((slugify(text), title.to_owned()))
|
||||
|
@ -28,8 +28,7 @@ pub fn render_markdown_for_fts(src: &str) -> String {
|
|||
|
||||
for event in p {
|
||||
match event {
|
||||
Text(text) =>
|
||||
buf.push_str(&text.replace(is_html_special, " ")),
|
||||
Text(text) => buf.push_str(&text.replace(is_html_special, " ")),
|
||||
End(Tag::Link(uri, _title)) => {
|
||||
buf.push_str(" (");
|
||||
buf.push_str(&uri.replace(is_html_special, " "));
|
||||
|
|
|
@ -56,11 +56,13 @@ struct LicenseInfo {
|
|||
#[derive(BartDisplay)]
|
||||
#[template = "templates/about.html"]
|
||||
struct Template<'a> {
|
||||
deps: &'a [LicenseInfo]
|
||||
deps: &'a [LicenseInfo],
|
||||
}
|
||||
|
||||
impl<'a> Template<'a> {
|
||||
fn version(&self) -> &str { &build_config::VERSION }
|
||||
fn version(&self) -> &str {
|
||||
&build_config::VERSION
|
||||
}
|
||||
}
|
||||
|
||||
impl Resource for AboutResource {
|
||||
|
@ -70,24 +72,27 @@ impl Resource for AboutResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
fn get(self: Box<Self>) -> ResponseFuture {
|
||||
let head = self.head();
|
||||
|
||||
Box::new(head
|
||||
.and_then(move |head| {
|
||||
Ok(head.with_body(system_page(
|
||||
Box::new(head.and_then(move |head| {
|
||||
Ok(head.with_body(
|
||||
system_page(
|
||||
None, // Hmm, should perhaps accept `base` as argument
|
||||
"About Sausagewiki",
|
||||
Template {
|
||||
deps: &*LICENSE_INFOS
|
||||
deps: &*LICENSE_INFOS,
|
||||
},
|
||||
).to_string()))
|
||||
)
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use chrono::{TimeZone, DateTime, Local};
|
||||
use chrono::{DateTime, Local, TimeZone};
|
||||
use futures::{self, Future};
|
||||
use hyper;
|
||||
use hyper::header::{ContentType, Location};
|
||||
|
@ -10,7 +10,7 @@ use crate::assets::ScriptJs;
|
|||
use crate::mimes::*;
|
||||
use crate::rendering::render_markdown;
|
||||
use crate::site::Layout;
|
||||
use crate::state::{State, UpdateResult, RebaseConflict};
|
||||
use crate::state::{RebaseConflict, State, UpdateResult};
|
||||
use crate::theme::{self, Theme};
|
||||
use crate::web::{Resource, ResponseFuture};
|
||||
|
||||
|
@ -58,7 +58,12 @@ pub struct ArticleResource {
|
|||
|
||||
impl ArticleResource {
|
||||
pub fn new(state: State, article_id: i32, revision: i32, edit: bool) -> Self {
|
||||
Self { state, article_id, revision, edit }
|
||||
Self {
|
||||
state,
|
||||
article_id,
|
||||
revision,
|
||||
edit,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,12 +83,23 @@ pub fn last_updated(article_id: i32, created: &DateTime<Local>, author: Option<&
|
|||
|
||||
Template {
|
||||
created: &created.to_rfc2822(),
|
||||
article_history: &format!("_changes{}", QueryParameters::default().article_id(Some(article_id)).into_link()),
|
||||
article_history: &format!(
|
||||
"_changes{}",
|
||||
QueryParameters::default()
|
||||
.article_id(Some(article_id))
|
||||
.into_link()
|
||||
),
|
||||
author: author.map(|author| Author {
|
||||
author: &author,
|
||||
history: format!("_changes{}", QueryParameters::default().author(Some(author.to_owned())).into_link()),
|
||||
history: format!(
|
||||
"_changes{}",
|
||||
QueryParameters::default()
|
||||
.author(Some(author.to_owned()))
|
||||
.into_link()
|
||||
),
|
||||
}),
|
||||
}.to_string()
|
||||
}
|
||||
.to_string()
|
||||
}
|
||||
|
||||
impl Resource for ArticleResource {
|
||||
|
@ -93,21 +109,23 @@ impl Resource for ArticleResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
fn get(self: Box<Self>) -> ResponseFuture {
|
||||
let data = self.state.get_article_revision(self.article_id, self.revision)
|
||||
let data = self
|
||||
.state
|
||||
.get_article_revision(self.article_id, self.revision)
|
||||
.map(|x| x.expect("Data model guarantees that this exists"));
|
||||
let head = self.head();
|
||||
|
||||
Box::new(data.join(head)
|
||||
.and_then(move |(data, head)| {
|
||||
Ok(head
|
||||
.with_body(Layout {
|
||||
Box::new(data.join(head).and_then(move |(data, head)| {
|
||||
Ok(head.with_body(
|
||||
Layout {
|
||||
base: None, // Hmm, should perhaps accept `base` as argument
|
||||
title: &data.title,
|
||||
theme: data.theme,
|
||||
|
@ -116,19 +134,24 @@ impl Resource for ArticleResource {
|
|||
last_updated: Some(&last_updated(
|
||||
data.article_id,
|
||||
&Local.from_utc_datetime(&data.created),
|
||||
data.author.as_ref().map(|x| &**x)
|
||||
data.author.as_ref().map(|x| &**x),
|
||||
)),
|
||||
edit: self.edit,
|
||||
cancel_url: Some(data.link()),
|
||||
title: &data.title,
|
||||
raw: &data.body,
|
||||
rendered: render_markdown(&data.body),
|
||||
themes: &theme::THEMES.iter().map(|&x| SelectableTheme {
|
||||
themes: &theme::THEMES
|
||||
.iter()
|
||||
.map(|&x| SelectableTheme {
|
||||
theme: x,
|
||||
selected: x == data.theme,
|
||||
}).collect::<Vec<_>>(),
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
},
|
||||
}.to_string()))
|
||||
}
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -156,22 +179,26 @@ impl Resource for ArticleResource {
|
|||
last_updated: &'a str,
|
||||
}
|
||||
|
||||
Box::new(body
|
||||
.concat2()
|
||||
Box::new(
|
||||
body.concat2()
|
||||
.map_err(Into::into)
|
||||
.and_then(|body| {
|
||||
serde_urlencoded::from_bytes(&body)
|
||||
.map_err(Into::into)
|
||||
})
|
||||
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
|
||||
.and_then(move |update: UpdateArticle| {
|
||||
self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity, update.theme)
|
||||
self.state.update_article(
|
||||
self.article_id,
|
||||
update.base_revision,
|
||||
update.title,
|
||||
update.body,
|
||||
identity,
|
||||
update.theme,
|
||||
)
|
||||
})
|
||||
.and_then(|updated| match updated {
|
||||
UpdateResult::Success(updated) =>
|
||||
Ok(Response::new()
|
||||
UpdateResult::Success(updated) => Ok(Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(APPLICATION_JSON.clone()))
|
||||
.with_body(serde_json::to_string(&PutResponse {
|
||||
.with_body(
|
||||
serde_json::to_string(&PutResponse {
|
||||
conflict: false,
|
||||
slug: &updated.slug,
|
||||
revision: updated.revision,
|
||||
|
@ -181,23 +208,29 @@ impl Resource for ArticleResource {
|
|||
rendered: &Template {
|
||||
title: &updated.title,
|
||||
rendered: render_markdown(&updated.body),
|
||||
}.to_string(),
|
||||
}
|
||||
.to_string(),
|
||||
last_updated: &last_updated(
|
||||
updated.article_id,
|
||||
&Local.from_utc_datetime(&updated.created),
|
||||
updated.author.as_ref().map(|x| &**x)
|
||||
),
|
||||
}).expect("Should never fail"))
|
||||
updated.author.as_ref().map(|x| &**x),
|
||||
),
|
||||
})
|
||||
.expect("Should never fail"),
|
||||
)),
|
||||
UpdateResult::RebaseConflict(RebaseConflict {
|
||||
base_article, title, body, theme
|
||||
base_article,
|
||||
title,
|
||||
body,
|
||||
theme,
|
||||
}) => {
|
||||
let title = title.flatten();
|
||||
let body = body.flatten();
|
||||
Ok(Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(APPLICATION_JSON.clone()))
|
||||
.with_body(serde_json::to_string(&PutResponse {
|
||||
.with_body(
|
||||
serde_json::to_string(&PutResponse {
|
||||
conflict: true,
|
||||
slug: &base_article.slug,
|
||||
revision: base_article.revision,
|
||||
|
@ -207,16 +240,18 @@ impl Resource for ArticleResource {
|
|||
rendered: &Template {
|
||||
title: &title,
|
||||
rendered: render_markdown(&body),
|
||||
}.to_string(),
|
||||
}
|
||||
.to_string(),
|
||||
last_updated: &last_updated(
|
||||
base_article.article_id,
|
||||
&Local.from_utc_datetime(&base_article.created),
|
||||
base_article.author.as_ref().map(|x| &**x)
|
||||
base_article.author.as_ref().map(|x| &**x),
|
||||
),
|
||||
}).expect("Should never fail"))
|
||||
)
|
||||
}
|
||||
})
|
||||
.expect("Should never fail"),
|
||||
))
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -225,33 +260,39 @@ impl Resource for ArticleResource {
|
|||
|
||||
use futures::Stream;
|
||||
|
||||
Box::new(body
|
||||
.concat2()
|
||||
Box::new(
|
||||
body.concat2()
|
||||
.map_err(Into::into)
|
||||
.and_then(|body| {
|
||||
serde_urlencoded::from_bytes(&body)
|
||||
.map_err(Into::into)
|
||||
})
|
||||
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
|
||||
.and_then(move |update: UpdateArticle| {
|
||||
self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity, update.theme)
|
||||
self.state.update_article(
|
||||
self.article_id,
|
||||
update.base_revision,
|
||||
update.title,
|
||||
update.body,
|
||||
identity,
|
||||
update.theme,
|
||||
)
|
||||
})
|
||||
.and_then(|updated| {
|
||||
match updated {
|
||||
.and_then(|updated| match updated {
|
||||
UpdateResult::Success(updated) => Ok(Response::new()
|
||||
.with_status(hyper::StatusCode::SeeOther)
|
||||
.with_header(ContentType(TEXT_PLAIN.clone()))
|
||||
.with_header(Location::new(updated.link().to_owned()))
|
||||
.with_body("See other")
|
||||
),
|
||||
.with_body("See other")),
|
||||
UpdateResult::RebaseConflict(RebaseConflict {
|
||||
base_article, title, body, theme
|
||||
base_article,
|
||||
title,
|
||||
body,
|
||||
theme,
|
||||
}) => {
|
||||
let title = title.flatten();
|
||||
let body = body.flatten();
|
||||
Ok(Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_body(Layout {
|
||||
.with_body(
|
||||
Layout {
|
||||
base: None,
|
||||
title: &title,
|
||||
theme,
|
||||
|
@ -260,23 +301,26 @@ impl Resource for ArticleResource {
|
|||
last_updated: Some(&last_updated(
|
||||
base_article.article_id,
|
||||
&Local.from_utc_datetime(&base_article.created),
|
||||
base_article.author.as_ref().map(|x| &**x)
|
||||
base_article.author.as_ref().map(|x| &**x),
|
||||
)),
|
||||
edit: true,
|
||||
cancel_url: Some(base_article.link()),
|
||||
title: &title,
|
||||
raw: &body,
|
||||
rendered: render_markdown(&body),
|
||||
themes: &theme::THEMES.iter().map(|&x| SelectableTheme {
|
||||
themes: &theme::THEMES
|
||||
.iter()
|
||||
.map(|&x| SelectableTheme {
|
||||
theme: x,
|
||||
selected: x == theme,
|
||||
}).collect::<Vec<_>>(),
|
||||
},
|
||||
}.to_string())
|
||||
)
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
},
|
||||
}
|
||||
.to_string(),
|
||||
))
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use chrono::{TimeZone, DateTime, Local};
|
||||
use chrono::{DateTime, Local, TimeZone};
|
||||
use futures::{self, Future};
|
||||
use hyper;
|
||||
use hyper::header::ContentType;
|
||||
|
@ -24,7 +24,12 @@ impl ArticleRevisionResource {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &DateTime<Local>, author: Option<&str>) -> String {
|
||||
pub fn timestamp_and_author(
|
||||
sequence_number: i32,
|
||||
article_id: i32,
|
||||
created: &DateTime<Local>,
|
||||
author: Option<&str>,
|
||||
) -> String {
|
||||
struct Author<'a> {
|
||||
author: &'a str,
|
||||
history: String,
|
||||
|
@ -42,7 +47,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
|
|||
|
||||
Template {
|
||||
created: &created.to_rfc2822(),
|
||||
article_history: &format!("_changes{}",
|
||||
article_history: &format!(
|
||||
"_changes{}",
|
||||
QueryParameters::default()
|
||||
.pagination(pagination)
|
||||
.article_id(Some(article_id))
|
||||
|
@ -50,7 +56,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
|
|||
),
|
||||
author: author.map(|author| Author {
|
||||
author: &author,
|
||||
history: format!("_changes{}",
|
||||
history: format!(
|
||||
"_changes{}",
|
||||
QueryParameters::default()
|
||||
.pagination(pagination)
|
||||
.article_id(Some(article_id))
|
||||
|
@ -58,7 +65,8 @@ pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &Dat
|
|||
.into_link()
|
||||
),
|
||||
}),
|
||||
}.to_string()
|
||||
}
|
||||
.to_string()
|
||||
}
|
||||
|
||||
impl Resource for ArticleRevisionResource {
|
||||
|
@ -68,9 +76,10 @@ impl Resource for ArticleRevisionResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -87,9 +96,9 @@ impl Resource for ArticleRevisionResource {
|
|||
let head = self.head();
|
||||
let data = self.data;
|
||||
|
||||
Box::new(head
|
||||
.and_then(move |head|
|
||||
Ok(head.with_body(system_page(
|
||||
Box::new(head.and_then(move |head| {
|
||||
Ok(head.with_body(
|
||||
system_page(
|
||||
Some("../../"), // Hmm, should perhaps accept `base` as argument
|
||||
&data.title,
|
||||
&Template {
|
||||
|
@ -98,11 +107,11 @@ impl Resource for ArticleRevisionResource {
|
|||
data.sequence_number,
|
||||
data.article_id,
|
||||
&Local.from_utc_datetime(&data.created),
|
||||
data.author.as_ref().map(|x| &**x)
|
||||
data.author.as_ref().map(|x| &**x),
|
||||
),
|
||||
diff_link:
|
||||
if data.revision > 1 {
|
||||
Some(format!("_diff/{}?{}",
|
||||
diff_link: if data.revision > 1 {
|
||||
Some(format!(
|
||||
"_diff/{}?{}",
|
||||
data.article_id,
|
||||
diff_resource::QueryParameters::new(
|
||||
data.revision as u32 - 1,
|
||||
|
@ -114,7 +123,9 @@ impl Resource for ArticleRevisionResource {
|
|||
},
|
||||
rendered: render_markdown(&data.body),
|
||||
},
|
||||
).to_string()))
|
||||
)
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use diesel;
|
||||
use futures::{self, Future};
|
||||
use futures::future::{done, finished};
|
||||
use futures::{self, Future};
|
||||
use hyper;
|
||||
use hyper::header::ContentType;
|
||||
use hyper::server::*;
|
||||
|
@ -40,8 +40,16 @@ pub struct QueryParameters {
|
|||
impl QueryParameters {
|
||||
pub fn pagination(self, pagination: Pagination<i32>) -> Self {
|
||||
Self {
|
||||
after: if let Pagination::After(x) = pagination { Some(x) } else { None },
|
||||
before: if let Pagination::Before(x) = pagination { Some(x) } else { None },
|
||||
after: if let Pagination::After(x) = pagination {
|
||||
Some(x)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
before: if let Pagination::Before(x) = pagination {
|
||||
Some(x)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
@ -56,7 +64,11 @@ impl QueryParameters {
|
|||
|
||||
pub fn limit(self, limit: i32) -> Self {
|
||||
Self {
|
||||
limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None },
|
||||
limit: if limit != DEFAULT_LIMIT {
|
||||
Some(limit)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
@ -76,9 +88,7 @@ fn apply_query_config<'a>(
|
|||
article_id: Option<i32>,
|
||||
author: Option<String>,
|
||||
limit: i32,
|
||||
)
|
||||
-> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>
|
||||
{
|
||||
) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite> {
|
||||
use diesel::prelude::*;
|
||||
|
||||
if let Some(article_id) = article_id {
|
||||
|
@ -94,10 +104,16 @@ fn apply_query_config<'a>(
|
|||
|
||||
impl ChangesLookup {
|
||||
pub fn new(state: State, show_authors: bool) -> ChangesLookup {
|
||||
Self { state, show_authors }
|
||||
Self {
|
||||
state,
|
||||
show_authors,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lookup(&self, query: Option<&str>) -> Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>> {
|
||||
pub fn lookup(
|
||||
&self,
|
||||
query: Option<&str>,
|
||||
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
|
||||
use super::pagination;
|
||||
|
||||
let state = self.state.clone();
|
||||
|
@ -117,25 +133,28 @@ impl ChangesLookup {
|
|||
|
||||
Ok((pagination, params.article_id, params.author, limit))
|
||||
})())
|
||||
.and_then(move |(pagination, article_id, author, limit)| match pagination {
|
||||
.and_then(move |(pagination, article_id, author, limit)| {
|
||||
match pagination {
|
||||
Pagination::After(x) => {
|
||||
let author2 = author.clone();
|
||||
|
||||
Box::new(state.query_article_revision_stubs(move |query| {
|
||||
Box::new(
|
||||
state
|
||||
.query_article_revision_stubs(move |query| {
|
||||
use diesel::prelude::*;
|
||||
|
||||
apply_query_config(query, article_id, author2, limit)
|
||||
.filter(article_revisions::sequence_number.gt(x))
|
||||
.order(article_revisions::sequence_number.asc())
|
||||
}).and_then(move |mut data| {
|
||||
})
|
||||
.and_then(move |mut data| {
|
||||
let extra_element = if data.len() > limit as usize {
|
||||
data.pop()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let args =
|
||||
QueryParameters {
|
||||
let args = QueryParameters {
|
||||
after: None,
|
||||
before: None,
|
||||
article_id,
|
||||
|
@ -146,19 +165,42 @@ impl ChangesLookup {
|
|||
|
||||
Ok(Some(match extra_element {
|
||||
Some(x) => Box::new(TemporaryRedirectResource::new(
|
||||
args
|
||||
.pagination(Pagination::Before(x.sequence_number))
|
||||
.into_link()
|
||||
)) as BoxResource,
|
||||
args.pagination(Pagination::Before(x.sequence_number))
|
||||
.into_link(),
|
||||
))
|
||||
as BoxResource,
|
||||
None => Box::new(TemporaryRedirectResource::new(
|
||||
args.into_link()
|
||||
)) as BoxResource,
|
||||
args.into_link(),
|
||||
))
|
||||
as BoxResource,
|
||||
}))
|
||||
})) as Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>>
|
||||
},
|
||||
Pagination::Before(x) => Box::new(finished(Some(Box::new(ChangesResource::new(state, show_authors, Some(x), article_id, author, limit)) as BoxResource))),
|
||||
Pagination::None => Box::new(finished(Some(Box::new(ChangesResource::new(state, show_authors, None, article_id, author, limit)) as BoxResource))),
|
||||
})
|
||||
}),
|
||||
)
|
||||
as Box<
|
||||
dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>,
|
||||
>
|
||||
}
|
||||
Pagination::Before(x) => {
|
||||
Box::new(finished(Some(Box::new(ChangesResource::new(
|
||||
state,
|
||||
show_authors,
|
||||
Some(x),
|
||||
article_id,
|
||||
author,
|
||||
limit,
|
||||
)) as BoxResource)))
|
||||
}
|
||||
Pagination::None => Box::new(finished(Some(Box::new(ChangesResource::new(
|
||||
state,
|
||||
show_authors,
|
||||
None,
|
||||
article_id,
|
||||
author,
|
||||
limit,
|
||||
))
|
||||
as BoxResource))),
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -173,8 +215,22 @@ pub struct ChangesResource {
|
|||
}
|
||||
|
||||
impl ChangesResource {
|
||||
pub fn new(state: State, show_authors: bool, before: Option<i32>, article_id: Option<i32>, author: Option<String>, limit: i32) -> Self {
|
||||
Self { state, show_authors, before, article_id, author, limit }
|
||||
pub fn new(
|
||||
state: State,
|
||||
show_authors: bool,
|
||||
before: Option<i32>,
|
||||
article_id: Option<i32>,
|
||||
author: Option<String>,
|
||||
limit: i32,
|
||||
) -> Self {
|
||||
Self {
|
||||
state,
|
||||
show_authors,
|
||||
before,
|
||||
article_id,
|
||||
author,
|
||||
limit,
|
||||
}
|
||||
}
|
||||
|
||||
fn query_args(&self) -> QueryParameters {
|
||||
|
@ -196,14 +252,15 @@ impl Resource for ChangesResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
fn get(self: Box<Self>) -> ResponseFuture {
|
||||
use chrono::{TimeZone, Local};
|
||||
use chrono::{Local, TimeZone};
|
||||
|
||||
struct Row<'a> {
|
||||
resource: &'a ChangesResource,
|
||||
|
@ -224,7 +281,8 @@ impl Resource for ChangesResource {
|
|||
|
||||
impl<'a> Row<'a> {
|
||||
fn author_link(&self) -> String {
|
||||
self.resource.query_args()
|
||||
self.resource
|
||||
.query_args()
|
||||
.pagination(Pagination::After(self.sequence_number))
|
||||
.author(self.author.clone())
|
||||
.into_link()
|
||||
|
@ -260,24 +318,25 @@ impl Resource for ChangesResource {
|
|||
}
|
||||
|
||||
fn all_articles_link(&self) -> Option<String> {
|
||||
self.resource.article_id.map(|_| {
|
||||
self.resource.query_args()
|
||||
.article_id(None)
|
||||
.into_link()
|
||||
})
|
||||
self.resource
|
||||
.article_id
|
||||
.map(|_| self.resource.query_args().article_id(None).into_link())
|
||||
}
|
||||
|
||||
fn all_authors_link(&self) -> Option<String> {
|
||||
self.resource.author.as_ref().map(|_| {
|
||||
self.resource.query_args()
|
||||
.author(None)
|
||||
.into_link()
|
||||
})
|
||||
self.resource
|
||||
.author
|
||||
.as_ref()
|
||||
.map(|_| self.resource.query_args().author(None).into_link())
|
||||
}
|
||||
}
|
||||
|
||||
let (before, article_id, author, limit) =
|
||||
(self.before.clone(), self.article_id.clone(), self.author.clone(), self.limit);
|
||||
let (before, article_id, author, limit) = (
|
||||
self.before.clone(),
|
||||
self.article_id.clone(),
|
||||
self.author.clone(),
|
||||
self.limit,
|
||||
);
|
||||
let data = self.state.query_article_revision_stubs(move |query| {
|
||||
use diesel::prelude::*;
|
||||
|
||||
|
@ -292,8 +351,7 @@ impl Resource for ChangesResource {
|
|||
|
||||
let head = self.head();
|
||||
|
||||
Box::new(data.join(head)
|
||||
.and_then(move |(mut data, head)| {
|
||||
Box::new(data.join(head).and_then(move |(mut data, head)| {
|
||||
use std::iter::Iterator;
|
||||
|
||||
let extra_element = if data.len() > self.limit as usize {
|
||||
|
@ -305,29 +363,41 @@ impl Resource for ChangesResource {
|
|||
let (newer, older) = match self.before {
|
||||
Some(x) => (
|
||||
Some(NavLinks {
|
||||
more: self.query_args().pagination(Pagination::After(x-1)).into_link(),
|
||||
more: self
|
||||
.query_args()
|
||||
.pagination(Pagination::After(x - 1))
|
||||
.into_link(),
|
||||
end: self.query_args().pagination(Pagination::None).into_link(),
|
||||
}),
|
||||
extra_element.map(|_| NavLinks {
|
||||
more: self.query_args()
|
||||
more: self
|
||||
.query_args()
|
||||
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
|
||||
.into_link(),
|
||||
end: self.query_args().pagination(Pagination::After(0)).into_link(),
|
||||
})
|
||||
end: self
|
||||
.query_args()
|
||||
.pagination(Pagination::After(0))
|
||||
.into_link(),
|
||||
}),
|
||||
),
|
||||
None => (
|
||||
None,
|
||||
extra_element.map(|_| NavLinks {
|
||||
more: self.query_args()
|
||||
more: self
|
||||
.query_args()
|
||||
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
|
||||
.into_link(),
|
||||
end: self.query_args().pagination(Pagination::After(0)).into_link(),
|
||||
end: self
|
||||
.query_args()
|
||||
.pagination(Pagination::After(0))
|
||||
.into_link(),
|
||||
}),
|
||||
),
|
||||
};
|
||||
|
||||
let changes = &data.into_iter().map(|x| {
|
||||
Row {
|
||||
let changes = &data
|
||||
.into_iter()
|
||||
.map(|x| Row {
|
||||
resource: &self,
|
||||
sequence_number: x.sequence_number,
|
||||
article_id: x.article_id,
|
||||
|
@ -337,9 +407,9 @@ impl Resource for ChangesResource {
|
|||
_slug: x.slug,
|
||||
title: x.title,
|
||||
_latest: x.latest,
|
||||
diff_link:
|
||||
if x.revision > 1 {
|
||||
Some(format!("_diff/{}?{}",
|
||||
diff_link: if x.revision > 1 {
|
||||
Some(format!(
|
||||
"_diff/{}?{}",
|
||||
x.article_id,
|
||||
diff_resource::QueryParameters::new(
|
||||
x.revision as u32 - 1,
|
||||
|
@ -349,10 +419,11 @@ impl Resource for ChangesResource {
|
|||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(head.with_body(system_page(
|
||||
Ok(head.with_body(
|
||||
system_page(
|
||||
None, // Hmm, should perhaps accept `base` as argument
|
||||
"Changes",
|
||||
Template {
|
||||
|
@ -360,9 +431,11 @@ impl Resource for ChangesResource {
|
|||
show_authors: self.show_authors,
|
||||
newer,
|
||||
older,
|
||||
changes
|
||||
}
|
||||
).to_string()))
|
||||
changes,
|
||||
},
|
||||
)
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
}

@ -1,8 +1,8 @@
use std::fmt;

use diff;
use futures::{self, Future};
use futures::future::done;
use futures::{self, Future};
use hyper;
use hyper::header::ContentType;
use hyper::server::*;

@ -48,25 +48,28 @@ impl DiffLookup {
|
|||
Self { state }
|
||||
}
|
||||
|
||||
pub fn lookup(&self, article_id: u32, query: Option<&str>) -> Box<dyn Future<Item=Option<BoxResource>, Error=crate::web::Error>> {
|
||||
pub fn lookup(
|
||||
&self,
|
||||
article_id: u32,
|
||||
query: Option<&str>,
|
||||
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
|
||||
let state = self.state.clone();
|
||||
|
||||
Box::new(done(
|
||||
serde_urlencoded::from_str(query.unwrap_or(""))
|
||||
.map_err(Into::into)
|
||||
).and_then(move |params: QueryParameters| {
|
||||
Box::new(
|
||||
done(serde_urlencoded::from_str(query.unwrap_or("")).map_err(Into::into))
|
||||
.and_then(move |params: QueryParameters| {
|
||||
let from = state.get_article_revision(article_id as i32, params.from as i32);
|
||||
let to = state.get_article_revision(article_id as i32, params.to as i32);
|
||||
|
||||
from.join(to)
|
||||
}).and_then(move |(from, to)| {
|
||||
match (from, to) {
|
||||
(Some(from), Some(to)) =>
|
||||
Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource)),
|
||||
_ =>
|
||||
Ok(None),
|
||||
})
|
||||
.and_then(move |(from, to)| match (from, to) {
|
||||
(Some(from), Some(to)) => {
|
||||
Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource))
|
||||
}
|
||||
}))
|
||||
_ => Ok(None),
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -89,9 +92,10 @@ impl Resource for DiffResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -119,8 +123,7 @@ impl Resource for DiffResource {
|
|||
|
||||
let head = self.head();
|
||||
|
||||
Box::new(head
|
||||
.and_then(move |head| {
|
||||
Box::new(head.and_then(move |head| {
|
||||
let consecutive = self.to.revision - self.from.revision == 1;
|
||||
|
||||
let author = match consecutive {
|
||||
|
@ -128,14 +131,16 @@ impl Resource for DiffResource {
|
|||
false => None,
|
||||
};
|
||||
|
||||
let author_link = &format!("_changes{}",
|
||||
let author_link = &format!(
|
||||
"_changes{}",
|
||||
changes_resource::QueryParameters::default()
|
||||
.author(author.map(|x| x.to_owned()))
|
||||
.pagination(Pagination::After(self.from.sequence_number))
|
||||
.into_link()
|
||||
);
|
||||
|
||||
let article_history_link = &format!("_changes{}",
|
||||
let article_history_link = &format!(
|
||||
"_changes{}",
|
||||
changes_resource::QueryParameters::default()
|
||||
.article_id(Some(self.from.article_id))
|
||||
.pagination(Pagination::After(self.from.sequence_number))
|
||||
|
@ -145,23 +150,41 @@ impl Resource for DiffResource {
|
|||
let title = &diff::chars(&self.from.title, &self.to.title)
|
||||
.into_iter()
|
||||
.map(|x| match x {
|
||||
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() },
|
||||
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() },
|
||||
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() },
|
||||
diff::Result::Left(x) => Diff {
|
||||
removed: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
diff::Result::Both(x, _) => Diff {
|
||||
same: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
diff::Result::Right(x) => Diff {
|
||||
added: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let lines = &diff::lines(&self.from.body, &self.to.body)
|
||||
.into_iter()
|
||||
.map(|x| match x {
|
||||
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() },
|
||||
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() },
|
||||
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() },
|
||||
diff::Result::Left(x) => Diff {
|
||||
removed: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
diff::Result::Both(x, _) => Diff {
|
||||
same: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
diff::Result::Right(x) => Diff {
|
||||
added: Some(x),
|
||||
..Default::default()
|
||||
},
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(head
|
||||
.with_body(Layout {
|
||||
Ok(head.with_body(
|
||||
Layout {
|
||||
base: Some("../"), // Hmm, should perhaps accept `base` as argument
|
||||
title: "Difference",
|
||||
theme: theme::theme_from_str_hash("Difference"),
|
||||
|
@ -171,12 +194,17 @@ impl Resource for DiffResource {
|
|||
author,
|
||||
author_link,
|
||||
article_history_link,
|
||||
from_link: &format!("_revisions/{}/{}", self.from.article_id, self.from.revision),
|
||||
from_link: &format!(
|
||||
"_revisions/{}/{}",
|
||||
self.from.article_id, self.from.revision
|
||||
),
|
||||
to_link: &format!("_revisions/{}/{}", self.to.article_id, self.to.revision),
|
||||
title,
|
||||
lines,
|
||||
},
|
||||
}.to_string()))
|
||||
}
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
}

@ -15,7 +15,11 @@ pub struct HtmlResource {

impl HtmlResource {
pub fn new(base: Option<&'static str>, title: &'static str, html_body: &'static str) -> Self {
HtmlResource { base, title, html_body }
HtmlResource {
base,
title,
html_body,
}
}
}

@ -26,22 +30,18 @@ impl Resource for HtmlResource {
}

fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new()
Box::new(futures::finished(
Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone()))
.with_header(ContentType(TEXT_HTML.clone())),
))
}

fn get(self: Box<Self>) -> ResponseFuture {
let head = self.head();

Box::new(head
.and_then(move |head| {
Ok(head.with_body(system_page(
self.base,
self.title,
self.html_body
).to_string()))
Box::new(head.and_then(move |head| {
Ok(head.with_body(system_page(self.base, self.title, self.html_body).to_string()))
}))
}
}

@ -1,8 +1,8 @@
pub mod pagination;

mod about_resource;
mod article_revision_resource;
mod article_resource;
mod article_revision_resource;
mod changes_resource;
mod diff_resource;
mod html_resource;

@ -13,8 +13,8 @@ mod sitemap_resource;
mod temporary_redirect_resource;

pub use self::about_resource::AboutResource;
pub use self::article_revision_resource::ArticleRevisionResource;
pub use self::article_resource::ArticleResource;
pub use self::article_revision_resource::ArticleRevisionResource;
pub use self::changes_resource::{ChangesLookup, ChangesResource};
pub use self::diff_resource::{DiffLookup, DiffResource};
pub use self::html_resource::HtmlResource;

@ -52,9 +52,10 @@ impl Resource for NewArticleResource {
|
|||
}
|
||||
|
||||
fn head(&self) -> ResponseFuture {
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::NotFound)
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_header(ContentType(TEXT_HTML.clone())),
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -84,13 +85,14 @@ impl Resource for NewArticleResource {
|
|||
}
|
||||
}
|
||||
|
||||
let title = self.slug.as_ref()
|
||||
let title = self
|
||||
.slug
|
||||
.as_ref()
|
||||
.map_or("".to_owned(), |x| title_from_slug(x));
|
||||
|
||||
Box::new(self.head()
|
||||
.and_then(move |head| {
|
||||
Ok(head
|
||||
.with_body(Layout {
|
||||
Box::new(self.head().and_then(move |head| {
|
||||
Ok(head.with_body(
|
||||
Layout {
|
||||
base: None, // Hmm, should perhaps accept `base` as argument
|
||||
title: &title,
|
||||
theme: theme::Theme::Gray,
|
||||
|
@ -102,12 +104,17 @@ impl Resource for NewArticleResource {
|
|||
title: &title,
|
||||
raw: "",
|
||||
rendered: EMPTY_ARTICLE_MESSAGE,
|
||||
themes: &theme::THEMES.iter().map(|&x| SelectableTheme {
|
||||
themes: &theme::THEMES
|
||||
.iter()
|
||||
.map(|&x| SelectableTheme {
|
||||
theme: x,
|
||||
selected: false,
|
||||
}).collect::<Vec<_>>(),
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
},
|
||||
}.to_string()))
|
||||
}
|
||||
.to_string(),
|
||||
))
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -115,7 +122,7 @@ impl Resource for NewArticleResource {
|
|||
// TODO Check incoming Content-Type
|
||||
// TODO Refactor? Reduce duplication with ArticleResource::put?
|
||||
|
||||
use chrono::{TimeZone, Local};
|
||||
use chrono::{Local, TimeZone};
|
||||
use futures::Stream;
|
||||
|
||||
#[derive(BartDisplay)]
|
||||
|
@ -137,25 +144,30 @@ impl Resource for NewArticleResource {
|
|||
last_updated: &'a str,
|
||||
}
|
||||
|
||||
Box::new(body
|
||||
.concat2()
|
||||
Box::new(
|
||||
body.concat2()
|
||||
.map_err(Into::into)
|
||||
.and_then(|body| {
|
||||
serde_urlencoded::from_bytes(&body)
|
||||
.map_err(Into::into)
|
||||
})
|
||||
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
|
||||
.and_then(move |arg: CreateArticle| {
|
||||
if arg.base_revision != NEW {
|
||||
unimplemented!("Version update conflict");
|
||||
}
|
||||
let theme = arg.theme.unwrap_or_else(theme::random);
|
||||
self.state.create_article(self.slug.clone(), arg.title, arg.body, identity, theme)
|
||||
self.state.create_article(
|
||||
self.slug.clone(),
|
||||
arg.title,
|
||||
arg.body,
|
||||
identity,
|
||||
theme,
|
||||
)
|
||||
})
|
||||
.and_then(|updated| {
|
||||
futures::finished(Response::new()
|
||||
futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(ContentType(APPLICATION_JSON.clone()))
|
||||
.with_body(serde_json::to_string(&PutResponse {
|
||||
.with_body(
|
||||
serde_json::to_string(&PutResponse {
|
||||
slug: &updated.slug,
|
||||
article_id: updated.article_id,
|
||||
revision: updated.revision,
|
||||
|
@ -165,15 +177,18 @@ impl Resource for NewArticleResource {
|
|||
rendered: &Template {
|
||||
title: &updated.title,
|
||||
rendered: render_markdown(&updated.body),
|
||||
}.to_string(),
|
||||
}
|
||||
.to_string(),
|
||||
last_updated: &super::article_resource::last_updated(
|
||||
updated.article_id,
|
||||
&Local.from_utc_datetime(&updated.created),
|
||||
updated.author.as_ref().map(|x| &**x)
|
||||
updated.author.as_ref().map(|x| &**x),
|
||||
),
|
||||
}).expect("Should never fail"))
|
||||
)
|
||||
})
|
||||
.expect("Should never fail"),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -183,28 +198,32 @@ impl Resource for NewArticleResource {
|
|||
|
||||
use futures::Stream;
|
||||
|
||||
Box::new(body
|
||||
.concat2()
|
||||
Box::new(
|
||||
body.concat2()
|
||||
.map_err(Into::into)
|
||||
.and_then(|body| {
|
||||
serde_urlencoded::from_bytes(&body)
|
||||
.map_err(Into::into)
|
||||
})
|
||||
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
|
||||
.and_then(move |arg: CreateArticle| {
|
||||
if arg.base_revision != NEW {
|
||||
unimplemented!("Version update conflict");
|
||||
}
|
||||
let theme = arg.theme.unwrap_or_else(theme::random);
|
||||
self.state.create_article(self.slug.clone(), arg.title, arg.body, identity, theme)
|
||||
self.state.create_article(
|
||||
self.slug.clone(),
|
||||
arg.title,
|
||||
arg.body,
|
||||
identity,
|
||||
theme,
|
||||
)
|
||||
})
|
||||
.and_then(|updated| {
|
||||
futures::finished(Response::new()
|
||||
futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::SeeOther)
|
||||
.with_header(ContentType(TEXT_PLAIN.clone()))
|
||||
.with_header(Location::new(updated.link().to_owned()))
|
||||
.with_body("See other")
|
||||
.with_body("See other"),
|
||||
)
|
||||
})
|
||||
}),
|
||||
)
|
||||
}
|
||||
}

@ -37,7 +37,7 @@ impl<T> PaginationStruct<T> {
(Some(x), None) => Ok(Pagination::After(x)),
(None, Some(x)) => Ok(Pagination::Before(x)),
(None, None) => Ok(Pagination::None),
_ => Err(Error)
_ => Err(Error),
}
}
}

@ -1,5 +1,5 @@
use futures::Future;
use hyper::header::{ContentType, ContentLength, CacheControl, CacheDirective};
use hyper::header::{CacheControl, CacheDirective, ContentLength, ContentType};
use hyper::server::*;
use hyper::StatusCode;

@ -18,21 +18,21 @@ impl Resource for ReadOnlyResource {
}

fn head(&self) -> ResponseFuture {
Box::new(::futures::finished(Response::new()
Box::new(::futures::finished(
Response::new()
.with_status(StatusCode::Ok)
.with_header(ContentType(self.content_type.clone()))
.with_header(CacheControl(vec![
CacheDirective::MustRevalidate,
CacheDirective::NoStore,
]))
])),
))
}

fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head().map(move |head|
head
.with_header(ContentLength(self.body.len() as u64))
Box::new(self.head().map(move |head| {
head.with_header(ContentLength(self.body.len() as u64))
.with_body(self.body.clone())
))
}))
}
}

@ -34,14 +34,22 @@ impl QueryParameters {
|
|||
|
||||
pub fn limit(self, limit: u32) -> Self {
|
||||
Self {
|
||||
limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None },
|
||||
limit: if limit != DEFAULT_LIMIT {
|
||||
Some(limit)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn snippet_size(self, snippet_size: u32) -> Self {
|
||||
Self {
|
||||
snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE { Some(snippet_size) } else { None },
|
||||
snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE {
|
||||
Some(snippet_size)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
@ -69,15 +77,13 @@ impl SearchLookup {
|
|||
pub fn lookup(&self, query: Option<&str>) -> Result<Option<BoxResource>, crate::web::Error> {
|
||||
let args: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?;
|
||||
|
||||
Ok(Some(Box::new(
|
||||
SearchResource::new(
|
||||
Ok(Some(Box::new(SearchResource::new(
|
||||
self.state.clone(),
|
||||
args.q,
|
||||
args.limit.unwrap_or(DEFAULT_LIMIT),
|
||||
args.offset.unwrap_or(0),
|
||||
args.snippet_size.unwrap_or(DEFAULT_SNIPPET_SIZE),
|
||||
)
|
||||
)))
|
||||
))))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -98,8 +104,21 @@ pub enum ResponseType {
|
|||
}
|
||||
|
||||
impl SearchResource {
|
||||
pub fn new(state: State, query: Option<String>, limit: u32, offset: u32, snippet_size: u32) -> Self {
|
||||
Self { state, response_type: ResponseType::Html, query, limit, offset, snippet_size }
|
||||
pub fn new(
|
||||
state: State,
|
||||
query: Option<String>,
|
||||
limit: u32,
|
||||
offset: u32,
|
||||
snippet_size: u32,
|
||||
) -> Self {
|
||||
Self {
|
||||
state,
|
||||
response_type: ResponseType::Html,
|
||||
query,
|
||||
limit,
|
||||
offset,
|
||||
snippet_size,
|
||||
}
|
||||
}
|
||||
|
||||
fn query_args(&self) -> QueryParameters {
|
||||
|
@ -126,8 +145,10 @@ impl Resource for SearchResource {
|
|||
|
||||
self.response_type = match accept.first() {
|
||||
Some(&QualityItem { item: ref mime, .. })
|
||||
if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON
|
||||
=> ResponseType::Json,
|
||||
if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON =>
|
||||
{
|
||||
ResponseType::Json
|
||||
}
|
||||
_ => ResponseType::Html,
|
||||
};
|
||||
}
|
||||
|
@ -138,9 +159,10 @@ impl Resource for SearchResource {
|
|||
&ResponseType::Html => ContentType(TEXT_HTML.clone()),
|
||||
};
|
||||
|
||||
Box::new(futures::finished(Response::new()
|
||||
Box::new(futures::finished(
|
||||
Response::new()
|
||||
.with_status(hyper::StatusCode::Ok)
|
||||
.with_header(content_type)
|
||||
.with_header(content_type),
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -163,17 +185,26 @@ impl Resource for SearchResource {
|
|||
}
|
||||
|
||||
// TODO: Show a search "front page" when no query is given:
|
||||
let query = self.query.as_ref().map(|x| x.clone()).unwrap_or("".to_owned());
|
||||
let query = self
|
||||
.query
|
||||
.as_ref()
|
||||
.map(|x| x.clone())
|
||||
.unwrap_or("".to_owned());
|
||||
|
||||
let data = self.state.search_query(query, (self.limit + 1) as i32, self.offset as i32, self.snippet_size as i32);
|
||||
let data = self.state.search_query(
|
||||
query,
|
||||
(self.limit + 1) as i32,
|
||||
self.offset as i32,
|
||||
self.snippet_size as i32,
|
||||
);
|
||||
let head = self.head();
|
||||
|
||||
Box::new(data.join(head)
|
||||
.and_then(move |(mut data, head)| {
|
||||
Box::new(data.join(head).and_then(move |(mut data, head)| {
|
||||
let prev = if self.offset > 0 {
|
||||
Some(self.query_args()
|
||||
Some(
|
||||
self.query_args()
|
||||
.offset(self.offset.saturating_sub(self.limit))
|
||||
.into_link()
|
||||
.into_link(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
|
@ -181,35 +212,38 @@ impl Resource for SearchResource {
|
|||
|
||||
let next = if data.len() > self.limit as usize {
|
||||
data.pop();
|
||||
Some(self.query_args()
|
||||
Some(
|
||||
self.query_args()
|
||||
.offset(self.offset + self.limit)
|
||||
.into_link()
|
||||
.into_link(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match &self.response_type {
|
||||
&ResponseType::Json => Ok(head
|
||||
.with_body(serde_json::to_string(&JsonResponse {
|
||||
&ResponseType::Json => Ok(head.with_body(
|
||||
serde_json::to_string(&JsonResponse {
|
||||
query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
|
||||
hits: &data,
|
||||
prev,
|
||||
next,
|
||||
}).expect("Should never fail"))
|
||||
),
|
||||
&ResponseType::Html => Ok(head.with_body(system_page(
|
||||
})
|
||||
.expect("Should never fail"),
|
||||
)),
|
||||
&ResponseType::Html => Ok(head.with_body(
|
||||
system_page(
|
||||
None, // Hmm, should perhaps accept `base` as argument
|
||||
"Search",
|
||||
&Template {
|
||||
query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
|
||||
hits: &data.iter()
|
||||
.enumerate()
|
||||
.collect::<Vec<_>>(),
|
||||
hits: &data.iter().enumerate().collect::<Vec<_>>(),
|
||||
prev,
|
||||
next,
|
||||
},
|
||||
).to_string())),
|
||||
)
|
||||
.to_string(),
|
||||
)),
|
||||
}
|
||||
}))
|
||||
}

@ -26,9 +26,10 @@ impl Resource for SitemapResource {
}

fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new()
Box::new(futures::finished(
Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone()))
.with_header(ContentType(TEXT_HTML.clone())),
))
}

@ -42,15 +43,17 @@ impl Resource for SitemapResource {
let data = self.state.get_latest_article_revision_stubs();
let head = self.head();

Box::new(data.join(head)
.and_then(move |(articles, head)| {
Ok(head.with_body(system_page(
Box::new(data.join(head).and_then(move |(articles, head)| {
Ok(head.with_body(
system_page(
None, // Hmm, should perhaps accept `base` as argument
"Sitemap",
Template {
articles: &articles,
},
).to_string()))
)
.to_string(),
))
}))
}
}

@ -15,8 +15,7 @@ impl TemporaryRedirectResource {
}

pub fn from_slug<S: AsRef<str>>(slug: S, edit: bool) -> Self {
let base =
if slug.as_ref().is_empty() {
let base = if slug.as_ref().is_empty() {
"."
} else {
slug.as_ref()

@ -25,7 +24,7 @@ impl TemporaryRedirectResource {
let tail = if edit { "?edit" } else { "" };

Self {
location: format!("{}{}", base, tail)
location: format!("{}{}", base, tail),
}
}
}

@ -37,18 +36,18 @@ impl Resource for TemporaryRedirectResource {
}

fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new()
Box::new(futures::finished(
Response::new()
.with_status(hyper::StatusCode::TemporaryRedirect)
.with_header(Location::new(self.location.clone()))
.with_header(Location::new(self.location.clone())),
))
}

fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head()
.and_then(move |head| {
Ok(head
.with_body(format!("Moved to {}", self.location)))
}))
Box::new(
self.head()
.and_then(move |head| Ok(head.with_body(format!("Moved to {}", self.location)))),
)
}

fn put(self: Box<Self>, _body: hyper::Body, _identity: Option<String>) -> ResponseFuture {

86
src/site.rs
@ -4,12 +4,12 @@
use std::fmt;

use futures::{self, Future};
use hyper;
use hyper::header::{Accept, ContentType, Server};
use hyper::mime;
use hyper::server::*;
use hyper;

use crate::assets::{ThemesCss, StyleCss, SearchJs};
use crate::assets::{SearchJs, StyleCss, ThemesCss};
use crate::build_config;
use crate::theme;
use crate::web::Lookup;

@ -17,8 +17,7 @@ use crate::wiki_lookup::WikiLookup;

lazy_static! {
static ref TEXT_HTML: mime::Mime = "text/html;charset=utf-8".parse().unwrap();
static ref SERVER: Server =
Server::new(build_config::HTTP_SERVER.as_str());
static ref SERVER: Server = Server::new(build_config::HTTP_SERVER.as_str());
}

header! { (XIdentity, "X-Identity") => [String] }

@ -33,12 +32,22 @@ pub struct Layout<'a, T: 'a + fmt::Display> {
|
|||
}
|
||||
|
||||
impl<'a, T: 'a + fmt::Display> Layout<'a, T> {
|
||||
pub fn themes_css(&self) -> &str { ThemesCss::resource_name() }
|
||||
pub fn style_css(&self) -> &str { StyleCss::resource_name() }
|
||||
pub fn search_js(&self) -> &str { SearchJs::resource_name() }
|
||||
pub fn themes_css(&self) -> &str {
|
||||
ThemesCss::resource_name()
|
||||
}
|
||||
pub fn style_css(&self) -> &str {
|
||||
StyleCss::resource_name()
|
||||
}
|
||||
pub fn search_js(&self) -> &str {
|
||||
SearchJs::resource_name()
|
||||
}
|
||||
|
||||
pub fn project_name(&self) -> &str { build_config::PROJECT_NAME }
|
||||
pub fn version(&self) -> &str { build_config::VERSION.as_str() }
|
||||
pub fn project_name(&self) -> &str {
|
||||
build_config::PROJECT_NAME
|
||||
}
|
||||
pub fn version(&self) -> &str {
|
||||
build_config::VERSION.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(BartDisplay)]
|
||||
|
@ -48,10 +57,13 @@ pub struct SystemPageLayout<'a, T: 'a + fmt::Display> {
|
|||
html_body: T,
|
||||
}
|
||||
|
||||
pub fn system_page<'a, T>(base: Option<&'a str>, title: &'a str, body: T)
|
||||
-> Layout<'a, SystemPageLayout<'a, T>>
|
||||
pub fn system_page<'a, T>(
|
||||
base: Option<&'a str>,
|
||||
title: &'a str,
|
||||
body: T,
|
||||
) -> Layout<'a, SystemPageLayout<'a, T>>
|
||||
where
|
||||
T: 'a + fmt::Display
|
||||
T: 'a + fmt::Display,
|
||||
{
|
||||
Layout {
|
||||
base,
|
||||
|
@ -79,30 +91,28 @@ pub struct Site {
|
|||
|
||||
impl Site {
|
||||
pub fn new(root: WikiLookup, trust_identity: bool) -> Site {
|
||||
Site { root, trust_identity }
|
||||
Site {
|
||||
root,
|
||||
trust_identity,
|
||||
}
|
||||
}
|
||||
|
||||
fn not_found(base: Option<&str>) -> Response {
|
||||
Response::new()
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_body(system_page(
|
||||
base,
|
||||
"Not found",
|
||||
NotFound,
|
||||
).to_string())
|
||||
.with_body(system_page(base, "Not found", NotFound).to_string())
|
||||
.with_status(hyper::StatusCode::NotFound)
|
||||
}
|
||||
|
||||
fn internal_server_error(base: Option<&str>, err: Box<dyn ::std::error::Error + Send + Sync>) -> Response {
|
||||
fn internal_server_error(
|
||||
base: Option<&str>,
|
||||
err: Box<dyn ::std::error::Error + Send + Sync>,
|
||||
) -> Response {
|
||||
eprintln!("Internal Server Error:\n{:#?}", err);
|
||||
|
||||
Response::new()
|
||||
.with_header(ContentType(TEXT_HTML.clone()))
|
||||
.with_body(system_page(
|
||||
base,
|
||||
"Internal server error",
|
||||
InternalServerError,
|
||||
).to_string())
|
||||
.with_body(system_page(base, "Internal server error", InternalServerError).to_string())
|
||||
.with_status(hyper::StatusCode::InternalServerError)
|
||||
}
|
||||
}
|
||||
|
@ -113,7 +123,7 @@ fn root_base_from_request_uri(path: &str) -> Option<String> {
|
|||
|
||||
match slashes {
|
||||
0 => None,
|
||||
n => Some(::std::iter::repeat("../").take(n).collect())
|
||||
n => Some(::std::iter::repeat("../").take(n).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,12 +143,17 @@ impl Service for Site {
|
|||
false => None,
|
||||
};
|
||||
|
||||
let accept_header = headers.get().map(|x: &Accept| x.clone()).unwrap_or(Accept(vec![]));
|
||||
let accept_header = headers
|
||||
.get()
|
||||
.map(|x: &Accept| x.clone())
|
||||
.unwrap_or(Accept(vec![]));
|
||||
|
||||
let base = root_base_from_request_uri(uri.path());
|
||||
let base2 = base.clone(); // Bah, stupid clone
|
||||
|
||||
Box::new(self.root.lookup(uri.path(), uri.query())
|
||||
Box::new(
|
||||
self.root
|
||||
.lookup(uri.path(), uri.query())
|
||||
.and_then(move |resource| match resource {
|
||||
Some(mut resource) => {
|
||||
use hyper::Method::*;
|
||||
|
@ -149,13 +164,20 @@ impl Service for Site {
|
|||
Get => resource.get(),
|
||||
Put => resource.put(body, identity),
|
||||
Post => resource.post(body, identity),
|
||||
_ => Box::new(futures::finished(resource.method_not_allowed()))
|
||||
_ => Box::new(futures::finished(resource.method_not_allowed())),
|
||||
}
|
||||
},
|
||||
None => Box::new(futures::finished(Self::not_found(base.as_ref().map(|x| &**x))))
|
||||
}
|
||||
None => Box::new(futures::finished(Self::not_found(
|
||||
base.as_ref().map(|x| &**x),
|
||||
))),
|
||||
})
|
||||
.or_else(move |err| Ok(Self::internal_server_error(base2.as_ref().map(|x| &**x), err)))
|
||||
.map(|response| response.with_header(SERVER.clone()))
|
||||
.or_else(move |err| {
|
||||
Ok(Self::internal_server_error(
|
||||
base2.as_ref().map(|x| &**x),
|
||||
err,
|
||||
))
|
||||
})
|
||||
.map(|response| response.with_header(SERVER.clone())),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
488
src/state.rs
@ -1,8 +1,8 @@
use std;

use diesel;
use diesel::sqlite::SqliteConnection;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use futures_cpupool::{self, CpuFuture};
use r2d2::Pool;
use r2d2_diesel::ConnectionManager;

@ -22,10 +22,7 @@ pub type Error = Box<dyn std::error::Error + Send + Sync>;

pub enum SlugLookup {
Miss,
Hit {
article_id: i32,
revision: i32,
},
Hit { article_id: i32, revision: i32 },
Redirect(String),
}

@ -52,7 +49,11 @@ pub struct RebaseConflict {

#[derive(Debug, PartialEq)]
enum RebaseResult {
Clean { title: String, body: String, theme: Theme },
Clean {
title: String,
body: String,
theme: Theme,
},
Conflict(RebaseConflict),
}

@ -61,7 +62,13 @@ pub enum UpdateResult {
|
|||
RebaseConflict(RebaseConflict),
|
||||
}
|
||||
|
||||
fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> {
|
||||
fn decide_slug(
|
||||
conn: &SqliteConnection,
|
||||
article_id: i32,
|
||||
prev_title: &str,
|
||||
title: &str,
|
||||
prev_slug: Option<&str>,
|
||||
) -> Result<String, Error> {
|
||||
let base_slug = ::slug::slugify(title);
|
||||
|
||||
if let Some(prev_slug) = prev_slug {
|
||||
|
@ -79,7 +86,11 @@ fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title
|
|||
}
|
||||
}
|
||||
|
||||
let base_slug = if base_slug.is_empty() { "article" } else { &base_slug };
|
||||
let base_slug = if base_slug.is_empty() {
|
||||
"article"
|
||||
} else {
|
||||
&base_slug
|
||||
};
|
||||
|
||||
use crate::schema::article_revisions;
|
||||
|
||||
|
@ -92,7 +103,8 @@ fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title
|
|||
.filter(article_revisions::slug.eq(&slug))
|
||||
.filter(article_revisions::latest.eq(true))
|
||||
.count()
|
||||
.first::<i64>(conn)? != 0;
|
||||
.first::<i64>(conn)?
|
||||
!= 0;
|
||||
|
||||
if !slug_in_use {
|
||||
break Ok(slug);
|
||||
|
@ -123,7 +135,11 @@ impl<'a> SyncState<'a> {
|
|||
.optional()?)
|
||||
}
|
||||
|
||||
pub fn get_article_revision(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevision>, Error> {
|
||||
pub fn get_article_revision(
|
||||
&self,
|
||||
article_id: i32,
|
||||
revision: i32,
|
||||
) -> Result<Option<models::ArticleRevision>, Error> {
|
||||
use crate::schema::article_revisions;
|
||||
|
||||
Ok(article_revisions::table
|
||||
|
@ -133,12 +149,15 @@ impl<'a> SyncState<'a> {
|
|||
.optional()?)
|
||||
}
|
||||
|
||||
pub fn query_article_revision_stubs<F>(&self, f: F) -> Result<Vec<models::ArticleRevisionStub>, Error>
|
||||
pub fn query_article_revision_stubs<F>(
|
||||
&self,
|
||||
f: F,
|
||||
) -> Result<Vec<models::ArticleRevisionStub>, Error>
|
||||
where
|
||||
F: 'static + Send + Sync,
|
||||
for <'x> F:
|
||||
FnOnce(article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>) ->
|
||||
for<'x> F: FnOnce(
|
||||
article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
|
||||
) -> article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
|
||||
{
|
||||
use crate::schema::article_revisions::dsl::*;
|
||||
|
||||
|
@ -154,19 +173,24 @@ impl<'a> SyncState<'a> {
|
|||
author,
|
||||
theme,
|
||||
))
|
||||
.load(self.db_connection)?
|
||||
)
|
||||
.load(self.db_connection)?)
|
||||
}
|
||||
|
||||
fn get_article_revision_stub(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevisionStub>, Error> {
|
||||
fn get_article_revision_stub(
|
||||
&self,
|
||||
article_id: i32,
|
||||
revision: i32,
|
||||
) -> Result<Option<models::ArticleRevisionStub>, Error> {
|
||||
use crate::schema::article_revisions;
|
||||
|
||||
Ok(self.query_article_revision_stubs(move |query| {
|
||||
Ok(self
|
||||
.query_article_revision_stubs(move |query| {
|
||||
query
|
||||
.filter(article_revisions::article_id.eq(article_id))
|
||||
.filter(article_revisions::revision.eq(revision))
|
||||
.limit(1)
|
||||
})?.pop())
|
||||
})?
|
||||
.pop())
|
||||
}
|
||||
|
||||
pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> {
|
||||
|
@ -180,7 +204,8 @@ impl<'a> SyncState<'a> {
|
|||
self.db_connection.transaction(|| {
|
||||
use crate::schema::article_revisions;
|
||||
|
||||
Ok(match article_revisions::table
|
||||
Ok(
|
||||
match article_revisions::table
|
||||
.filter(article_revisions::slug.eq(slug))
|
||||
.order(article_revisions::sequence_number.desc())
|
||||
.select((
|
||||
|
@ -201,15 +226,22 @@ impl<'a> SyncState<'a> {
|
|||
.filter(article_revisions::latest.eq(true))
|
||||
.filter(article_revisions::article_id.eq(stub.article_id))
|
||||
.select(article_revisions::slug)
|
||||
.first::<String>(self.db_connection)?
|
||||
.first::<String>(self.db_connection)?,
|
||||
),
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn rebase_update(&self, article_id: i32, target_base_revision: i32, existing_base_revision: i32, title: String, body: String, theme: Theme)
|
||||
-> Result<RebaseResult, Error>
|
||||
{
|
||||
fn rebase_update(
|
||||
&self,
|
||||
article_id: i32,
|
||||
target_base_revision: i32,
|
||||
existing_base_revision: i32,
|
||||
title: String,
|
||||
body: String,
|
||||
theme: Theme,
|
||||
) -> Result<RebaseResult, Error> {
|
||||
let mut title_a = title;
|
||||
let mut body_a = body;
|
||||
let mut theme_a = theme;
|
||||
|
@ -242,7 +274,11 @@ impl<'a> SyncState<'a> {
|
|||
|
||||
fn merge_themes(a: Theme, o: Theme, b: Theme) -> Theme {
|
||||
// Last change wins
|
||||
if a != o { a } else { b }
|
||||
if a != o {
|
||||
a
|
||||
} else {
|
||||
b
|
||||
}
|
||||
}
|
||||
|
||||
let update = {
|
||||
|
@ -254,12 +290,14 @@ impl<'a> SyncState<'a> {
|
|||
(Clean(title), Clean(body)) => (title, body, theme),
|
||||
(title_merge, body_merge) => {
|
||||
return Ok(RebaseResult::Conflict(RebaseConflict {
|
||||
base_article: self.get_article_revision_stub(article_id, revision+1)?.expect("Application layer guarantee"),
|
||||
base_article: self
|
||||
.get_article_revision_stub(article_id, revision + 1)?
|
||||
.expect("Application layer guarantee"),
|
||||
title: title_merge,
|
||||
body: body_merge.to_strings(),
|
||||
theme,
|
||||
}));
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -268,12 +306,22 @@ impl<'a> SyncState<'a> {
|
|||
theme_a = update.2;
|
||||
}
|
||||
|
||||
Ok(RebaseResult::Clean { title: title_a, body: body_a, theme: theme_a })
|
||||
Ok(RebaseResult::Clean {
|
||||
title: title_a,
|
||||
body: body_a,
|
||||
theme: theme_a,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>, theme: Option<Theme>)
|
||||
-> Result<UpdateResult, Error>
|
||||
{
|
||||
pub fn update_article(
|
||||
&self,
|
||||
article_id: i32,
|
||||
base_revision: i32,
|
||||
title: String,
|
||||
body: String,
|
||||
author: Option<String>,
|
||||
theme: Option<Theme>,
|
||||
) -> Result<UpdateResult, Error> {
|
||||
if title.is_empty() {
|
||||
Err("title cannot be empty")?;
|
||||
}
|
||||
|
@ -300,7 +348,14 @@ impl<'a> SyncState<'a> {
|
|||
}
|
||||
|
||||
let theme = theme.unwrap_or(prev_theme);
|
||||
let rebase_result = self.rebase_update(article_id, latest_revision, base_revision, title, body, theme)?;
|
||||
let rebase_result = self.rebase_update(
|
||||
article_id,
|
||||
latest_revision,
|
||||
base_revision,
|
||||
title,
|
||||
body,
|
||||
theme,
|
||||
)?;
|
||||
|
||||
let (title, body, theme) = match rebase_result {
|
||||
RebaseResult::Clean { title, body, theme } => (title, body, theme),
|
||||
|
@ -309,12 +364,18 @@ impl<'a> SyncState<'a> {
|
|||
|
||||
let new_revision = latest_revision + 1;
|
||||
|
||||
let slug = decide_slug(self.db_connection, article_id, &prev_title, &title, Some(&prev_slug))?;
|
||||
let slug = decide_slug(
|
||||
self.db_connection,
|
||||
article_id,
|
||||
&prev_title,
|
||||
&title,
|
||||
Some(&prev_slug),
|
||||
)?;
|
||||
|
||||
diesel::update(
|
||||
article_revisions::table
|
||||
.filter(article_revisions::article_id.eq(article_id))
|
||||
.filter(article_revisions::revision.eq(latest_revision))
|
||||
.filter(article_revisions::revision.eq(latest_revision)),
|
||||
)
|
||||
.set(article_revisions::latest.eq(false))
|
||||
.execute(self.db_connection)?;
|
||||
|
@ -332,17 +393,23 @@ impl<'a> SyncState<'a> {
|
|||
})
|
||||
.execute(self.db_connection)?;
|
||||
|
||||
Ok(UpdateResult::Success(article_revisions::table
|
||||
Ok(UpdateResult::Success(
|
||||
article_revisions::table
|
||||
.filter(article_revisions::article_id.eq(article_id))
|
||||
.filter(article_revisions::revision.eq(new_revision))
|
||||
.first::<models::ArticleRevision>(self.db_connection)?
|
||||
.first::<models::ArticleRevision>(self.db_connection)?,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>, theme: Theme)
|
||||
-> Result<models::ArticleRevision, Error>
|
||||
{
|
||||
pub fn create_article(
|
||||
&self,
|
||||
target_slug: Option<String>,
|
||||
title: String,
|
||||
body: String,
|
||||
author: Option<String>,
|
||||
theme: Theme,
|
||||
) -> Result<models::ArticleRevision, Error> {
|
||||
if title.is_empty() {
|
||||
Err("title cannot be empty")?;
|
||||
}
|
||||
|
@ -351,7 +418,7 @@ impl<'a> SyncState<'a> {
|
|||
#[derive(Insertable)]
|
||||
#[table_name = "articles"]
|
||||
struct NewArticle {
|
||||
id: Option<i32>
|
||||
id: Option<i32>,
|
||||
}
|
||||
|
||||
let article_id = {
|
||||
|
@ -361,10 +428,17 @@ impl<'a> SyncState<'a> {
|
|||
.execute(self.db_connection)?;
|
||||
sql::<(diesel::sql_types::Integer)>("SELECT LAST_INSERT_ROWID()")
|
||||
.load::<i32>(self.db_connection)?
|
||||
.pop().expect("Statement must evaluate to an integer")
|
||||
.pop()
|
||||
.expect("Statement must evaluate to an integer")
|
||||
};
|
||||
|
||||
let slug = decide_slug(self.db_connection, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?;
|
||||
let slug = decide_slug(
|
||||
self.db_connection,
|
||||
article_id,
|
||||
"",
|
||||
&title,
|
||||
target_slug.as_ref().map(|x| &**x),
|
||||
)?;
|
||||
|
||||
let new_revision = 1;
|
||||
|
||||
|
@ -384,12 +458,17 @@ impl<'a> SyncState<'a> {
|
|||
Ok(article_revisions::table
|
||||
.filter(article_revisions::article_id.eq(article_id))
|
||||
.filter(article_revisions::revision.eq(new_revision))
|
||||
.first::<models::ArticleRevision>(self.db_connection)?
|
||||
)
|
||||
.first::<models::ArticleRevision>(self.db_connection)?)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> Result<Vec<models::SearchResult>, Error> {
|
||||
pub fn search_query(
|
||||
&self,
|
||||
query_string: String,
|
||||
limit: i32,
|
||||
offset: i32,
|
||||
snippet_size: i32,
|
||||
) -> Result<Vec<models::SearchResult>, Error> {
|
||||
use diesel::sql_query;
|
||||
use diesel::sql_types::{Integer, Text};
|
||||
|
||||
|
@ -427,7 +506,10 @@ impl<'a> SyncState<'a> {
|
|||
}
|
||||
|
||||
impl State {
|
||||
pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
|
||||
pub fn new(
|
||||
connection_pool: Pool<ConnectionManager<SqliteConnection>>,
|
||||
cpu_pool: futures_cpupool::CpuPool,
|
||||
) -> State {
|
||||
State {
|
||||
connection_pool,
|
||||
cpu_pool,
|
||||
|
@ -453,21 +535,30 @@ impl State {
|
|||
self.execute(move |state| state.get_article_slug(article_id))
|
||||
}
|
||||
|
||||
pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
|
||||
pub fn get_article_revision(
|
||||
&self,
|
||||
article_id: i32,
|
||||
revision: i32,
|
||||
) -> CpuFuture<Option<models::ArticleRevision>, Error> {
|
||||
self.execute(move |state| state.get_article_revision(article_id, revision))
|
||||
}
|
||||
|
||||
pub fn query_article_revision_stubs<F>(&self, f: F) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
|
||||
pub fn query_article_revision_stubs<F>(
|
||||
&self,
|
||||
f: F,
|
||||
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
|
||||
where
|
||||
F: 'static + Send + Sync,
|
||||
for <'a> F:
|
||||
FnOnce(article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>) ->
|
||||
for<'a> F: FnOnce(
|
||||
article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
|
||||
) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
|
||||
{
|
||||
self.execute(move |state| state.query_article_revision_stubs(f))
|
||||
}
|
||||
|
||||
pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
|
||||
pub fn get_latest_article_revision_stubs(
|
||||
&self,
|
||||
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
|
||||
self.query_article_revision_stubs(|query| {
|
||||
query
|
||||
.filter(article_revisions::latest.eq(true))
|
||||
|
@ -479,19 +570,38 @@ impl State {
|
|||
self.execute(move |state| state.lookup_slug(slug))
|
||||
}
|
||||
|
||||
pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>, theme: Option<Theme>)
|
||||
-> CpuFuture<UpdateResult, Error>
|
||||
{
|
||||
self.execute(move |state| state.update_article(article_id, base_revision, title, body, author, theme))
|
||||
pub fn update_article(
|
||||
&self,
|
||||
article_id: i32,
|
||||
base_revision: i32,
|
||||
title: String,
|
||||
body: String,
|
||||
author: Option<String>,
|
||||
theme: Option<Theme>,
|
||||
) -> CpuFuture<UpdateResult, Error> {
|
||||
self.execute(move |state| {
|
||||
state.update_article(article_id, base_revision, title, body, author, theme)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>, theme: Theme)
|
||||
-> CpuFuture<models::ArticleRevision, Error>
|
||||
{
|
||||
pub fn create_article(
|
||||
&self,
|
||||
target_slug: Option<String>,
|
||||
title: String,
|
||||
body: String,
|
||||
author: Option<String>,
|
||||
theme: Theme,
|
||||
) -> CpuFuture<models::ArticleRevision, Error> {
|
||||
self.execute(move |state| state.create_article(target_slug, title, body, author, theme))
|
||||
}
|
||||
|
||||
pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> CpuFuture<Vec<models::SearchResult>, Error> {
|
||||
pub fn search_query(
|
||||
&self,
|
||||
query_string: String,
|
||||
limit: i32,
|
||||
offset: i32,
|
||||
snippet_size: i32,
|
||||
) -> CpuFuture<Vec<models::SearchResult>, Error> {
|
||||
self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size))
|
||||
}
|
||||
}
|
||||
|
@ -505,7 +615,7 @@ mod test {
|
|||
pub fn unwrap(self) -> models::ArticleRevision {
|
||||
match self {
|
||||
UpdateResult::Success(x) => x,
|
||||
_ => panic!("Expected success")
|
||||
_ => panic!("Expected success"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -514,7 +624,7 @@ mod test {
|
|||
($state:ident) => {
|
||||
let db = db::test_connection();
|
||||
let $state = SyncState::new(&db);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -526,7 +636,9 @@ mod test {
|
|||
#[test]
|
||||
fn create_article() {
|
||||
init!(state);
|
||||
let article_revision = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap();
|
||||
let article_revision = state
|
||||
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
assert_eq!("title", article_revision.slug);
|
||||
assert_eq!(true, article_revision.latest);
|
||||
assert_eq!(Theme::Cyan, article_revision.theme);
|
||||
|
@ -536,7 +648,15 @@ mod test {
|
|||
fn create_article_when_empty_slug_then_empty_slug() {
|
||||
// Front page gets to keep its empty slug
|
||||
init!(state);
|
||||
let article_revision = state.create_article(Some("".into()), "Title".into(), "Body".into(), None, Theme::Cyan).unwrap();
|
||||
let article_revision = state
|
||||
.create_article(
|
||||
Some("".into()),
|
||||
"Title".into(),
|
||||
"Body".into(),
|
||||
None,
|
||||
Theme::Cyan,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!("", article_revision.slug);
|
||||
}
|
||||
|
||||
|
@ -544,9 +664,21 @@ mod test {
|
|||
fn update_article() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let new_revision = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None, Some(Theme::BlueGray)).unwrap().unwrap();
|
||||
let new_revision = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"New body".into(),
|
||||
None,
|
||||
Some(Theme::BlueGray),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(article.article_id, new_revision.article_id);
|
||||
|
||||
|
@ -566,10 +698,32 @@ mod test {
|
|||
fn update_article_when_sequential_edits_then_last_wins() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None, Some(Theme::Blue)).unwrap().unwrap();
|
||||
let second_edit = state.update_article(article.article_id, first_edit.revision, article.title.clone(), "Newer body".into(), None, Some(Theme::Amber)).unwrap().unwrap();
|
||||
let first_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"New body".into(),
|
||||
None,
|
||||
Some(Theme::Blue),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let second_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
first_edit.revision,
|
||||
article.title.clone(),
|
||||
"Newer body".into(),
|
||||
None,
|
||||
Some(Theme::Amber),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("Newer body", second_edit.body);
|
||||
assert_eq!(Theme::Amber, second_edit.theme);
|
||||
|
@ -579,10 +733,32 @@ mod test {
|
|||
fn update_article_when_edit_conflict_then_merge() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None, Some(Theme::Blue)).unwrap().unwrap();
|
||||
let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(Theme::Amber)).unwrap().unwrap();
|
||||
let first_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nx\nb\nc\n".into(),
|
||||
None,
|
||||
Some(Theme::Blue),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let second_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nb\ny\nc\n".into(),
|
||||
None,
|
||||
Some(Theme::Amber),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert!(article.revision < first_edit.revision);
|
||||
assert!(first_edit.revision < second_edit.revision);
|
||||
|
@ -595,13 +771,55 @@ mod test {
|
|||
fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx1\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nx3\nb\nc\n".into(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nx1\nb\nc\n".into(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
edit.revision,
|
||||
article.title.clone(),
|
||||
"a\nx1\nx2\nb\nc\n".into(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
edit.revision,
|
||||
article.title.clone(),
|
||||
"a\nx1\nx2\nx3\nb\nc\n".into(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
let rebase_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let rebase_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nb\ny\nc\n".into(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert!(article.revision < edit.revision);
|
||||
assert!(edit.revision < rebase_edit.revision);
|
||||
|
@ -613,10 +831,32 @@ mod test {
|
|||
fn update_article_when_title_edit_conflict_then_merge_title() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "titlle".into(), "".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "titlle".into(), "".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let first_edit = state.update_article(article.article_id, article.revision, "Titlle".into(), article.body.clone(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let second_edit = state.update_article(article.article_id, article.revision, "title".into(), article.body.clone(), None, Some(article.theme)).unwrap().unwrap();
|
||||
let first_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
"Titlle".into(),
|
||||
article.body.clone(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let second_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
"title".into(),
|
||||
article.body.clone(),
|
||||
None,
|
||||
Some(article.theme),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert!(article.revision < first_edit.revision);
|
||||
assert!(first_edit.revision < second_edit.revision);
|
||||
|
@ -628,19 +868,51 @@ mod test {
|
|||
fn update_article_when_merge_conflict() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "a".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "a".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "b".into(), None, Some(Theme::Blue)).unwrap().unwrap();
|
||||
let conflict_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "c".into(), None, Some(Theme::Amber)).unwrap();
|
||||
let first_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"b".into(),
|
||||
None,
|
||||
Some(Theme::Blue),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let conflict_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"c".into(),
|
||||
None,
|
||||
Some(Theme::Amber),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
match conflict_edit {
|
||||
UpdateResult::Success(..) => panic!("Expected conflict"),
|
||||
UpdateResult::RebaseConflict(RebaseConflict { base_article, title, body, theme }) => {
|
||||
UpdateResult::RebaseConflict(RebaseConflict {
|
||||
base_article,
|
||||
title,
|
||||
body,
|
||||
theme,
|
||||
}) => {
|
||||
assert_eq!(first_edit.revision, base_article.revision);
|
||||
assert_eq!(title, merge::MergeResult::Clean(article.title.clone()));
|
||||
assert_eq!(body, merge::MergeResult::Conflicted(vec![
|
||||
merge::Output::Conflict(vec!["c"], vec!["a"], vec!["b"]),
|
||||
]).to_strings());
|
||||
assert_eq!(
|
||||
body,
|
||||
merge::MergeResult::Conflicted(vec![merge::Output::Conflict(
|
||||
vec!["c"],
|
||||
vec!["a"],
|
||||
vec!["b"]
|
||||
),])
|
||||
.to_strings()
|
||||
);
|
||||
assert_eq!(Theme::Amber, theme);
|
||||
}
|
||||
};
|
||||
|
@ -650,10 +922,32 @@ mod test {
|
|||
fn update_article_when_theme_conflict_then_ignore_unchanged() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let _first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None, Some(Theme::Blue)).unwrap().unwrap();
|
||||
let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None, Some(Theme::Cyan)).unwrap().unwrap();
|
||||
let _first_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nx\nb\nc\n".into(),
|
||||
None,
|
||||
Some(Theme::Blue),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let second_edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title.clone(),
|
||||
"a\nb\ny\nc\n".into(),
|
||||
None,
|
||||
Some(Theme::Cyan),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(Theme::Blue, second_edit.theme);
|
||||
}
|
||||
|
@ -662,9 +956,21 @@ mod test {
|
|||
fn update_article_with_no_given_theme_then_theme_unchanged() {
|
||||
init!(state);
|
||||
|
||||
let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan).unwrap();
|
||||
let article = state
|
||||
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
|
||||
.unwrap();
|
||||
|
||||
let edit = state.update_article(article.article_id, article.revision, article.title, article.body, None, None).unwrap().unwrap();
|
||||
let edit = state
|
||||
.update_article(
|
||||
article.article_id,
|
||||
article.revision,
|
||||
article.title,
|
||||
article.body,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(Theme::Cyan, edit.theme);
|
||||
}

52
src/theme.rs
|
@ -8,8 +8,7 @@ use diesel::sqlite::Sqlite;
|
|||
use rand;
|
||||
use seahash;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
#[derive(Serialize, Deserialize)] // Serde
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)] // Serde
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[derive(AsExpression, FromSqlRow)] // Diesel
|
||||
#[sql_type = "Text"]
|
||||
|
@@ -40,9 +39,10 @@ use self::Theme::*;
forward_display_to_serde!(Theme);
forward_from_str_to_serde!(Theme);

pub const THEMES: [Theme; 19] = [Red, Pink, Purple, DeepPurple, Indigo, Blue,
LightBlue, Cyan, Teal, Green, LightGreen, Lime, Yellow, Amber, Orange,
DeepOrange, Brown, Gray, BlueGray];
pub const THEMES: [Theme; 19] = [
Red, Pink, Purple, DeepPurple, Indigo, Blue, LightBlue, Cyan, Teal, Green, LightGreen, Lime,
Yellow, Amber, Orange, DeepOrange, Brown, Gray, BlueGray,
];

pub fn theme_from_str_hash(x: &str) -> Theme {
let hash = seahash::hash(x.as_bytes()) as usize;
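Only the hashing line of theme_from_str_hash is visible in this hunk; the indexing step that follows is outside the diff context. As a rough sketch of the overall technique (stable hash of the input, then index into a fixed palette), using std's DefaultHasher in place of the seahash crate and an assumed modulo-index step:

use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

// Hypothetical stand-in for a deterministic string -> palette mapping.
// The project hashes with seahash; the modulo indexing here is an assumption.
fn pick<'a>(palette: &'a [&'a str], input: &str) -> &'a str {
    let mut hasher = DefaultHasher::new();
    hasher.write(input.as_bytes());
    let hash = hasher.finish() as usize;
    // Valid as long as the palette is non-empty, like the 19-entry THEMES array above.
    palette[hash % palette.len()]
}

fn main() {
    let palette = ["red", "pink", "amber"];
    // The same input maps to the same entry every time within a run.
    assert_eq!(pick(&palette, "Title"), pick(&palette, "Title"));
}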
@@ -52,7 +52,8 @@ pub fn theme_from_str_hash(x: &str) -> Theme {

pub fn random() -> Theme {
use rand::Rng;
*rand::thread_rng().choose(&THEMES)
*rand::thread_rng()
.choose(&THEMES)
.expect("Could only fail for an empty slice")
}
@@ -73,7 +74,6 @@ impl FromSql<Text, Sqlite> for Theme {
}
}

pub struct CssClass(Theme);

impl Theme {
@@ -90,7 +90,6 @@ impl Display for CssClass {
}
}

#[cfg(test)]
mod test {
use std::error::Error;
@@ -111,13 +110,18 @@ mod test {

#[test]
fn serialize_kebab_case() {
assert_eq!(serde_plain::to_string(&Theme::LightGreen).unwrap(), "light-green");
assert_eq!(
serde_plain::to_string(&Theme::LightGreen).unwrap(),
"light-green"
);
}

#[test]
fn serialize_json() {
#[derive(Serialize)]
struct Test { x: Theme }
struct Test {
x: Theme,
}
assert_eq!(
serde_json::to_string(&Test { x: Theme::Red }).unwrap(),
"{\"x\":\"red\"}"
@@ -127,7 +131,9 @@ mod test {
#[test]
fn deserialize_json() {
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test { x: Theme }
struct Test {
x: Theme,
}
assert_eq!(
serde_json::from_str::<Test>("{\"x\":\"red\"}").unwrap(),
Test { x: Theme::Red }
@@ -137,7 +143,9 @@ mod test {
#[test]
fn serialize_urlencoded() {
#[derive(Serialize)]
struct Test { x: Theme }
struct Test {
x: Theme,
}
assert_eq!(
serde_urlencoded::to_string(&Test { x: Theme::Red }).unwrap(),
"x=red"
@@ -147,7 +155,9 @@ mod test {
#[test]
fn deserialize_urlencoded() {
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test { x: Theme }
struct Test {
x: Theme,
}
assert_eq!(
serde_urlencoded::from_str::<Test>("x=red").unwrap(),
Test { x: Theme::Red }
@@ -192,7 +202,10 @@ mod test {
let conn = SqliteConnection::establish(":memory:")?;

#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] theme: Theme }
struct Row {
#[sql_type = "Text"]
theme: Theme,
}

let res = sql_query("SELECT ? as theme")
.bind::<Text, _>(DeepPurple)
@@ -208,14 +221,15 @@ mod test {
let conn = SqliteConnection::establish(":memory:")?;

#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row { #[sql_type = "Text"] theme: Theme }
struct Row {
#[sql_type = "Text"]
theme: Theme,
}

let res = sql_query("SELECT 'green' as theme")
.load::<Row>(&conn);
let res = sql_query("SELECT 'green' as theme").load::<Row>(&conn);
assert!(res.is_ok());

let res = sql_query("SELECT 'blueish-yellow' as theme")
.load::<Row>(&conn);
let res = sql_query("SELECT 'blueish-yellow' as theme").load::<Row>(&conn);
assert!(res.is_err());

Ok(())
@@ -1,5 +1,5 @@
mod resource;
mod lookup;
mod resource;

pub use self::resource::*;
pub use self::lookup::*;
pub use self::resource::*;
@@ -1,7 +1,7 @@
use futures;
use futures::{Future, Stream};
use hyper::{self, header, mime, server};
use hyper::server::Response;
use hyper::{self, header, mime, server};
use std;

lazy_static! {
@@ -23,22 +23,24 @@ pub trait Resource {
}

fn put(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
where Self: 'static
where
Self: 'static,
{
Box::new(body
.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
Box::new(
body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
.map_err(Into::into)
.and_then(move |_| futures::finished(self.method_not_allowed()))
.and_then(move |_| futures::finished(self.method_not_allowed())),
)
}

fn post(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
where Self: 'static
where
Self: 'static,
{
Box::new(body
.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
Box::new(
body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
.map_err(Into::into)
.and_then(move |_| futures::finished(self.method_not_allowed()))
.and_then(move |_| futures::finished(self.method_not_allowed())),
)
}
@@ -2,8 +2,8 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::str::Utf8Error;

use futures::{Future, finished, failed, done};
use futures::future::FutureResult;
use futures::{done, failed, finished, Future};
use percent_encoding::percent_decode;
use slug::slugify;
@@ -54,9 +54,10 @@ fn split_one(path: &str) -> Result<(Cow<str>, Option<&str>), Utf8Error> {
Ok((head, tail))
}

fn map_lookup(map: &HashMap<&str, ResourceFn>, path: &str) ->
FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>>
{
fn map_lookup(
map: &HashMap<&str, ResourceFn>,
path: &str,
) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
let (head, tail) = match split_one(path) {
Ok(x) => x,
Err(x) => return failed(x.into()),
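map_lookup pairs the path's first segment with a table of resource constructors. A simplified, synchronous sketch of that dispatch shape, with illustrative names and types rather than the project's futures-based ones:

use std::collections::HashMap;

// Split off the first path segment; the remainder (if any) is handed to the handler.
fn split_one(path: &str) -> (&str, Option<&str>) {
    match path.find('/') {
        Some(i) => (&path[..i], Some(&path[i + 1..])),
        None => (path, None),
    }
}

fn main() {
    let mut routes: HashMap<&str, fn(Option<&str>) -> String> = HashMap::new();
    routes.insert("_about", |_| "about page".to_string());
    routes.insert("_assets", |tail| format!("asset: {}", tail.unwrap_or("")));

    let (head, tail) = split_one("_assets/style.css");
    // Unknown segments fall through to None, mirroring the Option<BoxResource> result above.
    let response = routes.get(head).map(|handler| handler(tail));
    assert_eq!(response.as_deref(), Some("asset: style.css"));
}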
@@ -73,9 +74,10 @@ fn map_lookup(map: &HashMap<&str, ResourceFn>, path: &str) ->
}

#[allow(unused)]
fn fs_lookup(root: &str, path: &str) ->
FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>>
{
fn fs_lookup(
root: &str,
path: &str,
) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
use std::fs::File;
use std::io::prelude::*;
@@ -87,17 +89,17 @@ fn fs_lookup(root: &str, path: &str) ->
Some("js") => "application/javascript",
Some("woff") => "application/font-woff",
_ => "application/binary",
}.parse().unwrap();
}
.parse()
.unwrap();

let mut filename = root.to_string();
filename.push_str(path);

let mut f = File::open(&filename)
.unwrap_or_else(|_| panic!("Not found: {}", filename));
let mut f = File::open(&filename).unwrap_or_else(|_| panic!("Not found: {}", filename));

let mut body = Vec::new();
f.read_to_end(&mut body)
.expect("Unable to read file");
f.read_to_end(&mut body).expect("Unable to read file");

finished(Some(Box::new(ReadOnlyResource { content_type, body })))
}
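fs_lookup, which the routing hunk further down guards behind the dynamic-assets feature, picks a content type from the file extension and reads the whole asset into memory. A minimal standalone approximation of that idea, without the hyper/futures types; the extension table and asset path here are illustrative, not the project's exact list:

use std::fs;
use std::path::Path;

// Guess a content type from the extension, falling back to a generic binary type,
// mirroring the match shown in the hunk above.
fn content_type_for(path: &str) -> &'static str {
    match Path::new(path).extension().and_then(|e| e.to_str()) {
        Some("js") => "application/javascript",
        Some("woff") => "application/font-woff",
        Some("css") => "text/css",
        _ => "application/binary",
    }
}

fn main() -> std::io::Result<()> {
    let path = "assets/style.css"; // hypothetical asset path
    println!("{}", content_type_for(path));
    // Reading the whole file up front keeps the serving path simple, at the cost of memory.
    let body: Vec<u8> = fs::read(path)?;
    println!("{} bytes", body.len());
    Ok(())
}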
@@ -108,7 +110,12 @@ impl WikiLookup {
let diff_lookup = DiffLookup::new(state.clone());
let search_lookup = SearchLookup::new(state.clone());

WikiLookup { state, changes_lookup, diff_lookup, search_lookup }
WikiLookup {
state,
changes_lookup,
diff_lookup,
search_lookup,
}
}

fn revisions_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Lookup>::Future {
@@ -126,12 +133,12 @@ impl WikiLookup {
};

Box::new(
self.state.get_article_revision(article_id, revision)
.and_then(|article_revision|
Ok(article_revision.map(move |x| Box::new(
ArticleRevisionResource::new(x)
) as BoxResource))
)
self.state
.get_article_revision(article_id, revision)
.and_then(|article_revision| {
Ok(article_revision
.map(move |x| Box::new(ArticleRevisionResource::new(x)) as BoxResource))
}),
)
}
@@ -148,14 +155,11 @@ impl WikiLookup {
Err(_) => return Box::new(finished(None)),
};

Box::new(
self.state.get_article_slug(article_id)
.and_then(|slug|
Ok(slug.map(|slug| Box::new(
TemporaryRedirectResource::new(format!("../{}", slug))
) as BoxResource))
)
)
Box::new(self.state.get_article_slug(article_id).and_then(|slug| {
Ok(slug.map(|slug| {
Box::new(TemporaryRedirectResource::new(format!("../{}", slug))) as BoxResource
}))
}))
}

fn diff_lookup_f(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future {
@@ -181,30 +185,30 @@ impl WikiLookup {
};

match (head.as_ref(), tail) {
("_about", None) =>
Box::new(finished(Some(Box::new(AboutResource::new()) as BoxResource))),
("_about", Some(license)) =>
Box::new(map_lookup(&LICENSES_MAP, license)),
("_about", None) => Box::new(finished(Some(
Box::new(AboutResource::new()) as BoxResource
))),
("_about", Some(license)) => Box::new(map_lookup(&LICENSES_MAP, license)),
#[cfg(feature = "dynamic-assets")]
("_assets", Some(asset)) =>
Box::new(fs_lookup(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"), asset)),
("_assets", Some(asset)) => Box::new(fs_lookup(
concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"),
asset,
)),
#[cfg(not(feature = "dynamic-assets"))]
("_assets", Some(asset)) =>
Box::new(map_lookup(&ASSETS_MAP, asset)),
("_by_id", Some(tail)) =>
self.by_id_lookup(tail, query),
("_changes", None) =>
Box::new(self.changes_lookup.lookup(query)),
("_diff", Some(tail)) =>
self.diff_lookup_f(tail, query),
("_new", None) =>
Box::new(finished(Some(Box::new(NewArticleResource::new(self.state.clone(), None, true)) as BoxResource))),
("_revisions", Some(tail)) =>
self.revisions_lookup(tail, query),
("_search", None) =>
Box::new(done(self.search_lookup.lookup(query))),
("_sitemap", None) =>
Box::new(finished(Some(Box::new(SitemapResource::new(self.state.clone())) as BoxResource))),
("_assets", Some(asset)) => Box::new(map_lookup(&ASSETS_MAP, asset)),
("_by_id", Some(tail)) => self.by_id_lookup(tail, query),
("_changes", None) => Box::new(self.changes_lookup.lookup(query)),
("_diff", Some(tail)) => self.diff_lookup_f(tail, query),
("_new", None) => Box::new(finished(Some(Box::new(NewArticleResource::new(
self.state.clone(),
None,
true,
)) as BoxResource))),
("_revisions", Some(tail)) => self.revisions_lookup(tail, query),
("_search", None) => Box::new(done(self.search_lookup.lookup(query))),
("_sitemap", None) => Box::new(finished(Some(Box::new(SitemapResource::new(
self.state.clone(),
)) as BoxResource))),
_ => Box::new(finished(None)),
}
}
@@ -226,7 +230,7 @@ impl WikiLookup {
let slugified_slug = slugify(&slug);
if slugified_slug != slug {
return Box::new(finished(Some(
Box::new(TemporaryRedirectResource::from_slug(slugified_slug, edit)) as BoxResource
Box::new(TemporaryRedirectResource::from_slug(slugified_slug, edit)) as BoxResource,
)));
}
@@ -234,16 +238,22 @@ impl WikiLookup {
let slug = slug.into_owned();

use crate::state::SlugLookup;
Box::new(self.state.lookup_slug(slug.clone())
.and_then(move |x| Ok(Some(match x {
SlugLookup::Miss =>
Box::new(NewArticleResource::new(state, Some(slug), edit)) as BoxResource,
SlugLookup::Hit { article_id, revision } =>
Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource,
SlugLookup::Redirect(slug) =>
Box::new(TemporaryRedirectResource::from_slug(slug, edit)) as BoxResource,
})))
)
Box::new(self.state.lookup_slug(slug.clone()).and_then(move |x| {
Ok(Some(match x {
SlugLookup::Miss => {
Box::new(NewArticleResource::new(state, Some(slug), edit)) as BoxResource
}
SlugLookup::Hit {
article_id,
revision,
} => {
Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource
}
SlugLookup::Redirect(slug) => {
Box::new(TemporaryRedirectResource::from_slug(slug, edit)) as BoxResource
}
}))
}))
}
}