cargo clippy --fix

parent 9320d14d89
commit 95a73b9471

26 changed files with 62 additions and 110 deletions
build.rs | 6
@@ -34,10 +34,8 @@ fn main() {
     let _ignore_failure = std::fs::remove_file(db_path);

-    let connection = SqliteConnection::establish(db_path).expect(&format!(
-        "Error esablishing a database connection to {}",
-        db_path
-    ));
+    let connection = SqliteConnection::establish(db_path)
+        .unwrap_or_else(|_| panic!("Error esablishing a database connection to {}", db_path));

     // Integer is a dummy placeholder. Compiling fails when passing ().
     diesel::expression::sql_literal::sql::<diesel::sql_types::Integer>("PRAGMA foreign_keys = ON")
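Note: this is the machine-applied fix for clippy's `expect_fun_call` lint: `expect(&format!(...))` formats the panic message even on success, while `unwrap_or_else(|_| panic!(...))` only formats it on the error path. A minimal sketch of the same pattern (the function and path here are illustrative, not from this repository):

    fn read_config(path: &str) -> String {
        // The message is built lazily, only if the read fails.
        std::fs::read_to_string(path)
            .unwrap_or_else(|_| panic!("Error reading config from {}", path))
    }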
@@ -23,7 +23,7 @@ lazy_static! {
         #[cfg(feature = "dynamic-assets")]
         components.push("dynamic-assets".into());

-        if let None = option_env!("CONTINUOUS_INTEGRATION") {
+        if option_env!("CONTINUOUS_INTEGRATION").is_none() {
             components.push("local-build".into());
         }
@@ -43,7 +43,7 @@ lazy_static! {
             ));
         }

-        if components.len() > 0 {
+        if !components.is_empty() {
             format!("{} ({})", env!("CARGO_PKG_VERSION"), components.join(" "))
         } else {
             env!("CARGO_PKG_VERSION").to_string()
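Note: the two hunks above apply clippy's `redundant_pattern_matching` fix (`if let None = expr {}` becomes `expr.is_none()`) and its `len_zero` fix (`len() > 0` becomes `!is_empty()`). An illustrative sketch with made-up names:

    fn label(components: &[String]) -> String {
        // is_none() / is_empty() replace the pattern match and length comparison.
        if std::env::var_os("CI").is_none() && !components.is_empty() {
            return components.join(" ");
        }
        String::new()
    }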
@@ -23,17 +23,17 @@ impl CustomizeConnection<SqliteConnection, r2d2_diesel::Error> for SqliteInitial
     fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), r2d2_diesel::Error> {
         sql::<Integer>("PRAGMA foreign_keys = ON")
             .execute(conn)
-            .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
+            .map_err(r2d2_diesel::Error::QueryError)?;

         sqlfunc::markdown_to_fts::register_impl(conn, |text: String| {
             rendering::render_markdown_for_fts(&text)
         })
-        .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
+        .map_err(r2d2_diesel::Error::QueryError)?;

         sqlfunc::theme_from_str_hash::register_impl(conn, |title: String| {
             theme::theme_from_str_hash(&title)
         })
-        .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
+        .map_err(r2d2_diesel::Error::QueryError)?;

         Ok(())
     }
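Note: replacing `.map_err(|x| r2d2_diesel::Error::QueryError(x))` with `.map_err(r2d2_diesel::Error::QueryError)` is clippy's `redundant_closure` fix; a closure that only forwards its argument can be replaced by the function or tuple-variant constructor itself. Sketch with an illustrative error type:

    #[derive(Debug)]
    enum AppError {
        Parse(std::num::ParseIntError),
    }

    fn parse_port(s: &str) -> Result<u16, AppError> {
        // Before: s.parse().map_err(|e| AppError::Parse(e))
        s.parse().map_err(AppError::Parse)
    }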
@@ -1,4 +1,3 @@
-use diff;
 use std::fmt::Debug;

 #[derive(Debug, PartialEq)]
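Note: the removed `use diff;` here, and the bare `use hyper;`, `use serde_json;`, `use std;`, and similar single-segment imports removed throughout the files below, are clippy's `single_component_path_imports` fix: on the 2018 edition a dependency's name is already in scope, so paths like `diff::chars(...)` resolve without importing the crate by name. Sketch, assuming a crate named `rand` in Cargo.toml:

    // `use rand;` would be flagged as redundant; the path works directly:
    fn coin_flip() -> bool {
        rand::random::<bool>()
    }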
@@ -1,6 +1,5 @@
 use std::fmt::Debug;

-use diff;
 use diff::Result::*;

 use super::chunk::Chunk;
@@ -68,7 +67,7 @@ where
                     return Some(chunk);
                 }
                 _ => {
-                    if self.left.len() > 0 || self.right.len() > 0 {
+                    if !self.left.is_empty() || !self.right.is_empty() {
                         let chunk = Chunk(self.left, self.right);
                         self.left = &self.left[self.left.len()..];
                         self.right = &self.right[self.right.len()..];
@@ -84,7 +83,6 @@ where
 #[cfg(test)]
 mod test {
     use super::*;
-    use diff;

     #[test]
     fn simple_case() {
@@ -4,8 +4,6 @@ mod output;

 use std::fmt::Debug;

-use diff;
-
 use self::chunk_iterator::ChunkIterator;
 use self::output::Output::Resolved;
 use self::output::*;
@@ -132,7 +130,6 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>

 #[cfg(test)]
 mod test {
-    use diff;

     use super::output::Output::*;
     use super::output::*;
@@ -1,6 +1,5 @@
 use std::fmt::Debug;

-use diff;
 use diff::Result::*;

 use super::chunk::Chunk;
@@ -67,17 +66,16 @@ pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>)
         return Output::Resolved(choose_right(chunk.0));
     }

-    return Output::Conflict(
+    Output::Conflict(
         choose_right(chunk.0),
         choose_left(chunk.0),
         choose_right(chunk.1),
-    );
+    )
 }

 #[cfg(test)]
 mod test {
     use super::*;
     use diff::Result::*;

     #[test]
     fn empty() {
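Note: dropping the trailing `return ...;` in favour of a tail expression is clippy's `needless_return` fix; early returns inside the body stay. Sketch:

    fn sign(x: i32) -> i32 {
        if x < 0 {
            return -1; // early return is fine
        }
        // Before: return if x == 0 { 0 } else { 1 };
        if x == 0 { 0 } else { 1 }
    }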
@@ -1,5 +1,3 @@
-use chrono;
-
 use crate::theme::Theme;

 fn slug_link(slug: &str) -> &str {
@@ -34,7 +34,7 @@ pub fn render_markdown_for_fts(src: &str) -> String {
                 buf.push_str(&uri.replace(is_html_special, " "));
                 buf.push_str(") ");
             }
-            _ => buf.push_str(" "),
+            _ => buf.push(' '),
         }
     }

@@ -1,5 +1,5 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;
@@ -88,7 +88,7 @@ impl Resource for AboutResource {
                     None, // Hmm, should perhaps accept `base` as argument
                     "About Sausagewiki",
                     Template {
-                        deps: &*LICENSE_INFOS,
+                        deps: *LICENSE_INFOS,
                     },
                 )
                 .to_string(),
@@ -1,10 +1,8 @@
 use chrono::{DateTime, Local, TimeZone};
 use futures::{self, Future};
-use hyper;

 use hyper::header::{ContentType, Location};
 use hyper::server::*;
-use serde_json;
-use serde_urlencoded;

 use crate::assets::ScriptJs;
 use crate::mimes::*;
@@ -90,7 +88,7 @@ pub fn last_updated(article_id: i32, created: &DateTime<Local>, author: Option<&
                 .into_link()
         ),
         author: author.map(|author| Author {
-            author: &author,
+            author,
             history: format!(
                 "_changes{}",
                 QueryParameters::default()
@@ -134,7 +132,7 @@ impl Resource for ArticleResource {
                         last_updated: Some(&last_updated(
                             data.article_id,
                             &Local.from_utc_datetime(&data.created),
-                            data.author.as_ref().map(|x| &**x),
+                            data.author.as_deref(),
                         )),
                         edit: self.edit,
                         cancel_url: Some(data.link()),
@@ -213,7 +211,7 @@ impl Resource for ArticleResource {
                         last_updated: &last_updated(
                             updated.article_id,
                             &Local.from_utc_datetime(&updated.created),
-                            updated.author.as_ref().map(|x| &**x),
+                            updated.author.as_deref(),
                         ),
                     })
                     .expect("Should never fail"),
@@ -245,7 +243,7 @@ impl Resource for ArticleResource {
                         last_updated: &last_updated(
                             base_article.article_id,
                             &Local.from_utc_datetime(&base_article.created),
-                            base_article.author.as_ref().map(|x| &**x),
+                            base_article.author.as_deref(),
                         ),
                     })
                     .expect("Should never fail"),
@@ -301,7 +299,7 @@ impl Resource for ArticleResource {
                         last_updated: Some(&last_updated(
                             base_article.article_id,
                             &Local.from_utc_datetime(&base_article.created),
-                            base_article.author.as_ref().map(|x| &**x),
+                            base_article.author.as_deref(),
                         )),
                         edit: true,
                         cancel_url: Some(base_article.link()),
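Note: the recurring `x.as_ref().map(|x| &**x)` → `x.as_deref()` rewrite in this file and in the resource and state hunks below is clippy's `option_as_ref_deref` fix: for an `Option<String>`, `as_deref()` yields `Option<&str>` directly. Sketch:

    fn display_author(author: Option<String>) -> String {
        // Before: author.as_ref().map(|x| &**x).unwrap_or("anonymous")
        format!("by {}", author.as_deref().unwrap_or("anonymous"))
    }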
@@ -1,6 +1,6 @@
 use chrono::{DateTime, Local, TimeZone};
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;

@@ -55,7 +55,7 @@ pub fn timestamp_and_author(
                 .into_link()
         ),
         author: author.map(|author| Author {
-            author: &author,
+            author,
             history: format!(
                 "_changes{}",
                 QueryParameters::default()
@@ -107,7 +107,7 @@ impl Resource for ArticleRevisionResource {
                         data.sequence_number,
                         data.article_id,
                         &Local.from_utc_datetime(&data.created),
-                        data.author.as_ref().map(|x| &**x),
+                        data.author.as_deref(),
                     ),
                     diff_link: if data.revision > 1 {
                         Some(format!(
@@ -1,10 +1,8 @@
-use diesel;
 use futures::future::{done, finished};
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;
-use serde_urlencoded;

 use crate::mimes::*;
 use crate::schema::article_revisions;
@@ -75,7 +73,7 @@ impl QueryParameters {

     pub fn into_link(self) -> String {
         let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail");
-        if args.len() > 0 {
+        if !args.is_empty() {
             format!("?{}", args)
         } else {
             "_changes".to_owned()
@@ -127,7 +125,7 @@ impl ChangesLookup {

         let limit = match params.limit {
             None => Ok(DEFAULT_LIMIT),
-            Some(x) if 1 <= x && x <= 100 => Ok(x),
+            Some(x) if (1..=100).contains(&x) => Ok(x),
             _ => Err("`limit` argument must be in range [1, 100]"),
         }?;

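Note: `1 <= x && x <= 100` → `(1..=100).contains(&x)` is clippy's `manual_range_contains` fix; the inclusive range spells out the same bounds check. Sketch with an illustrative default value:

    fn validate_limit(limit: Option<i32>) -> Result<i32, &'static str> {
        match limit {
            None => Ok(30), // default chosen for the example only
            Some(x) if (1..=100).contains(&x) => Ok(x),
            _ => Err("`limit` must be in range [1, 100]"),
        }
    }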
@@ -309,7 +307,7 @@ impl Resource for ChangesResource {
     fn subject_clause(&self) -> String {
         match self.resource.article_id {
             Some(x) => format!(" <a href=\"_by_id/{}\">this article</a>", x),
-            None => format!(" the wiki"),
+            None => " the wiki".to_string(),
         }
     }

@@ -332,8 +330,8 @@ impl Resource for ChangesResource {
         }

         let (before, article_id, author, limit) = (
-            self.before.clone(),
-            self.article_id.clone(),
+            self.before,
+            self.article_id,
             self.author.clone(),
             self.limit,
         );
@@ -1,12 +1,10 @@
 use std::fmt;

-use diff;
 use futures::future::done;
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;
-use serde_urlencoded;

 use crate::mimes::*;
 use crate::models::ArticleRevision;
@@ -127,7 +125,7 @@ impl Resource for DiffResource {
         let consecutive = self.to.revision - self.from.revision == 1;

         let author = match consecutive {
-            true => self.to.author.as_ref().map(|x| &**x),
+            true => self.to.author.as_deref(),
             false => None,
         };

@@ -1,5 +1,5 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;

@@ -1,9 +1,7 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::{ContentType, Location};
 use hyper::server::*;
-use serde_json;
-use serde_urlencoded;

 use crate::assets::ScriptJs;
 use crate::mimes::*;
@@ -100,7 +98,7 @@ impl Resource for NewArticleResource {
                         revision: NEW,
                         last_updated: None,
                         edit: self.edit,
-                        cancel_url: self.slug.as_ref().map(|x| &**x),
+                        cancel_url: self.slug.as_deref(),
                         title: &title,
                         raw: "",
                         rendered: EMPTY_ARTICLE_MESSAGE,
@@ -182,7 +180,7 @@ impl Resource for NewArticleResource {
                         last_updated: &super::article_resource::last_updated(
                             updated.article_id,
                             &Local.from_utc_datetime(&updated.created),
-                            updated.author.as_ref().map(|x| &**x),
+                            updated.author.as_deref(),
                         ),
                     })
                     .expect("Should never fail"),
@@ -40,9 +40,9 @@ impl<T> PaginationStruct<T> {

 pub fn _from_str<'a, T: serde::Deserialize<'a>>(s: &'a str) -> Result<Pagination<T>, Error> {
     let pagination: PaginationStruct<T> = serde_urlencoded::from_str(s).map_err(|_| Error)?; // TODO Proper error reporting
-    Ok(pagination.into_enum()?)
+    pagination.into_enum()
 }

 pub fn from_fields<T>(after: Option<T>, before: Option<T>) -> Result<Pagination<T>, Error> {
-    Ok(PaginationStruct { after, before }.into_enum()?)
+    PaginationStruct { after, before }.into_enum()
 }
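Note: `Ok(expr?)` → `expr` is clippy's `needless_question_mark` fix; when the error types already match, unwrapping with `?` only to rewrap in `Ok` is a no-op. Sketch:

    fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
        // Before: Ok(s.parse()?)
        s.parse()
    }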
@@ -1,9 +1,7 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::{Accept, ContentType};
 use hyper::server::*;
-use serde_json;
-use serde_urlencoded;

 use crate::mimes::*;
 use crate::models::SearchResult;
@@ -56,7 +54,7 @@ impl QueryParameters {

     pub fn into_link(self) -> String {
         let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail");
-        if args.len() > 0 {
+        if !args.is_empty() {
             format!("_search?{}", args)
         } else {
             "_search".to_owned()
@@ -185,11 +183,7 @@ impl Resource for SearchResource {
         }

         // TODO: Show a search "front page" when no query is given:
-        let query = self
-            .query
-            .as_ref()
-            .map(|x| x.clone())
-            .unwrap_or("".to_owned());
+        let query = self.query.as_ref().cloned().unwrap_or("".to_owned());

         let data = self.state.search_query(
             query,
@@ -224,7 +218,7 @@ impl Resource for SearchResource {
         match &self.response_type {
             &ResponseType::Json => Ok(head.with_body(
                 serde_json::to_string(&JsonResponse {
-                    query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
+                    query: self.query.as_deref().unwrap_or(""),
                     hits: &data,
                     prev,
                     next,
@@ -236,7 +230,7 @@ impl Resource for SearchResource {
                     None, // Hmm, should perhaps accept `base` as argument
                     "Search",
                     &Template {
-                        query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
+                        query: self.query.as_deref().unwrap_or(""),
                         hits: &data.iter().enumerate().collect::<Vec<_>>(),
                         prev,
                         next,
@@ -1,5 +1,5 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::ContentType;
 use hyper::server::*;

@@ -1,5 +1,5 @@
 use futures::{self, Future};
-use hyper;

 use hyper::header::Location;
 use hyper::server::*;

src/site.rs | 22
@@ -4,7 +4,7 @@
 use std::fmt;

 use futures::{self, Future};
-use hyper;

 use hyper::header::{Accept, ContentType, Server};
 use hyper::mime;
 use hyper::server::*;
@@ -118,12 +118,12 @@ impl Site {
 }

 fn root_base_from_request_uri(path: &str) -> Option<String> {
-    assert!(path.starts_with("/"));
+    assert!(path.starts_with('/'));
     let slashes = path[1..].matches('/').count();

     match slashes {
         0 => None,
-        n => Some(::std::iter::repeat("../").take(n).collect()),
+        n => Some("../".repeat(n)),
     }
 }

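Note: this hunk combines clippy's `single_char_pattern` fix (`starts_with("/")` → `starts_with('/')`, a char pattern avoids substring matching) with `manual_str_repeat` (`iter::repeat("../").take(n).collect()` → `"../".repeat(n)`). Sketch of the resulting shape:

    fn relative_prefix(path: &str) -> String {
        assert!(path.starts_with('/'));
        let depth = path[1..].matches('/').count();
        "../".repeat(depth)
    }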
@@ -143,10 +143,7 @@ impl Service for Site {
             false => None,
         };

-        let accept_header = headers
-            .get()
-            .map(|x: &Accept| x.clone())
-            .unwrap_or(Accept(vec![]));
+        let accept_header = headers.get().cloned().unwrap_or(Accept(vec![]));

         let base = root_base_from_request_uri(uri.path());
         let base2 = base.clone(); // Bah, stupid clone
@@ -167,16 +164,9 @@ impl Service for Site {
                         _ => Box::new(futures::finished(resource.method_not_allowed())),
                     }
                 }
-                None => Box::new(futures::finished(Self::not_found(
-                    base.as_ref().map(|x| &**x),
-                ))),
+                None => Box::new(futures::finished(Self::not_found(base.as_deref()))),
             })
-            .or_else(move |err| {
-                Ok(Self::internal_server_error(
-                    base2.as_ref().map(|x| &**x),
-                    err,
-                ))
-            })
+            .or_else(move |err| Ok(Self::internal_server_error(base2.as_deref(), err)))
             .map(|response| response.with_header(SERVER.clone())),
         )
     }
src/state.rs | 23
@@ -1,6 +1,3 @@
-use std;
-
-use diesel;
 use diesel::prelude::*;
 use diesel::sqlite::SqliteConnection;
 use futures_cpupool::{self, CpuFuture};
@@ -72,7 +69,7 @@ fn decide_slug(
     let base_slug = ::slug::slugify(title);

     if let Some(prev_slug) = prev_slug {
-        if prev_slug == "" {
+        if prev_slug.is_empty() {
             // Never give a non-empty slug to the front page
             return Ok(String::new());
         }
@@ -313,7 +310,7 @@ impl<'a> SyncState<'a> {
         theme: Option<Theme>,
     ) -> Result<UpdateResult, Error> {
         if title.is_empty() {
-            Err("title cannot be empty")?;
+            return Err("title cannot be empty".into());
         }

         self.db_connection.transaction(|| {
@@ -332,7 +329,7 @@ impl<'a> SyncState<'a> {
             // This scheme would make POST idempotent.

             if base_revision > latest_revision {
-                Err("This edit is based on a future version of the article")?;
+                return Err("This edit is based on a future version of the article".into());
             }

             let theme = theme.unwrap_or(prev_theme);
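Note: `Err(msg)?;` → `return Err(msg.into());` is clippy's `try_err` fix; using `?` on a freshly built `Err` is just an obfuscated early return, and `.into()` keeps the conversion into the function's error type explicit. Sketch, assuming a boxed error type:

    fn check_title(title: &str) -> Result<(), Box<dyn std::error::Error>> {
        if title.is_empty() {
            // Before: Err("title cannot be empty")?;
            return Err("title cannot be empty".into());
        }
        Ok(())
    }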
@@ -375,7 +372,7 @@ impl<'a> SyncState<'a> {
                     slug: &slug,
                     title: &title,
                     body: &body,
-                    author: author.as_ref().map(|x| &**x),
+                    author: author.as_deref(),
                     latest: true,
                     theme,
                 })
@@ -399,7 +396,7 @@ impl<'a> SyncState<'a> {
         theme: Theme,
     ) -> Result<models::ArticleRevision, Error> {
         if title.is_empty() {
-            Err("title cannot be empty")?;
+            return Err("title cannot be empty".into());
         }

         self.db_connection.transaction(|| {
@@ -425,7 +422,7 @@ impl<'a> SyncState<'a> {
                 article_id,
                 "",
                 &title,
-                target_slug.as_ref().map(|x| &**x),
+                target_slug.as_deref(),
             )?;

             let new_revision = 1;
@@ -437,7 +434,7 @@ impl<'a> SyncState<'a> {
                     slug: &slug,
                     title: &title,
                     body: &body,
-                    author: author.as_ref().map(|x| &**x),
+                    author: author.as_deref(),
                     latest: true,
                     theme,
                 })
@@ -705,7 +702,7 @@ mod test {
             .update_article(
                 article.article_id,
                 first_edit.revision,
-                article.title.clone(),
+                article.title,
                 "Newer body".into(),
                 None,
                 Some(Theme::Amber),
@@ -891,7 +888,7 @@ mod test {
                 theme,
             }) => {
                 assert_eq!(first_edit.revision, base_article.revision);
-                assert_eq!(title, merge::MergeResult::Clean(article.title.clone()));
+                assert_eq!(title, merge::MergeResult::Clean(article.title));
                 assert_eq!(
                     body,
                     merge::MergeResult::Conflicted(vec![merge::Output::Conflict(
@@ -929,7 +926,7 @@ mod test {
             .update_article(
                 article.article_id,
                 article.revision,
-                article.title.clone(),
+                article.title,
                 "a\nb\ny\nc\n".into(),
                 None,
                 Some(Theme::Cyan),
@@ -5,8 +5,6 @@ use diesel::deserialize::{self, FromSql};
 use diesel::serialize::{self, Output, ToSql};
 use diesel::sql_types::Text;
 use diesel::sqlite::Sqlite;
-use rand;
-use seahash;

 #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)] // Serde
 #[serde(rename_all = "kebab-case")]
@@ -97,9 +95,6 @@ mod test {
     use diesel::prelude::*;
     use diesel::sql_query;
     use diesel::sql_types::Text;
-    use serde_json;
-    use serde_plain;
-    use serde_urlencoded;

     use super::*;

@@ -1,5 +1,3 @@
-use futures;
-
 pub trait Lookup {
     type Resource;
     type Error;
@@ -1,8 +1,6 @@
-use futures;
 use futures::{Future, Stream};
 use hyper::server::Response;
 use hyper::{self, header, mime, server};
-use std;

 lazy_static! {
     static ref TEXT_PLAIN: mime::Mime = "text/plain;charset=utf-8".parse().unwrap();
@@ -81,7 +81,7 @@ fn fs_lookup(
     use std::fs::File;
     use std::io::prelude::*;

-    let extension = path.rsplitn(2, ".").next();
+    let extension = path.rsplit_once('.').map(|x| x.1);

     let content_type = match extension {
         Some("html") => "text/html",
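Note: `path.rsplitn(2, ".").next()` → `path.rsplit_once('.').map(|x| x.1)` is clippy's `manual_split_once` fix (with `single_char_pattern` folded in). One behavioural nuance worth knowing: `rsplit_once` returns `None` when there is no `.`, whereas `rsplitn(2, ".").next()` returned the whole string, so extension-less paths now fall through the `match` to its default arm. Sketch:

    fn extension(path: &str) -> Option<&str> {
        // "style.css" -> Some("css"); "README" -> None
        path.rsplit_once('.').map(|(_, ext)| ext)
    }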
@@ -263,10 +263,10 @@ impl Lookup for WikiLookup {
     type Future = Box<dyn Future<Item = Option<Self::Resource>, Error = Self::Error>>;

     fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future {
-        assert!(path.starts_with("/"));
+        assert!(path.starts_with('/'));
         let path = &path[1..];

-        if path.starts_with("_") {
+        if path.starts_with('_') {
             self.reserved_lookup(path, query)
         } else {
             self.article_lookup(path, query)