2017-08-20 22:59:16 +03:00
|
|
|
use std;
|
|
|
|
|
2017-09-05 18:07:57 +03:00
|
|
|
use diesel;
|
2017-08-20 22:59:16 +03:00
|
|
|
use diesel::sqlite::SqliteConnection;
|
2017-08-21 00:44:52 +03:00
|
|
|
use diesel::prelude::*;
|
2017-09-08 17:21:24 +03:00
|
|
|
use futures_cpupool::{self, CpuFuture};
|
2017-09-05 15:55:10 +03:00
|
|
|
use r2d2::Pool;
|
|
|
|
use r2d2_diesel::ConnectionManager;
|
2017-08-20 22:59:16 +03:00
|
|
|
|
2017-08-20 23:17:16 +03:00
|
|
|
use models;
|
2017-09-21 12:38:52 +03:00
|
|
|
use schema::*;
|
2017-08-20 23:17:16 +03:00
|
|
|
|
/// Shared application state handed to each request handler.
///
/// `Clone` is derived so every handler can hold its own handle; both fields
/// are pool handles, and all real work is dispatched through them.
#[derive(Clone)]
pub struct State {
    // r2d2-managed pool of SQLite connections; one is checked out per operation.
    connection_pool: Pool<ConnectionManager<SqliteConnection>>,
    // Worker-thread pool: all blocking database work is run here via
    // `spawn_fn`, keeping it off the calling (event-loop) thread.
    cpu_pool: futures_cpupool::CpuPool,
}
|
/// Catch-all boxed error type used by every operation in this module.
/// `Send + Sync` is required because results are produced on the CPU pool
/// and travel back across thread boundaries.
pub type Error = Box<std::error::Error + Send + Sync>;
/// Outcome of resolving a URL slug to an article (see `State::lookup_slug`).
pub enum SlugLookup {
    /// No revision has ever used this slug.
    Miss,
    /// The slug is the current slug of an article.
    Hit {
        article_id: i32,
        revision: i32,
    },
    /// The slug belonged to an article that has since been renamed; the
    /// payload is that article's current slug, for an HTTP redirect.
    Redirect(String),
}
/// Insertable row for the `article_revisions` table; borrows its string
/// fields from the caller to avoid copies during insertion.
#[derive(Insertable)]
#[table_name="article_revisions"]
struct NewRevision<'a> {
    article_id: i32,
    revision: i32,
    slug: &'a str,
    title: &'a str,
    body: &'a str,
    // None for anonymous edits.
    author: Option<&'a str>,
    // True only on the newest revision of an article; the previous head row
    // is flipped to false when a new revision is inserted (see update_article).
    latest: bool,
}
/// Choose the slug for a new revision of `article_id` titled `title`.
///
/// `prev_slug` is `None` when the article is being created. Rules, in order:
/// * the front page (empty previous slug) always keeps its empty slug,
/// * an unchanged title keeps the previous slug,
/// * a title whose slugified form equals the previous slug keeps it,
/// * otherwise the slugified title is used, disambiguated with a numeric
///   suffix ("-2", "-3", …) until no *other* article currently uses it.
fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> {
    let base_slug = ::slug::slugify(title);

    if let Some(prev_slug) = prev_slug {
        if prev_slug == "" {
            // Never give a non-empty slug to the front page
            return Ok(String::new());
        }

        if title == prev_title {
            return Ok(prev_slug.to_owned());
        }

        if base_slug == prev_slug {
            return Ok(base_slug);
        }
    }

    use schema::article_revisions;

    let mut slug = base_slug.clone();
    let mut disambiguator = 1;

    loop {
        // A slug is "in use" only when it is the *latest* slug of some other
        // article; historical slugs of renamed articles may be reclaimed.
        let slug_in_use = article_revisions::table
            .filter(article_revisions::article_id.ne(article_id))
            .filter(article_revisions::slug.eq(&slug))
            .filter(article_revisions::latest.eq(true))
            .count()
            .first::<i64>(conn)? != 0;

        if !slug_in_use {
            break Ok(slug);
        }

        // Note: the first disambiguated candidate is "<base>-2", not "-1".
        disambiguator += 1;
        slug = format!("{}-{}", base_slug, disambiguator);
    }
}
2017-08-20 22:59:16 +03:00
|
|
|
impl State {
|
2017-09-10 13:29:33 +03:00
|
|
|
pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
|
2017-09-08 17:21:24 +03:00
|
|
|
State {
|
|
|
|
connection_pool,
|
2017-09-10 13:29:33 +03:00
|
|
|
cpu_pool,
|
2017-09-08 17:21:24 +03:00
|
|
|
}
|
2017-08-20 22:59:16 +03:00
|
|
|
}
|
|
|
|
|
2017-10-24 11:30:12 +03:00
|
|
|
pub fn get_article_slug(&self, article_id: i32) -> CpuFuture<Option<String>, Error> {
|
|
|
|
let connection_pool = self.connection_pool.clone();
|
|
|
|
|
|
|
|
self.cpu_pool.spawn_fn(move || {
|
|
|
|
use schema::article_revisions;
|
|
|
|
|
|
|
|
Ok(article_revisions::table
|
|
|
|
.filter(article_revisions::article_id.eq(article_id))
|
|
|
|
.filter(article_revisions::latest.eq(true))
|
|
|
|
.select((article_revisions::slug))
|
|
|
|
.first::<String>(&*connection_pool.get()?)
|
|
|
|
.optional()?)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
    /// Fetch one specific revision of an article.
    ///
    /// Resolves to `None` when the article or that revision number does not
    /// exist. Runs on the CPU pool like all other database operations.
    pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use schema::article_revisions;

            Ok(article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.eq(revision))
                .first::<models::ArticleRevision>(&*connection_pool.get()?)
                .optional()?)
        })
    }
    /// Run a caller-shaped query against `article_revisions`, returning stub
    /// rows (every column except the body).
    ///
    /// The closure `f` receives a boxed query and may add filters/ordering
    /// before it is executed on the CPU pool. The HRTB bound lets `f` work
    /// for whatever lifetime the boxed query ends up with.
    pub fn query_article_revision_stubs<F>(&self, f: F) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
    where
        F: 'static + Send + Sync,
        for <'a> F:
            FnOnce(article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>) ->
                article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
    {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use schema::article_revisions::dsl::*;

            Ok(f(article_revisions.into_boxed())
                // Column list must stay in sync with models::ArticleRevisionStub.
                .select((
                    sequence_number,
                    article_id,
                    revision,
                    created,
                    slug,
                    title,
                    latest,
                    author,
                ))
                .load(&*connection_pool.get()?)?
            )
        })
    }
2017-10-13 16:06:16 +03:00
|
|
|
pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
|
2017-10-24 22:23:18 +03:00
|
|
|
self.query_article_revision_stubs(|query| {
|
|
|
|
query
|
2017-10-13 16:06:16 +03:00
|
|
|
.filter(article_revisions::latest.eq(true))
|
|
|
|
.order(article_revisions::title.asc())
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
    /// Resolve a URL slug to an article.
    ///
    /// Finds the most recent revision (highest `sequence_number`) that ever
    /// used `slug`. If that revision is still its article's latest, the slug
    /// is current (`Hit`); otherwise the article has been renamed and the
    /// caller gets a `Redirect` to its current slug; no match is a `Miss`.
    pub fn lookup_slug(&self, slug: String) -> CpuFuture<SlugLookup, Error> {
        // Local projection of just the columns this lookup needs.
        #[derive(Queryable)]
        struct ArticleRevisionStub {
            article_id: i32,
            revision: i32,
            latest: bool,
        }

        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let conn = connection_pool.get()?;

            // Transaction so the redirect-target query below sees the same
            // snapshot as the initial slug lookup.
            conn.transaction(|| {
                use schema::article_revisions;

                Ok(match article_revisions::table
                    .filter(article_revisions::slug.eq(slug))
                    .order(article_revisions::sequence_number.desc())
                    .select((
                        article_revisions::article_id,
                        article_revisions::revision,
                        article_revisions::latest,
                    ))
                    .first::<ArticleRevisionStub>(&*conn)
                    .optional()?
                {
                    None => SlugLookup::Miss,
                    Some(ref stub) if stub.latest => SlugLookup::Hit {
                        article_id: stub.article_id,
                        revision: stub.revision,
                    },
                    // Slug is stale: fetch the article's current slug.
                    Some(stub) => SlugLookup::Redirect(
                        article_revisions::table
                            .filter(article_revisions::latest.eq(true))
                            .filter(article_revisions::article_id.eq(stub.article_id))
                            .select(article_revisions::slug)
                            .first::<String>(&*conn)?
                    )
                })
            })
        })
    }
    /// Store a new revision of an existing article.
    ///
    /// `base_revision` must equal the article's current latest revision;
    /// editing against a stale base currently panics via `unimplemented!`
    /// (conflict merging is a TODO). Returns the newly created revision.
    pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
        -> CpuFuture<models::ArticleRevision, Error>
    {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let conn = connection_pool.get()?;

            // One transaction covers conflict check, demotion and insert so
            // concurrent edits cannot interleave between the steps.
            conn.transaction(|| {
                use schema::article_revisions;

                let (latest_revision, prev_title, prev_slug) = article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .order(article_revisions::revision.desc())
                    .select((
                        article_revisions::revision,
                        article_revisions::title,
                        article_revisions::slug,
                    ))
                    .first::<(i32, String, String)>(&*conn)?;

                if latest_revision != base_revision {
                    // TODO: If it is the same edit repeated, just respond OK
                    // TODO: If there is a conflict, transform the edit to work seamlessly
                    unimplemented!("TODO Missing handling of revision conflicts");
                }
                let new_revision = base_revision + 1;

                let slug = decide_slug(&*conn, article_id, &prev_title, &title, Some(&prev_slug))?;

                // Demote the previous head revision...
                diesel::update(
                    article_revisions::table
                        .filter(article_revisions::article_id.eq(article_id))
                        .filter(article_revisions::revision.eq(base_revision))
                )
                    .set(article_revisions::latest.eq(false))
                    .execute(&*conn)?;

                // ...then insert the new head.
                diesel::insert(&NewRevision {
                    article_id,
                    revision: new_revision,
                    slug: &slug,
                    title: &title,
                    body: &body,
                    author: author.as_ref().map(|x| &**x),
                    latest: true,
                })
                    .into(article_revisions::table)
                    .execute(&*conn)?;

                // Read the row back so the caller gets DB-generated columns
                // (e.g. sequence number / timestamps) populated.
                Ok(article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .filter(article_revisions::revision.eq(new_revision))
                    .first::<models::ArticleRevision>(&*conn)?
                )
            })
        })
    }
2017-10-18 17:33:21 +03:00
|
|
|
pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
|
2017-09-21 12:38:52 +03:00
|
|
|
-> CpuFuture<models::ArticleRevision, Error>
|
|
|
|
{
|
|
|
|
let connection_pool = self.connection_pool.clone();
|
|
|
|
|
|
|
|
self.cpu_pool.spawn_fn(move || {
|
|
|
|
let conn = connection_pool.get()?;
|
|
|
|
|
|
|
|
conn.transaction(|| {
|
|
|
|
#[derive(Insertable)]
|
|
|
|
#[table_name="articles"]
|
|
|
|
struct NewArticle {
|
|
|
|
id: Option<i32>
|
|
|
|
}
|
|
|
|
|
|
|
|
let article_id = {
|
|
|
|
use diesel::expression::sql_literal::sql;
|
|
|
|
// Diesel and SQLite are a bit in disagreement for how this should look:
|
|
|
|
sql::<(diesel::types::Integer)>("INSERT INTO articles VALUES (null)")
|
|
|
|
.execute(&*conn)?;
|
|
|
|
sql::<(diesel::types::Integer)>("SELECT LAST_INSERT_ROWID()")
|
|
|
|
.load::<i32>(&*conn)?
|
|
|
|
.pop().expect("Statement must evaluate to an integer")
|
|
|
|
};
|
|
|
|
|
2017-10-02 18:08:59 +03:00
|
|
|
let slug = decide_slug(&*conn, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?;
|
2017-09-21 12:38:52 +03:00
|
|
|
|
|
|
|
let new_revision = 1;
|
|
|
|
|
|
|
|
diesel::insert(&NewRevision {
|
|
|
|
article_id,
|
|
|
|
revision: new_revision,
|
|
|
|
slug: &slug,
|
|
|
|
title: &title,
|
|
|
|
body: &body,
|
2017-10-18 17:33:21 +03:00
|
|
|
author: author.as_ref().map(|x| &**x),
|
2017-09-21 12:38:52 +03:00
|
|
|
latest: true,
|
|
|
|
})
|
|
|
|
.into(article_revisions::table)
|
|
|
|
.execute(&*conn)?;
|
|
|
|
|
|
|
|
Ok(article_revisions::table
|
|
|
|
.filter(article_revisions::article_id.eq(article_id))
|
|
|
|
.filter(article_revisions::revision.eq(new_revision))
|
|
|
|
.first::<models::ArticleRevision>(&*conn)?
|
|
|
|
)
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
    /// Full-text search over articles via the `article_search` FTS table.
    ///
    /// The query string is split on whitespace and each word is quoted for
    /// the FTS engine; multiple words are combined with NEAR(...), a single
    /// word becomes a prefix match, and an empty query matches nothing.
    /// Returns at most `limit` results starting at `offset`.
    /// NOTE(review): `snippet_size` is bound as the last argument of FTS
    /// `snippet()` — presumably the snippet's max token count; confirm
    /// against the FTS version in use.
    pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> CpuFuture<Vec<models::SearchResult>, Error> {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use diesel::expression::sql_literal::sql;
            use diesel::types::{Integer, Text};

            // Quote one word as an FTS string literal, escaping embedded
            // double quotes by doubling them.
            fn fts_quote(src: &str) -> String {
                format!("\"{}\"", src.replace('\"', "\"\""))
            }

            let words = query_string
                .split_whitespace()
                .map(fts_quote)
                .collect::<Vec<_>>();

            let query = if words.len() > 1 {
                format!("NEAR({})", words.join(" "))
            } else if words.len() == 1 {
                format!("{}*", words[0])
            } else {
                "\"\"".to_owned()
            };

            // Columns: (title, snippet with "…" ellipsis, slug) — must match
            // models::SearchResult's Queryable implementation.
            Ok(
                sql::<(Text, Text, Text)>(
                    "SELECT title, snippet(article_search, 1, '', '', '\u{2026}', ?), slug \
                    FROM article_search \
                    WHERE article_search MATCH ? \
                    ORDER BY rank \
                    LIMIT ? OFFSET ?"
                )
                .bind::<Integer, _>(snippet_size)
                .bind::<Text, _>(query)
                .bind::<Integer, _>(limit)
                .bind::<Integer, _>(offset)
                .load(&*connection_pool.get()?)?)
        })
    }
2017-08-20 22:59:16 +03:00
|
|
|
}
|