use std;

use diesel;
use diesel::sqlite::SqliteConnection;
use diesel::prelude::*;
use futures_cpupool::{self, CpuFuture};
use r2d2::Pool;
use r2d2_diesel::ConnectionManager;

use models;
use schema::*;

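/// Shared application state: an r2d2 connection pool for the SQLite
/// database and a futures_cpupool CPU pool for running blocking queries.
/// Both members are handles, so cloning `State` is cheap.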
#[derive(Clone)]
pub struct State {
    connection_pool: Pool<ConnectionManager<SqliteConnection>>,
    cpu_pool: futures_cpupool::CpuPool,
}

pub type Error = Box<std::error::Error + Send + Sync>;

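/// Result of resolving a slug: no matching article (`Miss`), the slug
/// names the current revision of an article (`Hit`), or the slug is stale
/// and the caller should redirect to the article's current slug
/// (`Redirect`).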
pub enum SlugLookup {
    Miss,
    Hit {
        article_id: i32,
        revision: i32,
    },
    Redirect(String),
}

#[derive(Insertable)]
#[table_name="article_revisions"]
struct NewRevision<'a> {
    article_id: i32,
    revision: i32,
    slug: &'a str,
    title: &'a str,
    body: &'a str,
    author: Option<&'a str>,
    latest: bool,
}

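/// Choose a slug for a revision of the given article: the front page keeps
/// its empty slug, an unchanged or already-matching title keeps the previous
/// slug, and otherwise the slugified title gets a `-2`, `-3`, ... suffix
/// until it no longer collides with another article's current slug.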
fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> {
    let base_slug = ::slug::slugify(title);

    if let Some(prev_slug) = prev_slug {
        if prev_slug == "" {
            // Never give a non-empty slug to the front page
            return Ok(String::new());
        }

        if title == prev_title {
            return Ok(prev_slug.to_owned());
        }

        if base_slug == prev_slug {
            return Ok(base_slug);
        }
    }

    use schema::article_revisions;

    let mut slug = base_slug.clone();
    let mut disambiguator = 1;

    loop {
        let slug_in_use = article_revisions::table
            .filter(article_revisions::article_id.ne(article_id))
            .filter(article_revisions::slug.eq(&slug))
            .filter(article_revisions::latest.eq(true))
            .count()
            .first::<i64>(conn)? != 0;

        if !slug_in_use {
            break Ok(slug);
        }

        disambiguator += 1;
        slug = format!("{}-{}", base_slug, disambiguator);
    }
}

impl State {
    pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
        State {
            connection_pool,
            cpu_pool,
        }
    }

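    /// Look up a specific revision of an article, yielding `None` when the
    /// article or the revision does not exist.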
    pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use schema::article_revisions;

            Ok(article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.eq(revision))
                .first::<models::ArticleRevision>(&*connection_pool.get()?)
                .optional()?)
        })
    }

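    /// Page through the global revision history, newest first. `before`
    /// restricts the result to revisions with a lower sequence number, and
    /// at most `limit` stubs are returned.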
    pub fn get_article_revision_stubs(&self, before: Option<i32>, limit: i32) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use schema::article_revisions;

            let query = article_revisions::table
                .order(article_revisions::sequence_number.desc())
                .limit(limit as i64)
                .select((
                    article_revisions::sequence_number,
                    article_revisions::article_id,
                    article_revisions::revision,
                    article_revisions::created,
                    article_revisions::slug,
                    article_revisions::title,
                    article_revisions::latest,
                    article_revisions::author,
                ))
                .into_boxed();

            let query = match before {
                Some(before) => query.filter(article_revisions::sequence_number.lt(before)),
                None => query
            };

            Ok(query.load(&*connection_pool.get()?)?)
        })
    }

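    /// List the current revision of every article, ordered by title.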
    pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            use schema::article_revisions;

            Ok(article_revisions::table
                .filter(article_revisions::latest.eq(true))
                .order(article_revisions::title.asc())
                .select((
                    article_revisions::sequence_number,
                    article_revisions::article_id,
                    article_revisions::revision,
                    article_revisions::created,
                    article_revisions::slug,
                    article_revisions::title,
                    article_revisions::latest,
                    article_revisions::author,
                ))
                .load(&*connection_pool.get()?)?)
        })
    }

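    /// Resolve a slug to an article. Both queries run in one transaction so
    /// the answer stays consistent under concurrent renames: a stale slug
    /// yields a `Redirect` to the article's current slug.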
    pub fn lookup_slug(&self, slug: String) -> CpuFuture<SlugLookup, Error> {
        #[derive(Queryable)]
        struct ArticleRevisionStub {
            article_id: i32,
            revision: i32,
            latest: bool,
        }

        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let conn = connection_pool.get()?;

            conn.transaction(|| {
                use schema::article_revisions;

                Ok(match article_revisions::table
                    .filter(article_revisions::slug.eq(slug))
                    .order(article_revisions::sequence_number.desc())
                    .select((
                        article_revisions::article_id,
                        article_revisions::revision,
                        article_revisions::latest,
                    ))
                    .first::<ArticleRevisionStub>(&*conn)
                    .optional()?
                {
                    None => SlugLookup::Miss,
                    Some(ref stub) if stub.latest => SlugLookup::Hit {
                        article_id: stub.article_id,
                        revision: stub.revision,
                    },
                    Some(stub) => SlugLookup::Redirect(
                        article_revisions::table
                            .filter(article_revisions::latest.eq(true))
                            .filter(article_revisions::article_id.eq(stub.article_id))
                            .select(article_revisions::slug)
                            .first::<String>(&*conn)?
                    )
                })
            })
        })
    }

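    /// Store a new revision of an existing article. `base_revision` must be
    /// the latest revision; conflicting edits are still unimplemented (see
    /// the TODOs below). The previous revision is marked as no longer latest
    /// within the same transaction.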
    pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
        -> CpuFuture<models::ArticleRevision, Error>
    {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let conn = connection_pool.get()?;

            conn.transaction(|| {
                use schema::article_revisions;

                let (latest_revision, prev_title, prev_slug) = article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .order(article_revisions::revision.desc())
                    .select((
                        article_revisions::revision,
                        article_revisions::title,
                        article_revisions::slug,
                    ))
                    .first::<(i32, String, String)>(&*conn)?;

                if latest_revision != base_revision {
                    // TODO: If it is the same edit repeated, just respond OK
                    // TODO: If there is a conflict, transform the edit to work seamlessly
                    unimplemented!("TODO Missing handling of revision conflicts");
                }
                let new_revision = base_revision + 1;

                let slug = decide_slug(&*conn, article_id, &prev_title, &title, Some(&prev_slug))?;

                diesel::update(
                    article_revisions::table
                        .filter(article_revisions::article_id.eq(article_id))
                        .filter(article_revisions::revision.eq(base_revision))
                )
                    .set(article_revisions::latest.eq(false))
                    .execute(&*conn)?;

                diesel::insert(&NewRevision {
                    article_id,
                    revision: new_revision,
                    slug: &slug,
                    title: &title,
                    body: &body,
                    author: author.as_ref().map(|x| &**x),
                    latest: true,
                })
                    .into(article_revisions::table)
                    .execute(&*conn)?;

                Ok(article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .filter(article_revisions::revision.eq(new_revision))
                    .first::<models::ArticleRevision>(&*conn)?
                )
            })
        })
    }

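    /// Create a new article together with its first revision. `target_slug`
    /// is the slug requested by the caller, which `decide_slug` may adjust
    /// to avoid collisions.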
    pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
        -> CpuFuture<models::ArticleRevision, Error>
    {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let conn = connection_pool.get()?;

            conn.transaction(|| {
                #[derive(Insertable)]
                #[table_name="articles"]
                struct NewArticle {
                    id: Option<i32>
                }

                let article_id = {
                    use diesel::expression::sql_literal::sql;
                    // Diesel and SQLite are a bit in disagreement for how this should look:
                    sql::<(diesel::types::Integer)>("INSERT INTO articles VALUES (null)")
                        .execute(&*conn)?;
                    sql::<(diesel::types::Integer)>("SELECT LAST_INSERT_ROWID()")
                        .load::<i32>(&*conn)?
                        .pop().expect("Statement must evaluate to an integer")
                };

                let slug = decide_slug(&*conn, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?;

                let new_revision = 1;

                diesel::insert(&NewRevision {
                    article_id,
                    revision: new_revision,
                    slug: &slug,
                    title: &title,
                    body: &body,
                    author: author.as_ref().map(|x| &**x),
                    latest: true,
                })
                    .into(article_revisions::table)
                    .execute(&*conn)?;

                Ok(article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .filter(article_revisions::revision.eq(new_revision))
                    .first::<models::ArticleRevision>(&*conn)?
                )
            })
        })
    }
}