use std;

use diesel;
use diesel::sqlite::SqliteConnection;
use diesel::prelude::*;
use futures_cpupool::{self, CpuFuture};
use r2d2::Pool;
use r2d2_diesel::ConnectionManager;

use merge;
use models;
use schema::*;
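
/// Shared handle to the persistence layer: an r2d2 pool of SQLite
/// connections plus the CPU pool that all blocking database work is
/// dispatched onto.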
#[derive(Clone)]
pub struct State {
    connection_pool: Pool<ConnectionManager<SqliteConnection>>,
    cpu_pool: futures_cpupool::CpuPool,
}

pub type Error = Box<std::error::Error + Send + Sync>;
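
/// Result of resolving a slug: no article matches, the slug names the
/// latest revision of an article, or the slug is historic and the client
/// should be redirected to the article's current slug.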
pub enum SlugLookup {
    Miss,
    Hit {
        article_id: i32,
        revision: i32,
    },
    Redirect(String),
}
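
/// Insertable row for the article_revisions table, borrowing its string
/// fields from the caller.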
#[derive(Insertable)]
#[table_name="article_revisions"]
struct NewRevision<'a> {
    article_id: i32,
    revision: i32,
    slug: &'a str,
    title: &'a str,
    body: &'a str,
    author: Option<&'a str>,
    latest: bool,
}
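
/// A three-way merge that could not be resolved automatically: the revision
/// the edit was rebased onto, plus the partially merged title and body, so
/// the conflict can be presented to the user for manual resolution.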
#[derive(Debug, PartialEq)]
pub struct RebaseConflict {
    pub base_article: models::ArticleRevisionStub,
    pub title: merge::MergeResult<char>,
    pub body: merge::MergeResult<String>,
}

#[derive(Debug, PartialEq)]
enum RebaseResult {
    Clean { title: String, body: String },
    Conflict(RebaseConflict),
}
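
/// Outcome of update_article: either the newly stored revision, or a rebase
/// conflict the client has to resolve before retrying.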
pub enum UpdateResult {
    Success(models::ArticleRevision),
    RebaseConflict(RebaseConflict),
}
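
/// Decides the slug to store for a new revision: the front page keeps its
/// empty slug, an unchanged title keeps the previous slug, and otherwise the
/// title is slugified and, if taken, disambiguated with a numeric suffix
/// until it is unique among the latest revisions of all other articles. For
/// example, a second article titled "Title" would get the slug "title-2".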
fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> {
    let base_slug = ::slug::slugify(title);

    if let Some(prev_slug) = prev_slug {
        if prev_slug == "" {
            // Never give a non-empty slug to the front page
            return Ok(String::new());
        }

        if title == prev_title {
            return Ok(prev_slug.to_owned());
        }

        if base_slug == prev_slug {
            return Ok(base_slug);
        }
    }

    let base_slug = if base_slug.is_empty() { "article" } else { &base_slug };

    use schema::article_revisions;

    let mut slug = base_slug.to_owned();
    let mut disambiguator = 1;

    loop {
        let slug_in_use = article_revisions::table
            .filter(article_revisions::article_id.ne(article_id))
            .filter(article_revisions::slug.eq(&slug))
            .filter(article_revisions::latest.eq(true))
            .count()
            .first::<i64>(conn)? != 0;

        if !slug_in_use {
            break Ok(slug);
        }

        disambiguator += 1;
        slug = format!("{}-{}", base_slug, disambiguator);
    }
}
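
/// The synchronous core of the persistence layer. A SyncState borrows a
/// single database connection and runs its queries on the calling thread;
/// the State wrapper below schedules these calls on the CPU pool.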
struct SyncState<'a> {
    db_connection: &'a diesel::SqliteConnection,
}

impl<'a> SyncState<'a> {
    fn new(db_connection: &diesel::SqliteConnection) -> SyncState {
        SyncState { db_connection }
    }

    pub fn get_article_slug(&self, article_id: i32) -> Result<Option<String>, Error> {
        use schema::article_revisions;

        Ok(article_revisions::table
            .filter(article_revisions::article_id.eq(article_id))
            .filter(article_revisions::latest.eq(true))
            .select(article_revisions::slug)
            .first::<String>(self.db_connection)
            .optional()?)
    }

    pub fn get_article_revision(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevision>, Error> {
        use schema::article_revisions;

        Ok(article_revisions::table
            .filter(article_revisions::article_id.eq(article_id))
            .filter(article_revisions::revision.eq(revision))
            .first::<models::ArticleRevision>(self.db_connection)
            .optional()?)
    }
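
    /// Runs a caller-supplied query against article_revisions, selecting
    /// only the stub columns (everything except the body). The higher-ranked
    /// trait bound lets the closure refine a boxed query for whatever
    /// lifetime the connection requires.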
    pub fn query_article_revision_stubs<F>(&self, f: F) -> Result<Vec<models::ArticleRevisionStub>, Error>
    where
        F: 'static + Send + Sync,
        for <'x> F:
            FnOnce(article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>) ->
                article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
    {
        use schema::article_revisions::dsl::*;

        Ok(f(article_revisions.into_boxed())
            .select((
                sequence_number,
                article_id,
                revision,
                created,
                slug,
                title,
                latest,
                author,
            ))
            .load(self.db_connection)?
        )
    }

    fn get_article_revision_stub(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevisionStub>, Error> {
        use schema::article_revisions;

        Ok(self.query_article_revision_stubs(move |query| {
            query
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.eq(revision))
                .limit(1)
        })?.pop())
    }
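
    /// Resolves a slug by inspecting the most recently created revision with
    /// that slug. If that revision is still the latest for its article, the
    /// lookup is a hit; otherwise the article has since been renamed and the
    /// caller is redirected to its current slug.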
    pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> {
        #[derive(Queryable)]
        struct ArticleRevisionStub {
            article_id: i32,
            revision: i32,
            latest: bool,
        }

        self.db_connection.transaction(|| {
            use schema::article_revisions;

            Ok(match article_revisions::table
                .filter(article_revisions::slug.eq(slug))
                .order(article_revisions::sequence_number.desc())
                .select((
                    article_revisions::article_id,
                    article_revisions::revision,
                    article_revisions::latest,
                ))
                .first::<ArticleRevisionStub>(self.db_connection)
                .optional()?
            {
                None => SlugLookup::Miss,
                Some(ref stub) if stub.latest => SlugLookup::Hit {
                    article_id: stub.article_id,
                    revision: stub.revision,
                },
                Some(stub) => SlugLookup::Redirect(
                    article_revisions::table
                        .filter(article_revisions::latest.eq(true))
                        .filter(article_revisions::article_id.eq(stub.article_id))
                        .select(article_revisions::slug)
                        .first::<String>(self.db_connection)?
                )
            })
        })
    }
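
    /// Rebases an edit made against an older revision. For each revision
    /// between the edit's base and the current head, the edit (a) is
    /// three-way merged with the common ancestor (o) and the stored
    /// revision (b). The first unresolvable merge aborts with a conflict.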
    fn rebase_update(&self, article_id: i32, target_base_revision: i32, existing_base_revision: i32, title: String, body: String)
        -> Result<RebaseResult, Error>
    {
        // (title_a, body_a) is the user's edit, as rebased so far
        let mut title_a = title;
        let mut body_a = body;

        for revision in existing_base_revision..target_base_revision {
            // Fetch the common ancestor (o) and the next stored revision (b)
            let mut stored = article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.ge(revision))
                .filter(article_revisions::revision.le(revision+1))
                .order(article_revisions::revision.asc())
                .select((
                    article_revisions::title,
                    article_revisions::body,
                ))
                .load::<(String, String)>(self.db_connection)?;

            let (title_b, body_b) = stored.pop().expect("Application layer guarantee");
            let (title_o, body_o) = stored.pop().expect("Application layer guarantee");

            use merge::MergeResult::*;

            let update = {
                let title_merge = merge::merge_chars(&title_a, &title_o, &title_b);
                let body_merge = merge::merge_lines(&body_a, &body_o, &body_b);

                match (title_merge, body_merge) {
                    (Clean(title), Clean(body)) => (title, body),
                    (title_merge, body_merge) => {
                        return Ok(RebaseResult::Conflict(RebaseConflict {
                            base_article: self.get_article_revision_stub(article_id, revision+1)?.expect("Application layer guarantee"),
                            title: title_merge,
                            body: body_merge.to_strings(),
                        }));
                    },
                }
            };

            title_a = update.0;
            body_a = update.1;
        }

        Ok(RebaseResult::Clean { title: title_a, body: body_a })
    }
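
    /// Stores a new revision of an existing article. Edits based on an older
    /// revision are rebased onto the latest one: a clean rebase bumps the
    /// revision number and moves the latest flag, while a conflicted rebase
    /// is returned to the caller without saving anything.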
    pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
        -> Result<UpdateResult, Error>
    {
        if title.is_empty() {
            Err("title cannot be empty")?;
        }

        self.db_connection.transaction(|| {
            use schema::article_revisions;

            let (latest_revision, prev_title, prev_slug) = article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .order(article_revisions::revision.desc())
                .select((
                    article_revisions::revision,
                    article_revisions::title,
                    article_revisions::slug,
                ))
                .first::<(i32, String, String)>(self.db_connection)?;

            // TODO: If this is an historic edit repeated, just respond OK
            // This scheme would make POST idempotent.

            if base_revision > latest_revision {
                Err("This edit is based on a future version of the article")?;
            }

            let rebase_result = self.rebase_update(article_id, latest_revision, base_revision, title, body)?;

            let (title, body) = match rebase_result {
                RebaseResult::Clean { title, body } => (title, body),
                RebaseResult::Conflict(x) => return Ok(UpdateResult::RebaseConflict(x)),
            };

            let new_revision = latest_revision + 1;

            let slug = decide_slug(self.db_connection, article_id, &prev_title, &title, Some(&prev_slug))?;

            diesel::update(
                article_revisions::table
                    .filter(article_revisions::article_id.eq(article_id))
                    .filter(article_revisions::revision.eq(latest_revision))
            )
                .set(article_revisions::latest.eq(false))
                .execute(self.db_connection)?;

            diesel::insert(&NewRevision {
                article_id,
                revision: new_revision,
                slug: &slug,
                title: &title,
                body: &body,
                author: author.as_ref().map(|x| &**x),
                latest: true,
            })
                .into(article_revisions::table)
                .execute(self.db_connection)?;

            Ok(UpdateResult::Success(article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.eq(new_revision))
                .first::<models::ArticleRevision>(self.db_connection)?
            ))
        })
    }
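
    /// Creates a new article together with its first revision. The article
    /// row is inserted via raw SQL so SQLite assigns the id, which is read
    /// back with LAST_INSERT_ROWID() inside the same transaction.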
    pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
        -> Result<models::ArticleRevision, Error>
    {
        if title.is_empty() {
            Err("title cannot be empty")?;
        }

        self.db_connection.transaction(|| {
            #[derive(Insertable)]
            #[table_name="articles"]
            struct NewArticle {
                id: Option<i32>,
            }

            let article_id = {
                use diesel::expression::sql_literal::sql;
                // Diesel and SQLite are a bit in disagreement for how this should look:
                sql::<diesel::types::Integer>("INSERT INTO articles VALUES (null)")
                    .execute(self.db_connection)?;
                sql::<diesel::types::Integer>("SELECT LAST_INSERT_ROWID()")
                    .load::<i32>(self.db_connection)?
                    .pop().expect("Statement must evaluate to an integer")
            };

            let slug = decide_slug(self.db_connection, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?;

            let new_revision = 1;

            diesel::insert(&NewRevision {
                article_id,
                revision: new_revision,
                slug: &slug,
                title: &title,
                body: &body,
                author: author.as_ref().map(|x| &**x),
                latest: true,
            })
                .into(article_revisions::table)
                .execute(self.db_connection)?;

            Ok(article_revisions::table
                .filter(article_revisions::article_id.eq(article_id))
                .filter(article_revisions::revision.eq(new_revision))
                .first::<models::ArticleRevision>(self.db_connection)?
            )
        })
    }
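
    /// Full text search against the article_search index. Each whitespace
    /// separated word is FTS-quoted, then combined: several words become a
    /// NEAR(...) query, a single word becomes a prefix query (so "sau"
    /// matches "sausage"), and empty input becomes the empty phrase "".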
    pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> Result<Vec<models::SearchResult>, Error> {
        use diesel::expression::sql_literal::sql;
        use diesel::types::{Integer, Text};

        fn fts_quote(src: &str) -> String {
            format!("\"{}\"", src.replace('\"', "\"\""))
        }

        let words = query_string
            .split_whitespace()
            .map(fts_quote)
            .collect::<Vec<_>>();

        let query = if words.len() > 1 {
            format!("NEAR({})", words.join(" "))
        } else if words.len() == 1 {
            format!("{}*", words[0])
        } else {
            "\"\"".to_owned()
        };

        Ok(
            sql::<(Text, Text, Text)>(
                "SELECT title, snippet(article_search, 1, '', '', '\u{2026}', ?), slug \
                FROM article_search \
                WHERE article_search MATCH ? \
                ORDER BY rank \
                LIMIT ? OFFSET ?"
            )
            .bind::<Integer, _>(snippet_size)
            .bind::<Text, _>(query)
            .bind::<Integer, _>(limit)
            .bind::<Integer, _>(offset)
            .load(self.db_connection)?)
    }
}
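
/// Asynchronous facade over SyncState: each method moves its arguments onto
/// the CPU pool and returns a CpuFuture of the corresponding synchronous
/// call. A minimal usage sketch, assuming an already configured connection
/// pool and CPU pool (setup is outside this module):
///
/// ```ignore
/// let state = State::new(connection_pool, cpu_pool);
/// // CpuFuture implements Future, so the result can be waited on:
/// let slug = state.get_article_slug(1).wait()?;
/// ```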
impl State {
    pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
        State {
            connection_pool,
            cpu_pool,
        }
    }
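
    /// Checks a connection out of the pool on a CPU pool thread and runs the
    /// given closure there with a SyncState borrowing that connection.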
    fn execute<F, T>(&self, f: F) -> CpuFuture<T, Error>
    where
        F: 'static + Sync + Send,
        for <'a> F: FnOnce(SyncState<'a>) -> Result<T, Error>,
        T: 'static + Send,
    {
        let connection_pool = self.connection_pool.clone();

        self.cpu_pool.spawn_fn(move || {
            let db_connection = connection_pool.get()?;

            f(SyncState::new(&*db_connection))
        })
    }

    pub fn get_article_slug(&self, article_id: i32) -> CpuFuture<Option<String>, Error> {
        self.execute(move |state| state.get_article_slug(article_id))
    }

    pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
        self.execute(move |state| state.get_article_revision(article_id, revision))
    }

    pub fn query_article_revision_stubs<F>(&self, f: F) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
    where
        F: 'static + Send + Sync,
        for <'a> F:
            FnOnce(article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>) ->
                article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
    {
        self.execute(move |state| state.query_article_revision_stubs(f))
    }

    pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
        self.query_article_revision_stubs(|query| {
            query
                .filter(article_revisions::latest.eq(true))
                .order(article_revisions::title.asc())
        })
    }

    pub fn lookup_slug(&self, slug: String) -> CpuFuture<SlugLookup, Error> {
        self.execute(move |state| state.lookup_slug(slug))
    }

    pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
        -> CpuFuture<UpdateResult, Error>
    {
        self.execute(move |state| state.update_article(article_id, base_revision, title, body, author))
    }

    pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
        -> CpuFuture<models::ArticleRevision, Error>
    {
        self.execute(move |state| state.create_article(target_slug, title, body, author))
    }

    pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> CpuFuture<Vec<models::SearchResult>, Error> {
        self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size))
    }
}
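
// The tests below exercise SyncState synchronously against a fresh database
// from db::test_connection().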
|
2017-11-15 18:27:28 +03:00
|
|
|
|
|
|
|
#[cfg(test)]
|
|
|
|
mod test {
|
|
|
|
use super::*;
|
|
|
|
use db;
|
|
|
|
|
2017-11-20 18:37:52 +03:00
|
|
|
impl UpdateResult {
|
|
|
|
pub fn unwrap(self) -> models::ArticleRevision {
|
|
|
|
match self {
|
|
|
|
UpdateResult::Success(x) => x,
|
|
|
|
_ => panic!("Expected success")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
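
    // A macro rather than a function so that both the connection and the
    // SyncState borrowing it live directly in the test's scope.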
    macro_rules! init {
        ($state:ident) => {
            let db = db::test_connection();
            let $state = SyncState::new(&db);
        }
    }

    #[test]
    fn get_article_slug() {
        init!(state);
        assert_matches!(state.get_article_slug(0), Ok(None));
    }

    #[test]
    fn create_article() {
        init!(state);
        let article_revision = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();
        assert_eq!("title", article_revision.slug);
        assert_eq!(true, article_revision.latest);
    }

    #[test]
    fn create_article_when_empty_slug_then_empty_slug() {
        // Front page gets to keep its empty slug
        init!(state);
        let article_revision = state.create_article(Some("".into()), "Title".into(), "Body".into(), None).unwrap();
        assert_eq!("", article_revision.slug);
    }

    #[test]
    fn update_article() {
        init!(state);

        let article = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();

        let new_revision = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None).unwrap().unwrap();

        assert_eq!(article.article_id, new_revision.article_id);

        // Revision numbers must actually be sequential:
        assert_eq!(article.revision + 1, new_revision.revision);

        assert_eq!(article.title, new_revision.title);

        // Slug must remain unchanged when the title is unchanged:
        assert_eq!(article.slug, new_revision.slug);

        assert_eq!("New body", new_revision.body);
    }

    #[test]
    fn update_article_when_sequential_edits_then_last_wins() {
        init!(state);

        let article = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();

        let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None).unwrap().unwrap();
        let second_edit = state.update_article(article.article_id, first_edit.revision, article.title.clone(), "Newer body".into(), None).unwrap().unwrap();

        assert_eq!("Newer body", second_edit.body);
    }

    #[test]
    fn update_article_when_edit_conflict_then_merge() {
        init!(state);

        let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None).unwrap();

        let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None).unwrap().unwrap();
        let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None).unwrap().unwrap();

        assert!(article.revision < first_edit.revision);
        assert!(first_edit.revision < second_edit.revision);

        assert_eq!("a\nx\nb\ny\nc\n", second_edit.body);
    }

    #[test]
    fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() {
        init!(state);

        let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None).unwrap();

        let edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx1\nb\nc\n".into(), None).unwrap().unwrap();
        let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nb\nc\n".into(), None).unwrap().unwrap();
        let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nx3\nb\nc\n".into(), None).unwrap().unwrap();

        let rebase_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None).unwrap().unwrap();

        assert!(article.revision < edit.revision);
        assert!(edit.revision < rebase_edit.revision);

        assert_eq!("a\nx1\nx2\nx3\nb\ny\nc\n", rebase_edit.body);
    }

    #[test]
    fn update_article_when_title_edit_conflict_then_merge_title() {
        init!(state);

        let article = state.create_article(None, "titlle".into(), "".into(), None).unwrap();

        let first_edit = state.update_article(article.article_id, article.revision, "Titlle".into(), article.body.clone(), None).unwrap().unwrap();
        let second_edit = state.update_article(article.article_id, article.revision, "title".into(), article.body.clone(), None).unwrap().unwrap();

        assert!(article.revision < first_edit.revision);
        assert!(first_edit.revision < second_edit.revision);

        assert_eq!("Title", second_edit.title);
    }

    #[test]
    fn update_article_when_merge_conflict() {
        init!(state);

        let article = state.create_article(None, "Title".into(), "a".into(), None).unwrap();

        let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "b".into(), None).unwrap().unwrap();
        let conflict_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "c".into(), None).unwrap();

        match conflict_edit {
            UpdateResult::Success(..) => panic!("Expected conflict"),
            UpdateResult::RebaseConflict(RebaseConflict { base_article, title, body }) => {
                assert_eq!(first_edit.revision, base_article.revision);
                assert_eq!(title, merge::MergeResult::Clean(article.title.clone()));
                assert_eq!(body, merge::MergeResult::Conflicted(vec![
                    merge::Output::Conflict(vec!["c"], vec!["a"], vec!["b"]),
                ]).to_strings());
            }
        };
    }
}