Implement lookup and redirect of articles by slugs

Magnus Hoff 2017-09-20 11:54:26 +02:00
parent ada70b7671
commit e1d823d22e
8 changed files with 243 additions and 36 deletions
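
For orientation (illustrative, not part of the diff): the slug lookup added in state.rs below reduces to two queries against the reworked article_revisions table. A rough SQL sketch of that decision, with 'my-article' and the ?1 parameter as placeholders:

-- Illustrative sketch only, not part of this commit.
-- Step 1: the most recent revision that ever carried the requested slug.
SELECT article_id, revision, latest
FROM article_revisions
WHERE slug = 'my-article'
ORDER BY sequence_number DESC
LIMIT 1;
-- no row     => SlugLookup::Miss (nothing has ever used this slug)
-- latest = 1 => SlugLookup::Hit { article_id, revision }: serve the article
-- latest = 0 => the slug is stale; step 2 finds where the article lives now.
SELECT slug
FROM article_revisions
WHERE article_id = ?1 AND latest = 1
LIMIT 1;
-- => SlugLookup::Redirect(current_slug), answered with ArticleRedirectResource.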

View file

@@ -0,0 +1,64 @@
CREATE TABLE article_revisions_copy (
article_id INTEGER NOT NULL,
revision INTEGER NOT NULL,
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
title TEXT NOT NULL,
body TEXT NOT NULL,
PRIMARY KEY (article_id, revision),
FOREIGN KEY (article_id) REFERENCES articles(id)
);
INSERT INTO article_revisions_copy SELECT * FROM article_revisions;
DROP TABLE article_revisions;
CREATE TABLE article_revisions (
sequence_number INTEGER PRIMARY KEY NOT NULL,
article_id INTEGER NOT NULL,
revision INTEGER NOT NULL,
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
slug TEXT NOT NULL,
title TEXT NOT NULL,
body TEXT NOT NULL,
-- Actually a synthetic property, namely revision = MAX(revision)
-- GROUP BY article_id, but SQLite makes that so hard to work with:
latest BOOLEAN NOT NULL,
FOREIGN KEY (article_id) REFERENCES articles(id)
);
CREATE UNIQUE INDEX unique_revision_per_article_id ON article_revisions
(article_id, revision);
CREATE UNIQUE INDEX unique_latest_revision_per_article_id ON article_revisions
(article_id) WHERE latest=1;
CREATE INDEX slug_lookup ON article_revisions
(slug, revision);
INSERT INTO article_revisions SELECT
ROWID,
article_id,
revision,
created,
CAST(article_id AS TEXT) AS slug,
title,
body,
0
FROM article_revisions_copy;
UPDATE article_revisions
SET latest = 1
WHERE (article_id, revision) IN (
SELECT article_id, MAX(revision) AS revision
FROM article_revisions
GROUP BY article_id
);
CREATE UNIQUE INDEX slugs_index ON article_revisions (slug, article_id) WHERE latest=1;
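
A note on the latest column (illustrative, not part of the migration): it denormalizes the "current revision per article" relationship described in the comment above, so current rows can be filtered and partially indexed directly; existing rows are backfilled with their numeric article_id as slug, so the old id-based URLs keep resolving. Roughly:

-- Illustrative only, not part of this commit.
-- What "latest" stands in for: the highest revision of each article...
SELECT article_id, MAX(revision) AS revision
FROM article_revisions
GROUP BY article_id;
-- ...which SQLite cannot target with a partial index or a cheap join.
-- With the flag, the same set is a plain indexed filter, and
-- unique_latest_revision_per_article_id guarantees exactly one such row per
-- article, while slugs_index can serve slug lookups among the live rows.
SELECT article_id, revision, slug, title
FROM article_revisions
WHERE latest = 1;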

View file

@@ -0,0 +1,51 @@
use futures::{self, Future};
use hyper;
use hyper::header::Location;
use hyper::server::*;
use web::{Resource, ResponseFuture};
pub struct ArticleRedirectResource {
slug: String,
}
impl ArticleRedirectResource {
pub fn new(slug: String) -> Self {
// Hack to let redirects to "" work:
// TODO Calculate absolute Location URLs to conform to spec
// This would also remove the need for this hack
Self {
slug: if slug == "" { ".".to_owned() } else { slug }
}
}
}
impl Resource for ArticleRedirectResource {
fn allow(&self) -> Vec<hyper::Method> {
use hyper::Method::*;
vec![Options, Head, Get, Put]
}
fn head(&self) -> ResponseFuture {
Box::new(futures::finished(Response::new()
.with_status(hyper::StatusCode::TemporaryRedirect)
.with_header(Location::new(self.slug.clone()))
))
}
fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head()
.and_then(move |head| {
Ok(head
.with_body(format!("Moved to {}", self.slug)))
}))
}
fn put(self: Box<Self>, _body: hyper::Body) -> ResponseFuture {
Box::new(self.head()
.and_then(move |head| {
Ok(head
.with_body(format!("Moved to {}", self.slug)))
}))
}
}

View file

@@ -7,7 +7,6 @@ use serde_json;
use serde_urlencoded;
use assets::{StyleCss, ScriptJs};
use models;
use site::Layout;
use state::State;
use web::{Resource, ResponseFuture};
@@ -54,12 +53,13 @@ fn render_markdown(src: &str) -> String {
pub struct ArticleResource {
state: State,
data: models::ArticleRevision,
article_id: i32,
revision: i32,
}
impl ArticleResource {
pub fn new(state: State, data: models::ArticleRevision) -> Self {
Self { state, data }
pub fn new(state: State, article_id: i32, revision: i32) -> Self {
Self { state, article_id, revision }
}
}
@@ -93,22 +93,27 @@ impl Resource for ArticleResource {
script_js_checksum: &'a str,
}
Box::new(self.head().map(move |head|
head
let data = self.state.get_article_revision(self.article_id, self.revision)
.map(|x| x.expect("Data model guarantees that this exists"));
let head = self.head();
Box::new(data.join(head)
.and_then(move |(data, head)| {
Ok(head
.with_body(Layout {
title: &self.data.title,
title: &data.title,
body: &Template {
article_id: self.data.article_id,
revision: self.data.revision,
created: &Local.from_utc_datetime(&self.data.created),
title: &self.data.title,
raw: &self.data.body,
rendered: render_markdown(&self.data.body),
article_id: data.article_id,
revision: data.revision,
created: &Local.from_utc_datetime(&data.created),
title: &data.title,
raw: &data.body,
rendered: render_markdown(&data.body),
script_js_checksum: ScriptJs::checksum(),
},
style_css_checksum: StyleCss::checksum(),
}.to_string())
))
}.to_string()))
}))
}
fn put(self: Box<Self>, body: hyper::Body) -> ResponseFuture {
@@ -138,7 +143,7 @@ impl Resource for ArticleResource {
.map_err(Into::into)
})
.and_then(move |update: UpdateArticle| {
self.state.update_article(self.data.article_id, update.base_revision, update.body)
self.state.update_article(self.article_id, update.base_revision, update.body)
})
.and_then(|updated| {
futures::finished(Response::new()

View file

@@ -1,3 +1,5 @@
#![recursion_limit="128"] // for diesel's infer_schema!
#[macro_use] extern crate bart_derive;
#[macro_use] extern crate diesel;
#[macro_use] extern crate diesel_codegen;
@@ -18,6 +20,7 @@ extern crate serde_urlencoded;
use std::net::SocketAddr;
mod article_redirect_resource;
mod article_resource;
mod assets;
mod db;

View file

@@ -2,10 +2,15 @@ use chrono;
#[derive(Debug, Queryable)]
pub struct ArticleRevision {
pub sequence_number: i32,
pub article_id: i32,
pub revision: i32,
pub created: chrono::NaiveDateTime,
pub slug: String,
pub title: String,
pub body: String,
pub latest: bool,
}

View file

@@ -17,6 +17,15 @@ pub struct State {
pub type Error = Box<std::error::Error + Send + Sync>;
pub enum SlugLookup {
Miss,
Hit {
article_id: i32,
revision: i32,
},
Redirect(String),
}
impl State {
pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
State {
@@ -25,7 +34,7 @@ impl State {
}
}
pub fn get_article_revision_by_id(&self, article_id: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
let connection_pool = self.connection_pool.clone();
self.cpu_pool.spawn_fn(move || {
@@ -33,13 +42,61 @@ impl State {
Ok(article_revisions::table
.filter(article_revisions::article_id.eq(article_id))
.order(article_revisions::revision.desc())
.filter(article_revisions::revision.eq(revision))
.limit(1)
.load::<models::ArticleRevision>(&*connection_pool.get()?)?
.pop())
})
}
pub fn lookup_slug(&self, slug: String) -> CpuFuture<SlugLookup, Error> {
#[derive(Queryable)]
struct ArticleRevisionStub {
article_id: i32,
revision: i32,
latest: bool,
}
let connection_pool = self.connection_pool.clone();
self.cpu_pool.spawn_fn(move || {
let conn = connection_pool.get()?;
conn.transaction(|| {
use schema::article_revisions;
Ok(match article_revisions::table
.filter(article_revisions::slug.eq(slug))
.order(article_revisions::sequence_number.desc())
.limit(1)
.select((
article_revisions::article_id,
article_revisions::revision,
article_revisions::latest,
))
.load::<ArticleRevisionStub>(&*conn)?
.pop()
{
None => SlugLookup::Miss,
Some(ref stub) if stub.latest => SlugLookup::Hit {
article_id: stub.article_id,
revision: stub.revision,
},
Some(stub) => SlugLookup::Redirect(
article_revisions::table
.filter(article_revisions::latest.eq(true))
.filter(article_revisions::article_id.eq(stub.article_id))
.limit(1)
.select(article_revisions::slug)
.load::<String>(&*conn)?
.pop()
.expect("Data model requires this to exist")
)
})
})
})
}
pub fn update_article(&self, article_id: i32, base_revision: i32, body: String) -> CpuFuture<models::ArticleRevision, Error> {
let connection_pool = self.connection_pool.clone();
@@ -49,12 +106,17 @@ impl State {
conn.transaction(|| {
use schema::article_revisions;
let (latest_revision, title) = article_revisions::table
// TODO: Get title and slug as parameters to update_article, so we can... update those
let (latest_revision, title, slug) = article_revisions::table
.filter(article_revisions::article_id.eq(article_id))
.order(article_revisions::revision.desc())
.limit(1)
.select((article_revisions::revision, article_revisions::title))
.load::<(i32, String)>(&*conn)?
.select((
article_revisions::revision,
article_revisions::title,
article_revisions::slug,
))
.load::<(i32, String, String)>(&*conn)?
.pop()
.unwrap_or_else(|| unimplemented!("TODO Missing an error type"));
@@ -65,20 +127,33 @@ impl State {
}
let new_revision = base_revision + 1;
#[derive(Insertable)]
#[table_name="article_revisions"]
struct NewRevision<'a> {
article_id: i32,
revision: i32,
slug: &'a str,
title: &'a str,
body: &'a str,
latest: bool,
}
diesel::update(
article_revisions::table
.filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(base_revision))
)
.set(article_revisions::latest.eq(false))
.execute(&*conn)?;
diesel::insert(&NewRevision {
article_id,
revision: new_revision,
slug: &slug,
title: &title,
body: &body,
latest: true,
})
.into(article_revisions::table)
.execute(&*conn)?;
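
For reference (illustrative, not part of the diff): inside its transaction, update_article() now maintains the latest flag with two statements, roughly equivalent to the following SQL, using :named values as placeholders:

-- Illustrative only, not part of this commit.
BEGIN;
-- The base revision being superseded stops being the article's latest...
UPDATE article_revisions
SET latest = 0
WHERE article_id = :article_id AND revision = :base_revision;
-- ...and the new revision becomes the current state, carrying the existing
-- slug and title forward (see the TODO above about updating them).
INSERT INTO article_revisions
    (article_id, revision, slug, title, body, latest)
VALUES (:article_id, :base_revision + 1, :slug, :title, :body, 1);
COMMIT;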

View file

@@ -4,6 +4,7 @@ use futures::{Future, finished};
use assets::*;
use article_resource::ArticleResource;
use article_redirect_resource::ArticleRedirectResource;
use state::State;
use web::{Lookup, Resource};
@@ -69,15 +70,18 @@ impl Lookup for WikiLookup {
return Box::new(finished(None));
}
if let Ok(article_id) = slug.parse() {
let state = self.state.clone();
Box::new(self.state.get_article_revision_by_id(article_id)
.and_then(|x| Ok(x.map(move |article|
Box::new(ArticleResource::new(state, article)) as Box<Resource + Sync + Send>
)))
)
} else {
Box::new(finished(None))
}
use state::SlugLookup;
Box::new(self.state.lookup_slug(slug.to_owned())
.and_then(|x| Ok(match x {
SlugLookup::Miss => None,
SlugLookup::Hit { article_id, revision } =>
Some(Box::new(ArticleResource::new(state, article_id, revision))
as Box<Resource + Sync + Send>),
SlugLookup::Redirect(slug) =>
Some(Box::new(ArticleRedirectResource::new(slug))
as Box<Resource + Sync + Send>)
})))
}
}