Compare commits

..

1 commit

Author: Magnus Hoff
SHA1: 7e1770fb07
Message: Start exploring a refactoring to components with scopes and resources
Date: 2018-08-03 09:23:06 +02:00
58 changed files with 2361 additions and 3800 deletions

@ -19,6 +19,21 @@ script:
- strip -s target/x86_64-unknown-linux-musl/release/sausagewiki - strip -s target/x86_64-unknown-linux-musl/release/sausagewiki
- XZ_OPT=-9 tar Jcf sausagewiki.tar.xz -C target/x86_64-unknown-linux-musl/release/ sausagewiki - XZ_OPT=-9 tar Jcf sausagewiki.tar.xz -C target/x86_64-unknown-linux-musl/release/ sausagewiki
deploy:
provider: releases
api_key:
secure: NmM+uk4ijbv5wFF3O7w9KLTrGYbe1mxWAzJDR8cD9rimgORWNQKlHOZtthAQxVgtvmhKAMkzwglgQSX3p0w4yGK5oaV3oO1aA21dzlf0BXL7/BOxgYSTjV+x8O1uIu57ERnf4k2WATCpLSx4r4LpfxMgjdEtIl6LDAnV/zX+HKu7pZAzXvmkS22m5CJbEY4M6DpCAIfpP99dolnyU7h5/AR1njMmzSqGB/naVe5O2j0sBveInsjC+4gPSh9QT/VHZBxbOctcy+kSzwN4iDktkFdYIGe9Z2sDjXsiI39ihXntyOHXA2iVpdkgpIGeLIYBOo+DobgMdS45CzZQ2y9zLnwXwODCgrh8qexxnRpC8RG7uKuVe50R6v4HDPgkjwCJoHicxaEUDiPIsg5qCxEfMYd5qUt21OwEwBN9N8K/RZD0fmgKLE5lQiyxubufeSB4wjpWrXct2M46t25qPFobbZ0kzLCXtZHtKk1mkkk+EWv8UOhRvJ8ih0Fb9ivSOrN6YA1/eRd9/SRntkJriMYmfAW50W3DnyFnPHqdV+x+jHJgcB+DnaDvQnPamk93ZDF/UyUDjVuPJFd0BAFxoRUy6HGaF/yajH4r9g3EdlfSu2IrGDo4vIA9qawBYpHyaSGvYwdCDx4/oUPIAf8sLBS01WOaDJgcmmFey7A/OqSEt6Q=
file: sausagewiki.tar.xz
skip_cleanup: true
on:
repo: maghoff/sausagewiki
branch: master
rust: stable
cache: cargo
before_cache:
- chmod -R a+r $HOME/.cargo
branches: branches:
except: except:
- "/^untagged-/" - "/^untagged-/"

Cargo.lock (generated, 1565 changed lines)

File diff suppressed because it is too large.

@ -4,32 +4,31 @@ description = "A wiki engine"
license = "GPL-3.0" license = "GPL-3.0"
name = "sausagewiki" name = "sausagewiki"
version = "0.1.0-dev" version = "0.1.0-dev"
edition = "2018"
[build-dependencies] [build-dependencies]
quote = "1.0.17" quote = "0.3.10"
walkdir = "1" walkdir = "1"
[build-dependencies.diesel] [build-dependencies.diesel]
default-features = false default-features = false
features = ["sqlite", "chrono"] features = ["sqlite", "chrono"]
version = "1.4.8" version = "1.3.0"
[build-dependencies.diesel_migrations] [build-dependencies.diesel_migrations]
default-features = false default-features = false
features = ["sqlite"] features = ["sqlite"]
version = "1.4.0" version = "1.3.0"
[dependencies] [dependencies]
bart = "0.1.6" bart = "0.1.4"
bart_derive = "0.1.6" bart_derive = "0.1.4"
chrono = "0.4" chrono = "0.4"
clap = "2.31" clap = "2.31"
diff = "0.1" diff = "0.1"
futures = "0.1" futures = "0.1"
futures-cpupool = "0.1" futures-cpupool = "0.1"
hyper = "0.11" hyper = "0.11"
lazy_static = "1.4.0" lazy_static = "0.2"
maplit = "1" maplit = "1"
percent-encoding = "1.0" percent-encoding = "1.0"
r2d2 = "0.8" r2d2 = "0.8"
@ -39,14 +38,12 @@ seahash = "3.0.5"
serde = "1.0.0" serde = "1.0.0"
serde_derive = "1.0.0" serde_derive = "1.0.0"
serde_json = "1.0" serde_json = "1.0"
serde_urlencoded = "0.5.3" serde_urlencoded = "0.5"
slug = "0.1" slug = "0.1"
titlecase = "0.10" titlecase = "0.10"
tokio-io = "0.1" tokio-io = "0.1"
tokio-proto = "0.1" tokio-proto = "0.1"
tokio-service = "0.1" tokio-service = "0.1"
serde_plain = "0.3.0"
rand = "0.5.5"
[dependencies.codegen] [dependencies.codegen]
path = "libs/codegen" path = "libs/codegen"
@ -54,21 +51,21 @@ path = "libs/codegen"
[dependencies.diesel] [dependencies.diesel]
default-features = false default-features = false
features = ["sqlite", "chrono"] features = ["sqlite", "chrono"]
version = "1.4.8" version = "1.3.0"
[dependencies.diesel_infer_schema] [dependencies.diesel_infer_schema]
default-features = false default-features = false
features = ["sqlite"] features = ["sqlite"]
version = "1.4.0" version = "1.3.0"
[dependencies.diesel_migrations] [dependencies.diesel_migrations]
default-features = false default-features = false
features = ["sqlite"] features = ["sqlite"]
version = "1.4.0" version = "1.3.0"
[dependencies.libsqlite3-sys] [dependencies.libsqlite3-sys]
features = ["bundled"] features = ["bundled"]
version = "<0.23.0" version = "0.9.1"
[dependencies.num] [dependencies.num]
default-features = false default-features = false
@ -79,7 +76,7 @@ default-features = false
git = "https://github.com/maghoff/pulldown-cmark.git" git = "https://github.com/maghoff/pulldown-cmark.git"
[dev-dependencies] [dev-dependencies]
indoc = "1.0.4" indoc = "0.2"
matches = "0.1" matches = "0.1"
[features] [features]

@ -72,4 +72,3 @@ Command line arguments
Sausagewiki will create an SQLite database file with the filename given in the Sausagewiki will create an SQLite database file with the filename given in the
`DATABASE` parameter and open an HTTP server bound to the configured address, `DATABASE` parameter and open an HTTP server bound to the configured address,
`<address>:<port>`. `<address>:<port>`.

@ -1,5 +1,3 @@
"use strict";
function autosizeTextarea(textarea, shadow) { function autosizeTextarea(textarea, shadow) {
shadow.style.width = textarea.clientWidth + "px"; shadow.style.width = textarea.clientWidth + "px";
shadow.value = textarea.value; shadow.value = textarea.value;
@ -8,17 +6,16 @@ function autosizeTextarea(textarea, shadow) {
function queryArgsFromForm(form) { function queryArgsFromForm(form) {
const items = []; const items = [];
for (const {name, value, type, checked} of form.elements) { for (const {name, value} of form.elements) {
if (!name) continue; if (!name) continue;
if (type === "radio" && !checked) continue;
items.push(encodeURIComponent(name) + '=' + encodeURIComponent(value)); items.push(encodeURIComponent(name) + '=' + encodeURIComponent(value));
} }
return items.join('&'); return items.join('&');
} }
function isEdited(form) { function isEdited(form) {
for (const {name, value, defaultValue, checked, defaultChecked} of form.elements) { for (const {name, value, defaultValue} of form.elements) {
if (name && ((value !== defaultValue) || (checked !== defaultChecked))) return true; if (name && (value !== defaultValue)) return true;
} }
return false; return false;
} }
@ -59,15 +56,8 @@ function confirmDiscard() {
return popup(instantiate("confirm-discard")); return popup(instantiate("confirm-discard"));
} }
const state = { let hasBeenOpen = false;
hasBeenOpen: false,
saving: false,
editing: function () { return document.querySelector(".container").classList.contains('edit'); },
hasCancelUrl: function () { return document.querySelector("a.button-cancel").getAttribute('href') !== ""; }
};
function openEditor() { function openEditor() {
const bodyElement = document.querySelector("body");
const container = document.querySelector(".container"); const container = document.querySelector(".container");
const rendered = container.querySelector(".rendered"); const rendered = container.querySelector(".rendered");
const editor = container.querySelector(".editor"); const editor = container.querySelector(".editor");
@ -75,61 +65,31 @@ function openEditor() {
const shadow = editor.querySelector('textarea.shadow-control'); const shadow = editor.querySelector('textarea.shadow-control');
const form = document.getElementById('article-editor'); const form = document.getElementById('article-editor');
const cancel = form.querySelector('.cancel'); const cancel = form.querySelector('.cancel');
const cancelButton = form.querySelector('button.button-cancel');
const cancelInteractionGroup = form.querySelector(".cancel-interaction-group");
const footer = document.querySelector("footer"); const footer = document.querySelector("footer");
const lastUpdated = footer.querySelector(".last-updated"); const lastUpdated = footer.querySelector(".last-updated");
textarea.style.height = rendered.clientHeight + "px"; textarea.style.height = rendered.clientHeight + "px";
retainScrollRatio(() => {
container.classList.add('edit'); container.classList.add('edit');
autosizeTextarea(textarea, shadow);
});
updateFormEnabledState();
if (state.hasBeenOpen) return; autosizeTextarea(textarea, shadow);
state.hasBeenOpen = true;
textarea.focus();
if (hasBeenOpen) return;
hasBeenOpen = true;
textarea.addEventListener('input', () => autosizeTextarea(textarea, shadow)); textarea.addEventListener('input', () => autosizeTextarea(textarea, shadow));
window.addEventListener('resize', () => autosizeTextarea(textarea, shadow)); window.addEventListener('resize', () => autosizeTextarea(textarea, shadow));
function updateFormEnabledState() { form.addEventListener("submit", function (ev) {
const baseEnabled = !state.saving && state.editing(); ev.preventDefault();
const enabled = { ev.stopPropagation();
cancel: baseEnabled && state.hasCancelUrl(),
};
cancelInteractionGroup.classList.remove(!enabled.cancel ? "interaction-group--root--enabled" : "interaction-group--root--disabled");
cancelInteractionGroup.classList.add(enabled.cancel ? "interaction-group--root--enabled" : "interaction-group--root--disabled");
for (const el of form.elements) {
el.disabled = !baseEnabled;
}
cancelButton.disabled = true;
// TODO: edit-link in footer?
}
function retainScrollRatio(innerFunction) {
const scrollElement = document.body.parentElement;
const savedScrollRatio = scrollElement.scrollTop / (scrollElement.scrollHeight - scrollElement.clientHeight);
innerFunction();
scrollElement.scrollTop = (scrollElement.scrollHeight - scrollElement.clientHeight) * savedScrollRatio;
}
function closeEditor() {
retainScrollRatio(() => container.classList.remove('edit'));
document.activeElement && document.activeElement.blur();
}
function doSave() {
state.saving = true;
updateFormEnabledState();
const body = queryArgsFromForm(form); const body = queryArgsFromForm(form);
textarea.disabled = true;
// TODO Disable other interaction as well: title editor, cancel and OK buttons
fetch( fetch(
form.getAttribute("action"), form.getAttribute("action"),
@ -149,8 +109,7 @@ function openEditor() {
if (probablyLoginRedirect) { if (probablyLoginRedirect) {
return loginDialog(response.url) return loginDialog(response.url)
.then(() => { .then(() => {
state.saving = false; textarea.disabled = false;
updateFormEnabledState();
}); });
} }
@ -158,10 +117,8 @@ function openEditor() {
return response.json() return response.json()
.then(result => { .then(result => {
// Update url-bar, page title, footer and cancel link // Update url-bar, page title and footer
const url = result.slug == "" ? "." : result.slug; window.history.replaceState(null, result.title, result.slug == "" ? "." : result.slug);
window.history.replaceState(null, result.title, url);
cancel.setAttribute("href", url);
document.querySelector("title").textContent = result.title; document.querySelector("title").textContent = result.title;
lastUpdated.innerHTML = result.last_updated; lastUpdated.innerHTML = result.last_updated;
lastUpdated.classList.remove("missing"); lastUpdated.classList.remove("missing");
@ -172,22 +129,17 @@ function openEditor() {
form.elements.title.value = result.title; form.elements.title.value = result.title;
shadow.value = textarea.value = result.body; shadow.value = textarea.value = result.body;
form.querySelector(`.theme-picker--option[value=${JSON.stringify(result.theme)}]`).checked = true;
bodyElement.className = `theme-${result.theme}`;
// Update form: // Update form:
form.elements.base_revision.value = result.revision; form.elements.base_revision.value = result.revision;
for (const element of form.elements) { for (const element of form.elements) {
element.defaultValue = element.value; element.defaultValue = element.value;
element.defaultChecked = element.checked;
} }
if (!result.conflict) { if (!result.conflict) {
closeEditor(); container.classList.remove('edit');
} }
state.saving = false; textarea.disabled = false;
updateFormEnabledState();
autosizeTextarea(textarea, shadow); autosizeTextarea(textarea, shadow);
if (result.conflict) { if (result.conflict) {
@ -197,37 +149,23 @@ function openEditor() {
} }
}); });
}).catch(err => { }).catch(err => {
state.saving = false; textarea.disabled = false;
updateFormEnabledState();
console.error(err); console.error(err);
return alertAsync(err.toString()); return alertAsync(err.toString());
}); });
}
function doCancel() {
Promise.resolve(!isEdited(form) || confirmDiscard())
.then(doReset => {
if (doReset) {
closeEditor();
updateFormEnabledState();
form.reset();
let selectedTheme = form.querySelector(`.theme-picker--option[checked]`).value;
bodyElement.className = `theme-${selectedTheme}`;
}
});
}
form.addEventListener("submit", function (ev) {
ev.preventDefault();
ev.stopPropagation();
doSave();
}); });
cancel.addEventListener('click', function (ev) { cancel.addEventListener('click', function (ev) {
ev.preventDefault(); ev.preventDefault();
ev.stopPropagation(); ev.stopPropagation();
doCancel();
Promise.resolve(!isEdited(form) || confirmDiscard())
.then(doReset => {
if (doReset) {
container.classList.remove('edit');
form.reset();
}
});
}); });
window.addEventListener("beforeunload", function (ev) { window.addEventListener("beforeunload", function (ev) {
@ -236,43 +174,8 @@ function openEditor() {
return ev.returnValue = "Discard changes?"; return ev.returnValue = "Discard changes?";
} }
}); });
document.addEventListener("keypress", function (ev) {
const accel = ev.ctrlKey || ev.metaKey; // Imprecise, but works cross platform
if (ev.key === "Enter" && accel) {
if (!state.editing()) return;
ev.stopPropagation();
ev.preventDefault();
doSave();
}
});
const themeOptions = form.querySelectorAll(".theme-picker--option");
for (let themeOption of themeOptions) {
themeOption.addEventListener("click", function (ev) {
bodyElement.className = `theme-${ev.target.value}`;
});
}
} }
function initializeTheme() {
const form = document.getElementById('article-editor');
let preSelectedTheme = form.querySelector(`.theme-picker--option[checked]`);
if (preSelectedTheme) return;
let themes = form.querySelectorAll(`.theme-picker--option`);
let randomThemeId = (Math.random() * themes.length) | 0;
let theme = themes[randomThemeId];
theme.defaultChecked = theme.checked = true;
document.querySelector("body").className = `theme-${theme.value}`;
}
initializeTheme();
document document
.getElementById("openEditor") .getElementById("openEditor")
.addEventListener("click", function (ev) { .addEventListener("click", function (ev) {

@ -2,10 +2,6 @@
display: none; display: none;
} }
input {
margin: 0; /* reset for Safari */
}
html { html {
font-family: "Apple Garamond", "Baskerville", font-family: "Apple Garamond", "Baskerville",
"Times New Roman", "Droid Serif", "Times", "Times New Roman", "Droid Serif", "Times",
@ -58,16 +54,15 @@ h1+*, h2+*, h3+*, h4+*, h5+*, h6+* {
article>hr { article>hr {
border: none; border: none;
border-top: 6px solid var(--theme-main); border-top: 1px solid black;
width: 40px; max-width: 400px;
width: 70%;
margin: 20px auto; margin: 20px auto;
} }
.notice { .notice {
background: var(--theme-main); background: lightyellow;
color: var(--theme-text); padding: 16px 48px;
padding: 1px 24px;
font-size: 18px; font-size: 18px;
line-height: 32px; line-height: 32px;
@ -76,9 +71,6 @@ article>hr {
width: 100%; width: 100%;
margin: 30px auto; margin: 30px auto;
} }
.notice a {
color: var(--theme-link);
}
.hero { .hero {
background: var(--theme-main); background: var(--theme-main);
@ -309,146 +301,29 @@ h1>input {
bottom: 0; bottom: 0;
left: 0; left: 0;
box-sizing: border-box;
text-align: right;
box-shadow: 0px 5px 20px rgba(0,0,0, 0.2); box-shadow: 0px 5px 20px rgba(0,0,0, 0.2);
background: white; background: var(--theme-main);
color: var(--theme-text); color: var(--theme-text);
padding: 10px 10px; padding: 10px 20px;
transform: translate(0, 65px); transform: translate(0, 65px);
transition: transform 100ms; transition: transform 100ms;
transition-timing-function: linear; transition-timing-function: linear;
pointer-events: none;
} }
.edit .editor-controls { .edit .editor-controls {
transform: translate(0, 0); transform: translate(0, 0);
transition-timing-function: cubic-bezier(.17,.84,.44,1); transition-timing-function: cubic-bezier(.17,.84,.44,1);
pointer-events: unset;
} }
.theme-picker { @media (min-width: 630px) {
position: absolute;
top: 0;
left: 0;
right: 0;
display: flex;
}
.theme-picker--option {
/* reset */
-webkit-appearance: none;
-moz-appearance: none;
-o-appearance: none;
-ms-appearance: none;
appearance: none;
border: none;
border-radius: 0;
margin: 0;
padding: 0;
height: 20px;
background: var(--theme-main);
color: var(--theme-text);
flex-grow: 1;
position: relative;
}
.theme-picker--option:checked::after {
content: " ";
display: block;
background: white;
border-radius: 5px;
width: 10px;
height: 10px;
position: absolute;
top: calc(50% - 5px);
left: calc(50% - 5px);
}
.button {
border-radius: 2px;
display: inline-block;
width: 120px;
text-align: center;
border: none;
cursor: pointer;
font-family: -apple-system, BlinkMacSystemFont,
"Segoe UI", "Roboto", "Oxygen",
"Ubuntu", "Cantarell", "Fira Sans",
"Droid Sans", "Helvetica Neue", sans-serif;
font-size: 16px;
line-height: 20px;
padding: 10px 0px;
margin-left: 10px;
}
.button[disabled] {
opacity: 0.5;
cursor: default;
}
.button:hover {
text-decoration: none;
}
.button:not([disabled]):hover, .button:not([disabled]):active {
background: var(--button-alt);
}
.button-cancel {
background: white;
color: var(--theme-main);
--button-alt: #f0f0f0;
}
.button-default {
background: var(--theme-main);
color: var(--theme-text);
--button-alt: var(--theme-input);
}
.cancel-interaction-group {
display: inline;
}
.interaction-group--root--enabled .interaction-group--disabled {
display: none;
}
.interaction-group--root--disabled .interaction-group--enabled {
display: none;
}
@media (min-width: 960px) {
/* min-width is calculated like this:
body-width = width of the main text column
controls-width = width of .editor-controls element, including drop-shadow
min-width = body-width + 2*controls-width = 600 + 2 * 180 = 960
*/
.editor-controls { .editor-controls {
border-radius: 2px;
position: fixed; position: fixed;
left: calc(50% + 320px); left: calc(50vw + 320px);
width: 140px; width: 120px;
top: calc(50% - 55px); top: calc(50vh - 40px);
height: 110px; height: 80px;
padding: 10px;
transform: translate(20px, 0); transform: translate(20px, 0);
opacity: 0; opacity: 0;
@ -462,11 +337,6 @@ h1>input {
transition-timing-function: cubic-bezier(.17,.84,.44,1); transition-timing-function: cubic-bezier(.17,.84,.44,1);
opacity: 1; opacity: 1;
} }
.button {
margin: 0;
margin-bottom: 10px;
}
} }
article ul.search-results { article ul.search-results {
@ -501,7 +371,7 @@ article ul.search-results {
.search { .search {
text-align: center; text-align: center;
margin-top: 45px; margin-top: 30px;
position: relative; position: relative;
} }
@ -532,9 +402,9 @@ input[type="search"] {
text-overflow: ellipsis; text-overflow: ellipsis;
} }
input[type="search"]::placeholder, .hero input::placeholder { input[type="search"]::placeholder {
color: var(--theme-text); color: var(--theme-text);
opacity: 0.6; opacity: 0.5;
} }
.search .live-results { .search .live-results {
@ -716,10 +586,6 @@ input[type="search"]::placeholder, .hero input::placeholder {
margin: 0 auto; margin: 0 auto;
} }
article>hr {
border-color: black;
}
h1 { h1 {
font-size: 22pt; font-size: 22pt;
line-height: 33pt; line-height: 33pt;

@ -16,18 +16,9 @@
.proto { .proto {
display: none; display: none;
} }
#bar {
display: flex;
width: 100%;
}
#bar>div {
height: 30px;
flex-grow: 1;
}
</style> </style>
</head> </head>
<body> <body>
<div id="bar"></div>
<div class="proto"> <div class="proto">
<div class="themed">The <span class="link">quick</span> brown <span class="link">dog</span> jumps over the lazy log <span class="theme-name"></span></div> <div class="themed">The <span class="link">quick</span> brown <span class="link">dog</span> jumps over the lazy log <span class="theme-name"></span></div>
<div class="themed"><input type=search placeholder=placeholder> <input type=search value="Bacon"></div> <div class="themed"><input type=search placeholder=placeholder> <input type=search value="Bacon"></div>
@ -42,13 +33,6 @@
block.querySelector(".theme-name").textContent = theme; block.querySelector(".theme-name").textContent = theme;
body.appendChild(block); body.appendChild(block);
} }
const bar = document.querySelector("#bar");
for (theme of themes) {
const block = document.createElement("div");
block.className = `theme-${theme} themed`;
bar.appendChild(block);
}
</script> </script>
</body> </body>
</html> </html>

@ -1,9 +1,10 @@
#[macro_use] #[macro_use] extern crate quote;
extern crate diesel; #[macro_use] extern crate diesel;
extern crate diesel_migrations;
extern crate walkdir;
use diesel::prelude::*;
use diesel::Connection; use diesel::Connection;
use quote::quote; use diesel::prelude::*;
use std::env; use std::env;
use std::fs::File; use std::fs::File;
use std::io::prelude::*; use std::io::prelude::*;
@ -14,40 +15,31 @@ use walkdir::WalkDir;
mod sqlfunc { mod sqlfunc {
use diesel::sql_types::Text; use diesel::sql_types::Text;
sql_function!(fn markdown_to_fts(text: Text) -> Text); sql_function!(fn markdown_to_fts(text: Text) -> Text);
sql_function!(fn theme_from_str_hash(text: Text) -> Text);
} }
fn main() { fn main() {
let out_dir = env::var("OUT_DIR").expect("cargo must set OUT_DIR"); let out_dir = env::var("OUT_DIR").expect("cargo must set OUT_DIR");
let db_path = Path::new(&out_dir).join("build.db"); let db_path = Path::new(&out_dir).join("build.db");
let db_path = db_path let db_path = db_path.to_str().expect("Will only work for Unicode-representable paths");
.to_str()
.expect("Will only work for Unicode-representable paths");
let _ignore_failure = std::fs::remove_file(db_path); let _ignore_failure = std::fs::remove_file(db_path);
let connection = SqliteConnection::establish(db_path) let connection = SqliteConnection::establish(db_path)
.unwrap_or_else(|_| panic!("Error esablishing a database connection to {}", db_path)); .expect(&format!("Error esablishing a database connection to {}", db_path));
// Integer is a dummy placeholder. Compiling fails when passing (). // Integer is a dummy placeholder. Compiling fails when passing ().
diesel::expression::sql_literal::sql::<diesel::sql_types::Integer>("PRAGMA foreign_keys = ON") diesel::expression::sql_literal::sql::<(diesel::sql_types::Integer)>("PRAGMA foreign_keys = ON")
.execute(&connection) .execute(&connection)
.expect("Should be able to enable foreign keys"); .expect("Should be able to enable foreign keys");
sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() }) sqlfunc::markdown_to_fts::register_impl(&connection, |_: String| -> String { unreachable!() }).unwrap();
.unwrap();
sqlfunc::theme_from_str_hash::register_impl(&connection, |_: String| -> String {
unreachable!()
})
.unwrap();
diesel_migrations::run_pending_migrations(&connection).unwrap(); diesel_migrations::run_pending_migrations(&connection).unwrap();
let infer_schema_path = Path::new(&out_dir).join("infer_schema.rs"); let infer_schema_path = Path::new(&out_dir).join("infer_schema.rs");
let mut file = File::create(infer_schema_path).expect("Unable to open file for writing"); let mut file = File::create(infer_schema_path).expect("Unable to open file for writing");
file.write_all( file.write_all(quote! {
quote! {
mod __diesel_infer_schema_articles { mod __diesel_infer_schema_articles {
infer_table_from_schema!(#db_path, "articles"); infer_table_from_schema!(#db_path, "articles");
} }
@ -57,21 +49,18 @@ fn main() {
infer_table_from_schema!(#db_path, "article_revisions"); infer_table_from_schema!(#db_path, "article_revisions");
} }
pub use self::__diesel_infer_schema_article_revisions::*; pub use self::__diesel_infer_schema_article_revisions::*;
} }.as_str().as_bytes()).expect("Unable to write to file");
.to_string()
.as_bytes(),
)
.expect("Unable to write to file");
for entry in WalkDir::new("migrations") for entry in WalkDir::new("migrations").into_iter().filter_map(|e| e.ok()) {
.into_iter()
.filter_map(|e| e.ok())
{
println!("cargo:rerun-if-changed={}", entry.path().display()); println!("cargo:rerun-if-changed={}", entry.path().display());
} }
// For build_config.rs // For build_config.rs
for env_var in &["CONTINUOUS_INTEGRATION", "TRAVIS_BRANCH", "TRAVIS_COMMIT"] { for env_var in &[
"CONTINUOUS_INTEGRATION",
"TRAVIS_BRANCH",
"TRAVIS_COMMIT",
] {
println!("cargo:rerun-if-env-changed={}", env_var); println!("cargo:rerun-if-env-changed={}", env_var);
} }
} }

@ -1,13 +1,11 @@
#![recursion_limit = "128"] #![recursion_limit="128"]
#[macro_use] #[macro_use] extern crate quote;
extern crate quote; #[macro_use] extern crate serde_derive;
#[macro_use]
extern crate serde_derive;
extern crate base64; extern crate base64;
extern crate proc_macro; extern crate proc_macro;
extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate serde;
extern crate sha2; extern crate sha2;
extern crate syn; extern crate syn;

@ -2,10 +2,13 @@ use std::fs::File;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote; use quote;
use serde::de::IgnoredAny;
use serde_json; use serde_json;
use serde::de::IgnoredAny;
const SOURCES: &[&str] = &["src/licenses/license-hound.json", "src/licenses/other.json"]; const SOURCES: &[&str] = &[
"src/licenses/license-hound.json",
"src/licenses/other.json",
];
#[derive(Debug, Copy, Clone, Deserialize)] #[derive(Debug, Copy, Clone, Deserialize)]
pub enum LicenseId { pub enum LicenseId {
@ -19,7 +22,7 @@ impl LicenseId {
fn include_notice(&self) -> bool { fn include_notice(&self) -> bool {
use self::LicenseId::*; use self::LicenseId::*;
match self { match self {
Mpl2 => false, &Mpl2 => false,
_ => true, _ => true,
} }
} }
@ -29,10 +32,10 @@ impl quote::ToTokens for LicenseId {
fn to_tokens(&self, tokens: &mut quote::Tokens) { fn to_tokens(&self, tokens: &mut quote::Tokens) {
use self::LicenseId::*; use self::LicenseId::*;
tokens.append(match self { tokens.append(match self {
Bsd3Clause => "Bsd3Clause", &Bsd3Clause => "Bsd3Clause",
Mit => "Mit", &Mit => "Mit",
Mpl2 => "Mpl2", &Mpl2 => "Mpl2",
Ofl11 => "Ofl11", &Ofl11 => "Ofl11",
}); });
} }
} }
@ -53,16 +56,12 @@ struct LicenseReport {
impl quote::ToTokens for LicenseReport { impl quote::ToTokens for LicenseReport {
fn to_tokens(&self, tokens: &mut quote::Tokens) { fn to_tokens(&self, tokens: &mut quote::Tokens) {
let c: &LicenseDescription = self.conclusion.as_ref().unwrap(); let c: &LicenseDescription = self.conclusion.as_ref().unwrap();
let (name, link, copyright, license) = ( let (name, link, copyright, license) =
&self.package_name, (&self.package_name, &c.link, &c.copyright_notice, &c.chosen_license);
&c.link,
&c.copyright_notice,
&c.chosen_license,
);
let link = match *link { let link = match link {
Some(ref link) => quote! { Some(#link) }, &Some(ref link) => quote! { Some(#link) },
None => quote! { None }, &None => quote! { None },
}; };
let copyright = match license.include_notice() { let copyright = match license.include_notice() {
@ -86,10 +85,7 @@ pub fn licenses(_input: TokenStream) -> TokenStream {
.iter() .iter()
.map(|x| -> Vec<LicenseReport> { serde_json::from_reader(File::open(x).unwrap()).unwrap() }) .map(|x| -> Vec<LicenseReport> { serde_json::from_reader(File::open(x).unwrap()).unwrap() })
.map(|x| x.into_iter().filter(|x| x.conclusion.is_ok())) .map(|x| x.into_iter().filter(|x| x.conclusion.is_ok()))
.fold(vec![], |mut a, b| { .fold(vec![], |mut a, b| { a.extend(b); a });
a.extend(b);
a
});
license_infos.sort_unstable_by_key(|x| x.package_name.to_lowercase()); license_infos.sort_unstable_by_key(|x| x.package_name.to_lowercase());

@ -10,29 +10,30 @@ fn user_crate_root() -> PathBuf {
std::env::current_dir().expect("Unable to get current directory") std::env::current_dir().expect("Unable to get current directory")
} }
fn find_attr<'a>(attrs: &'a [syn::Attribute], name: &str) -> Option<&'a str> { fn find_attr<'a>(attrs: &'a Vec<syn::Attribute>, name: &str) -> Option<&'a str> {
attrs attrs.iter()
.iter()
.find(|&x| x.name() == name) .find(|&x| x.name() == name)
.and_then(|attr| match attr.value { .and_then(|ref attr| match &attr.value {
syn::MetaItem::NameValue(_, syn::Lit::Str(ref template, _)) => Some(template), &syn::MetaItem::NameValue(_, syn::Lit::Str(ref template, _)) => Some(template),
_ => None, _ => None
}) })
.map(|x| x.as_ref()) .map(|x| x.as_ref())
} }
fn buf_file<P: AsRef<Path>>(filename: P) -> Vec<u8> { fn buf_file<P: AsRef<Path>>(filename: P) -> Vec<u8> {
let mut f = File::open(filename).expect("Unable to open file for reading"); let mut f = File::open(filename)
.expect("Unable to open file for reading");
let mut buf = Vec::new(); let mut buf = Vec::new();
f.read_to_end(&mut buf).expect("Unable to read file"); f.read_to_end(&mut buf)
.expect("Unable to read file");
buf buf
} }
fn calculate_checksum<P: AsRef<Path>>(filename: P) -> String { fn calculate_checksum<P: AsRef<Path>>(filename: P) -> String {
use base64::*; use base64::*;
use sha2::{Digest, Sha256}; use sha2::{Sha256, Digest};
encode_config(&Sha256::digest(&buf_file(filename)), URL_SAFE) encode_config(&Sha256::digest(&buf_file(filename)), URL_SAFE)
} }
@ -41,24 +42,23 @@ pub fn static_resource(input: TokenStream) -> TokenStream {
let s = input.to_string(); let s = input.to_string();
let ast = syn::parse_macro_input(&s).unwrap(); let ast = syn::parse_macro_input(&s).unwrap();
let filename = let filename = find_attr(&ast.attrs, "filename")
find_attr(&ast.attrs, "filename").expect("The `filename` attribute must be specified"); .expect("The `filename` attribute must be specified");
let abs_filename = user_crate_root().join(filename); let abs_filename = user_crate_root().join(filename);
let abs_filename = abs_filename let abs_filename = abs_filename.to_str().expect("Absolute file path must be valid Unicode");
.to_str()
.expect("Absolute file path must be valid Unicode");
let checksum = calculate_checksum(&abs_filename); let checksum = calculate_checksum(&abs_filename);
let path: &Path = filename.as_ref(); let path: &Path = filename.as_ref();
let resource_name = format!( let resource_name =
"{}-{}.{}", format!("{}-{}.{}",
path.file_stem().unwrap().to_str().unwrap(), path.file_stem().unwrap().to_str().unwrap(),
checksum, checksum,
path.extension().unwrap().to_str().unwrap() path.extension().unwrap().to_str().unwrap()
); );
let mime = find_attr(&ast.attrs, "mime").expect("The `mime` attribute must be specified"); let mime = find_attr(&ast.attrs, "mime")
.expect("The `mime` attribute must be specified");
let name = &ast.ident; let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();

@ -1,7 +0,0 @@
ALTER TABLE article_revisions ADD COLUMN theme TEXT NOT NULL CHECK (theme IN (
'red', 'pink', 'purple', 'deep-purple', 'indigo', 'blue', 'light-blue',
'cyan', 'teal', 'green', 'light-green', 'lime', 'yellow', 'amber',
'orange', 'deep-orange', 'brown', 'gray', 'blue-gray'
)) DEFAULT 'red';
UPDATE article_revisions SET theme=theme_from_str_hash(title);

@ -1,8 +1,8 @@
#[cfg(not(feature = "dynamic-assets"))] #[cfg(not(feature="dynamic-assets"))]
mod static_assets { mod static_assets {
use crate::web::{Resource, ResponseFuture};
use futures::Future;
use std::collections::HashMap; use std::collections::HashMap;
use futures::Future;
use web::{Resource, ResponseFuture};
// The CSS should be built to a single CSS file at compile time // The CSS should be built to a single CSS file at compile time
#[derive(StaticResource)] #[derive(StaticResource)]
@ -32,8 +32,8 @@ mod static_assets {
// #[mime = "application/font-woff"] // #[mime = "application/font-woff"]
// pub struct AmaticFont; // pub struct AmaticFont;
type BoxResource = Box<dyn Resource + Sync + Send>; type BoxResource = Box<Resource + Sync + Send>;
type ResourceFn = Box<dyn Fn() -> BoxResource + Sync + Send>; type ResourceFn = Box<Fn() -> BoxResource + Sync + Send>;
lazy_static! { lazy_static! {
pub static ref ASSETS_MAP: HashMap<&'static str, ResourceFn> = hashmap!{ pub static ref ASSETS_MAP: HashMap<&'static str, ResourceFn> = hashmap!{
// The CSS should be built to a single CSS file at compile time // The CSS should be built to a single CSS file at compile time
@ -52,39 +52,31 @@ mod static_assets {
} }
} }
#[cfg(not(feature = "dynamic-assets"))] #[cfg(not(feature="dynamic-assets"))]
pub use self::static_assets::*; pub use self::static_assets::*;
#[cfg(feature = "dynamic-assets")] #[cfg(feature="dynamic-assets")]
mod dynamic_assets { mod dynamic_assets {
pub struct ThemesCss; pub struct ThemesCss;
impl ThemesCss { impl ThemesCss {
pub fn resource_name() -> &'static str { pub fn resource_name() -> &'static str { "themes.css" }
"themes.css"
}
} }
pub struct StyleCss; pub struct StyleCss;
impl StyleCss { impl StyleCss {
pub fn resource_name() -> &'static str { pub fn resource_name() -> &'static str { "style.css" }
"style.css"
}
} }
pub struct ScriptJs; pub struct ScriptJs;
impl ScriptJs { impl ScriptJs {
pub fn resource_name() -> &'static str { pub fn resource_name() -> &'static str { "script.js" }
"script.js"
}
} }
pub struct SearchJs; pub struct SearchJs;
impl SearchJs { impl SearchJs {
pub fn resource_name() -> &'static str { pub fn resource_name() -> &'static str { "search.js" }
"search.js"
}
} }
} }
#[cfg(feature = "dynamic-assets")] #[cfg(feature="dynamic-assets")]
pub use self::dynamic_assets::*; pub use self::dynamic_assets::*;

@ -7,12 +7,12 @@ pub const PROJECT_NAME: &str = env!("CARGO_PKG_NAME");
const SOFT_HYPHEN: &str = "\u{00AD}"; const SOFT_HYPHEN: &str = "\u{00AD}";
#[cfg(all(not(debug_assertions), feature = "dynamic-assets"))] #[cfg(all(not(debug_assertions), feature="dynamic-assets"))]
compile_error!("dynamic-assets must not be used for production"); compile_error!("dynamic-assets must not be used for production");
lazy_static! { lazy_static! {
pub static ref VERSION: String = || -> String { pub static ref VERSION: String = || -> String {
let mut components = vec![]; let mut components = Vec::<String>::new();
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
components.push("debug".into()); components.push("debug".into());
@ -20,10 +20,10 @@ lazy_static! {
#[cfg(test)] #[cfg(test)]
components.push("test".into()); components.push("test".into());
#[cfg(feature = "dynamic-assets")] #[cfg(feature="dynamic-assets")]
components.push("dynamic-assets".into()); components.push("dynamic-assets".into());
if option_env!("CONTINUOUS_INTEGRATION").is_none() { if let None = option_env!("CONTINUOUS_INTEGRATION") {
components.push("local-build".into()); components.push("local-build".into());
} }
@ -32,22 +32,26 @@ lazy_static! {
} }
if let Some(commit) = option_env!("TRAVIS_COMMIT") { if let Some(commit) = option_env!("TRAVIS_COMMIT") {
components.push(format!( components.push(format!("commit:{}",
"commit:{}",
commit commit
.as_bytes() .as_bytes()
.chunks(4) .chunks(4)
.map(|x| String::from_utf8(x.to_owned()).unwrap_or_else(|_| String::new())) .map(|x|
String::from_utf8(x.to_owned())
.unwrap_or_else(|_| String::new())
)
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(SOFT_HYPHEN) .join(SOFT_HYPHEN)
)); ));
} }
if !components.is_empty() { if components.len() > 0 {
format!("{} ({})", env!("CARGO_PKG_VERSION"), components.join(" ")) format!("{} ({})", env!("CARGO_PKG_VERSION"), components.join(" "))
} else { } else {
env!("CARGO_PKG_VERSION").to_string() env!("CARGO_PKG_VERSION").to_string()
} }
}(); }();
pub static ref HTTP_SERVER: String = format!("{}/{}", PROJECT_NAME, VERSION.as_str());
pub static ref HTTP_SERVER: String =
format!("{}/{}", PROJECT_NAME, VERSION.as_str());
} }

@ -0,0 +1,31 @@
use diesel;
use schema::article_revisions;
mod query_parameters;
mod resource;
mod scope;
pub use self::query_parameters::QueryParameters;
pub use self::scope::Scope;
pub use self::resource::Resource;
fn apply_query_config<'a>(
mut query: article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
)
-> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>
{
use diesel::prelude::*;
if let Some(article_id) = article_id {
query = query.filter(article_revisions::article_id.eq(article_id));
}
if let Some(author) = author {
query = query.filter(article_revisions::author.eq(author));
}
query.limit(limit as i64 + 1)
}

@ -0,0 +1,50 @@
use serde_urlencoded;
use pagination::Pagination;
pub const DEFAULT_LIMIT: i32 = 30;
#[derive(Serialize, Deserialize, Default)]
pub struct QueryParameters {
pub after: Option<i32>,
pub before: Option<i32>,
pub article_id: Option<i32>,
pub author: Option<String>,
pub limit: Option<i32>,
}
impl QueryParameters {
pub fn pagination(self, pagination: Pagination<i32>) -> Self {
Self {
after: if let Pagination::After(x) = pagination { Some(x) } else { None },
before: if let Pagination::Before(x) = pagination { Some(x) } else { None },
..self
}
}
pub fn article_id(self, article_id: Option<i32>) -> Self {
Self { article_id, ..self }
}
pub fn author(self, author: Option<String>) -> Self {
Self { author, ..self }
}
pub fn limit(self, limit: i32) -> Self {
Self {
limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None },
..self
}
}
pub fn into_link(self) -> String {
let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail");
if args.len() > 0 {
format!("?{}", args)
} else {
"_changes".to_owned()
}
}
}
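
For orientation only, here is a short, hypothetical usage sketch of the builder-style methods above. It is not part of this commit; it assumes the QueryParameters struct exactly as shown, the crate's Pagination enum with an After(i32) variant (referenced elsewhere in this diff), and 2015-edition crate paths.

    // Hypothetical example, not from the repository.
    use components::changes::QueryParameters;
    use pagination::Pagination;

    fn example_link() -> String {
        QueryParameters::default()              // every field starts as None
            .article_id(Some(7))                // restrict to a single article
            .pagination(Pagination::After(42))  // sets `after`, leaves `before` unset
            .limit(50)                          // kept: 50 differs from DEFAULT_LIMIT (30)
            .into_link()                        // yields "?after=42&article_id=7&limit=50"
    }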

@ -0,0 +1,219 @@
use futures::{self, Future};
use hyper;
use hyper::header::ContentType;
use hyper::server::*;
use mimes::*;
use pagination::Pagination;
use resources::DiffQueryParameters;
use schema::article_revisions;
use site::system_page;
use state::State;
use web;
use super::apply_query_config;
use super::query_parameters;
pub struct Resource {
state: State,
show_authors: bool,
before: Option<i32>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
}
impl Resource {
pub fn new(state: State, show_authors: bool, before: Option<i32>, article_id: Option<i32>, author: Option<String>, limit: i32) -> Self {
Resource { state, show_authors, before, article_id, author, limit }
}
fn query_args(&self) -> query_parameters::QueryParameters {
query_parameters::QueryParameters {
after: None,
before: self.before,
article_id: self.article_id,
author: self.author.clone(),
..query_parameters::QueryParameters::default()
}
.limit(self.limit)
}
}
impl web::Resource for Resource {
fn allow(&self) -> Vec<hyper::Method> {
use hyper::Method::*;
vec![Options, Head, Get]
}
fn head(&self) -> web::ResponseFuture {
Box::new(futures::finished(Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone()))
))
}
fn get(self: Box<Self>) -> web::ResponseFuture {
use chrono::{TimeZone, Local};
struct Row<'a> {
resource: &'a Resource,
sequence_number: i32,
article_id: i32,
revision: i32,
created: String,
author: Option<String>,
_slug: String,
title: String,
_latest: bool,
diff_link: Option<String>,
}
impl<'a> Row<'a> {
fn author_link(&self) -> String {
self.resource.query_args()
.pagination(Pagination::After(self.sequence_number))
.author(self.author.clone())
.into_link()
}
}
struct NavLinks {
more: String,
end: String,
}
#[derive(BartDisplay)]
#[template="templates/changes.html"]
struct Template<'a> {
resource: &'a Resource,
show_authors: bool,
newer: Option<NavLinks>,
older: Option<NavLinks>,
changes: &'a [Row<'a>],
}
impl<'a> Template<'a> {
fn subject_clause(&self) -> String {
match self.resource.article_id {
Some(x) => format!(" <a href=\"_by_id/{}\">this article</a>", x),
None => format!(" the wiki"),
}
}
fn author(&self) -> Option<String> {
self.resource.author.clone()
}
fn all_articles_link(&self) -> Option<String> {
self.resource.article_id.map(|_| {
self.resource.query_args()
.article_id(None)
.into_link()
})
}
fn all_authors_link(&self) -> Option<String> {
self.resource.author.as_ref().map(|_| {
self.resource.query_args()
.author(None)
.into_link()
})
}
}
let (before, article_id, author, limit) =
(self.before.clone(), self.article_id.clone(), self.author.clone(), self.limit);
let data = self.state.query_article_revision_stubs(move |query| {
use diesel::prelude::*;
let query = apply_query_config(query, article_id, author, limit)
.order(article_revisions::sequence_number.desc());
match before {
Some(x) => query.filter(article_revisions::sequence_number.lt(x)),
None => query,
}
});
let head = self.head();
Box::new(data.join(head)
.and_then(move |(mut data, head)| {
use std::iter::Iterator;
let extra_element = if data.len() > self.limit as usize {
data.pop()
} else {
None
};
let (newer, older) = match self.before {
Some(x) => (
Some(NavLinks {
more: self.query_args().pagination(Pagination::After(x-1)).into_link(),
end: self.query_args().pagination(Pagination::None).into_link(),
}),
extra_element.map(|_| NavLinks {
more: self.query_args()
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
.into_link(),
end: self.query_args().pagination(Pagination::After(0)).into_link(),
})
),
None => (
None,
extra_element.map(|_| NavLinks {
more: self.query_args()
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
.into_link(),
end: self.query_args().pagination(Pagination::After(0)).into_link(),
}),
),
};
let changes = &data.into_iter().map(|x| {
Row {
resource: &self,
sequence_number: x.sequence_number,
article_id: x.article_id,
revision: x.revision,
created: Local.from_utc_datetime(&x.created).to_rfc2822(),
author: x.author,
_slug: x.slug,
title: x.title,
_latest: x.latest,
diff_link:
if x.revision > 1 {
Some(format!("_diff/{}?{}",
x.article_id,
DiffQueryParameters::new(
x.revision as u32 - 1,
x.revision as u32,
)
))
} else {
None
},
}
}).collect::<Vec<_>>();
Ok(head.with_body(system_page(
None, // Hmm, should perhaps accept `base` as argument
"Changes",
Template {
resource: &self,
show_authors: self.show_authors,
newer,
older,
changes
}
).to_string()))
}))
}
}

@ -0,0 +1,90 @@
use futures::Future;
use futures::future::{done, finished};
use serde_urlencoded;
use pagination::{self, Pagination};
use resources::TemporaryRedirectResource;
use schema::article_revisions;
use state::State;
use web;
use super::apply_query_config;
use super::query_parameters;
use super::Resource;
type BoxResource = Box<web::Resource + Sync + Send>;
#[derive(Clone)]
pub struct Scope {
state: State,
show_authors: bool,
}
impl Scope {
pub fn new(state: State, show_authors: bool) -> Scope {
Self { state, show_authors }
}
pub fn lookup(&self, query: Option<&str>) -> Box<Future<Item=Option<BoxResource>, Error=::web::Error>> {
let state = self.state.clone();
let show_authors = self.show_authors;
Box::new(
done((|| {
let params: query_parameters::QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?;
let pagination = pagination::from_fields(params.after, params.before)?;
let limit = match params.limit {
None => Ok(query_parameters::DEFAULT_LIMIT),
Some(x) if 1 <= x && x <= 100 => Ok(x),
_ => Err("`limit` argument must be in range [1, 100]"),
}?;
Ok((pagination, params.article_id, params.author, limit))
})())
.and_then(move |(pagination, article_id, author, limit)| match pagination {
Pagination::After(x) => {
let author2 = author.clone();
Box::new(state.query_article_revision_stubs(move |query| {
use diesel::prelude::*;
apply_query_config(query, article_id, author2, limit)
.filter(article_revisions::sequence_number.gt(x))
.order(article_revisions::sequence_number.asc())
}).and_then(move |mut data| {
let extra_element = if data.len() > limit as usize {
data.pop()
} else {
None
};
let args =
query_parameters::QueryParameters {
after: None,
before: None,
article_id,
author,
limit: None,
}
.limit(limit);
Ok(Some(match extra_element {
Some(x) => Box::new(TemporaryRedirectResource::new(
args
.pagination(Pagination::Before(x.sequence_number))
.into_link()
)) as BoxResource,
None => Box::new(TemporaryRedirectResource::new(
args.into_link()
)) as BoxResource,
}))
})) as Box<Future<Item=Option<BoxResource>, Error=::web::Error>>
},
Pagination::Before(x) => Box::new(finished(Some(Box::new(Resource::new(state, show_authors, Some(x), article_id, author, limit)) as BoxResource))),
Pagination::None => Box::new(finished(Some(Box::new(Resource::new(state, show_authors, None, article_id, author, limit)) as BoxResource))),
})
)
}
}

src/components/mod.rs (new file, 1 line)
@ -0,0 +1 @@
pub mod changes;

@ -1,11 +1,10 @@
use diesel::expression::sql_literal::sql;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::expression::sql_literal::sql;
use diesel::sql_types::*; use diesel::sql_types::*;
use r2d2::{CustomizeConnection, Pool}; use r2d2::{CustomizeConnection, Pool};
use r2d2_diesel::{self, ConnectionManager}; use r2d2_diesel::{self, ConnectionManager};
use crate::rendering; use rendering;
use crate::theme;
embed_migrations!(); embed_migrations!();
@ -13,35 +12,27 @@ embed_migrations!();
struct SqliteInitializer; struct SqliteInitializer;
#[allow(dead_code)] #[allow(dead_code)]
pub mod sqlfunc { mod sqlfunc {
use diesel::sql_types::Text; use diesel::sql_types::Text;
sql_function!(fn markdown_to_fts(text: Text) -> Text); sql_function!(fn markdown_to_fts(text: Text) -> Text);
sql_function!(fn theme_from_str_hash(text: Text) -> Text);
} }
impl CustomizeConnection<SqliteConnection, r2d2_diesel::Error> for SqliteInitializer { impl CustomizeConnection<SqliteConnection, r2d2_diesel::Error> for SqliteInitializer {
fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), r2d2_diesel::Error> { fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), r2d2_diesel::Error> {
sql::<Integer>("PRAGMA foreign_keys = ON") sql::<(Integer)>("PRAGMA foreign_keys = ON")
.execute(conn) .execute(conn)
.map_err(r2d2_diesel::Error::QueryError)?; .map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::markdown_to_fts::register_impl(conn, |text: String| { sqlfunc::markdown_to_fts::register_impl(
rendering::render_markdown_for_fts(&text) conn,
}) |text: String| rendering::render_markdown_for_fts(&text)
.map_err(r2d2_diesel::Error::QueryError)?; ).map_err(|x| r2d2_diesel::Error::QueryError(x))?;
sqlfunc::theme_from_str_hash::register_impl(conn, |title: String| {
theme::theme_from_str_hash(&title)
})
.map_err(r2d2_diesel::Error::QueryError)?;
Ok(()) Ok(())
} }
} }
pub fn create_pool<S: Into<String>>( pub fn create_pool<S: Into<String>>(connection_string: S) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<::std::error::Error>> {
connection_string: S,
) -> Result<Pool<ConnectionManager<SqliteConnection>>, Box<dyn (::std::error::Error)>> {
let manager = ConnectionManager::<SqliteConnection>::new(connection_string); let manager = ConnectionManager::<SqliteConnection>::new(connection_string);
let pool = Pool::builder() let pool = Pool::builder()
.connection_customizer(Box::new(SqliteInitializer {})) .connection_customizer(Box::new(SqliteInitializer {}))
@ -62,48 +53,3 @@ pub fn test_connection() -> SqliteConnection {
conn conn
} }
#[cfg(test)]
mod test {
use super::*;
use diesel::sql_query;
#[test]
fn markdown_to_fts() {
let conn = test_connection();
#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row {
#[sql_type = "Text"]
text: String,
}
let res = sql_query("SELECT markdown_to_fts('[link](url)') as text")
.load::<Row>(&conn)
.unwrap();
let expected = rendering::render_markdown_for_fts("[link](url)");
assert_eq!(expected, res[0].text);
}
#[test]
fn theme_from_str_hash() {
let conn = test_connection();
#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row {
#[sql_type = "Text"]
theme: theme::Theme,
}
let res = sql_query("SELECT theme_from_str_hash('Bartefjes') as theme")
.load::<Row>(&conn)
.unwrap();
let expected = theme::theme_from_str_hash("Bartefjes");
assert_eq!(expected, res[0].theme);
}
}

@ -1,67 +1,64 @@
#![allow(clippy::into_iter_on_ref)] #![recursion_limit="128"] // for diesel's infer_schema!
#![allow(clippy::vec_init_then_push)]
#![recursion_limit = "128"]
// for diesel's infer_schema!
#[cfg(test)] #[cfg(test)] #[macro_use] extern crate matches;
#[macro_use] #[cfg(test)] #[macro_use] extern crate indoc;
extern crate matches;
#[macro_use] #[macro_use] extern crate bart_derive;
extern crate bart_derive; #[macro_use] extern crate codegen;
#[macro_use] #[macro_use] #[allow(deprecated)] extern crate diesel_infer_schema;
extern crate codegen; #[macro_use] extern crate diesel_migrations;
#[macro_use] #[macro_use] extern crate diesel;
#[allow(clippy::useless_attribute)] #[macro_use] extern crate hyper;
#[allow(deprecated)] #[macro_use] extern crate lazy_static;
extern crate diesel_infer_schema; #[macro_use] extern crate maplit;
#[macro_use] #[macro_use] extern crate serde_derive;
extern crate diesel_migrations;
#[macro_use] extern crate chrono;
extern crate diesel; extern crate diff;
#[macro_use] extern crate futures_cpupool;
extern crate hyper; extern crate futures;
#[macro_use] extern crate percent_encoding;
extern crate lazy_static; extern crate pulldown_cmark;
#[macro_use] extern crate r2d2_diesel;
extern crate maplit; extern crate r2d2;
#[macro_use] extern crate seahash;
extern crate serde_derive; extern crate serde_json;
#[macro_use] extern crate serde_urlencoded;
extern crate serde_plain; extern crate serde;
extern crate slug;
extern crate titlecase;
use std::net::{IpAddr, SocketAddr}; use std::net::{IpAddr, SocketAddr};
mod assets; mod assets;
mod build_config; mod build_config;
mod components;
mod db; mod db;
mod merge; mod merge;
mod mimes; mod mimes;
mod models; mod models;
mod pagination;
mod rendering; mod rendering;
mod resources; mod resources;
mod schema; mod schema;
mod site; mod site;
mod state; mod state;
mod theme;
mod web; mod web;
mod wiki_lookup; mod wiki_lookup;
pub fn main( pub fn main(db_file: String, bind_host: IpAddr, bind_port: u16, trust_identity: bool) -> Result<(), Box<std::error::Error>> {
db_file: String,
bind_host: IpAddr,
bind_port: u16,
trust_identity: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let db_pool = db::create_pool(db_file)?; let db_pool = db::create_pool(db_file)?;
let cpu_pool = futures_cpupool::CpuPool::new_num_cpus(); let cpu_pool = futures_cpupool::CpuPool::new_num_cpus();
let state = state::State::new(db_pool, cpu_pool); let state = state::State::new(db_pool, cpu_pool);
let lookup = wiki_lookup::WikiLookup::new(state, trust_identity); let lookup = wiki_lookup::WikiLookup::new(state, trust_identity);
let server = hyper::server::Http::new() let server =
.bind(&SocketAddr::new(bind_host, bind_port), move || { hyper::server::Http::new()
Ok(site::Site::new(lookup.clone(), trust_identity)) .bind(
})?; &SocketAddr::new(bind_host, bind_port),
move || Ok(site::Site::new(lookup.clone(), trust_identity))
)?;
println!("Listening on http://{}", server.local_addr().unwrap()); println!("Listening on http://{}", server.local_addr().unwrap());

@ -1,10 +1,11 @@
#[macro_use] #[macro_use] extern crate lazy_static;
extern crate lazy_static; extern crate clap;
extern crate sausagewiki;
use std::net::IpAddr; use std::net::IpAddr;
mod build_config; mod build_config;
use crate::build_config::*; use build_config::*;
const DATABASE: &str = "DATABASE"; const DATABASE: &str = "DATABASE";
const TRUST_IDENTITY: &str = "trust-identity"; const TRUST_IDENTITY: &str = "trust-identity";
@ -17,61 +18,52 @@ fn args<'a>() -> clap::ArgMatches<'a> {
App::new(PROJECT_NAME) App::new(PROJECT_NAME)
.version(VERSION.as_str()) .version(VERSION.as_str())
.about(env!("CARGO_PKG_DESCRIPTION")) .about(env!("CARGO_PKG_DESCRIPTION"))
.arg( .arg(Arg::with_name(DATABASE)
Arg::with_name(DATABASE)
.help("Sets the database file to use") .help("Sets the database file to use")
.required(true), .required(true))
) .arg(Arg::with_name(PORT)
.arg(
Arg::with_name(PORT)
.help("Sets the listening port") .help("Sets the listening port")
.short("p") .short("p")
.long(PORT) .long(PORT)
.default_value("8080") .default_value("8080")
.validator(|x| match x.parse::<u16>() { .validator(|x| match x.parse::<u16>() {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(_) => Err("Must be an integer in the range [0, 65535]".into()), Err(_) => Err("Must be an integer in the range [0, 65535]".into())
}) })
.takes_value(true), .takes_value(true))
) .arg(Arg::with_name(ADDRESS)
.arg(
Arg::with_name(ADDRESS)
.help("Sets the IP address to bind to") .help("Sets the IP address to bind to")
.short("a") .short("a")
.long(ADDRESS) .long(ADDRESS)
.default_value("127.0.0.1") .default_value("127.0.0.1")
.validator(|x| match x.parse::<IpAddr>() { .validator(|x| match x.parse::<IpAddr>() {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(_) => Err("Must be a valid IP address".into()), Err(_) => Err("Must be a valid IP address".into())
}) })
.takes_value(true), .takes_value(true))
) .arg(Arg::with_name(TRUST_IDENTITY)
.arg( .help("Trust the value in the X-Identity header to be an \
Arg::with_name(TRUST_IDENTITY)
.help(
"Trust the value in the X-Identity header to be an \
authenticated username. This only makes sense when Sausagewiki \ authenticated username. This only makes sense when Sausagewiki \
runs behind a reverse proxy which sets this header.", runs behind a reverse proxy which sets this header.")
) .long(TRUST_IDENTITY))
.long(TRUST_IDENTITY),
)
.get_matches() .get_matches()
} }
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<std::error::Error>> {
let args = args(); let args = args();
const CLAP: &str = "Guaranteed by clap"; const CLAP: &str = "Guaranteed by clap";
const VALIDATOR: &str = "Guaranteed by clap validator"; const VALIDATOR: &str = "Guaranteed by clap validator";
let db_file = args.value_of(DATABASE).expect(CLAP).to_owned(); let db_file = args.value_of(DATABASE).expect(CLAP).to_owned();
let bind_host = args let bind_host = args.value_of(ADDRESS).expect(CLAP).parse().expect(VALIDATOR);
.value_of(ADDRESS)
.expect(CLAP)
.parse()
.expect(VALIDATOR);
let bind_port = args.value_of(PORT).expect(CLAP).parse().expect(VALIDATOR); let bind_port = args.value_of(PORT).expect(CLAP).parse().expect(VALIDATOR);
let trust_identity = args.is_present(TRUST_IDENTITY); let trust_identity = args.is_present(TRUST_IDENTITY);
sausagewiki::main(db_file, bind_host, bind_port, trust_identity) sausagewiki::main(
db_file,
bind_host,
bind_port,
trust_identity,
)
} }

@ -1,7 +1,8 @@
use std::fmt::Debug; use std::fmt::Debug;
use diff;
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct Chunk<'a, Item: 'a + Debug + PartialEq + Copy>( pub struct Chunk<'a, Item: 'a + Debug + PartialEq + Copy>(
pub &'a [diff::Result<Item>], pub &'a [diff::Result<Item>],
pub &'a [diff::Result<Item>], pub &'a [diff::Result<Item>]
); );

@ -1,12 +1,13 @@
use std::fmt::Debug; use std::fmt::Debug;
use diff;
use diff::Result::*; use diff::Result::*;
use super::chunk::Chunk; use super::chunk::Chunk;
pub struct ChunkIterator<'a, Item> pub struct ChunkIterator<'a, Item>
where where
Item: 'a + Debug + PartialEq, Item: 'a + Debug + PartialEq
{ {
left: &'a [diff::Result<Item>], left: &'a [diff::Result<Item>],
right: &'a [diff::Result<Item>], right: &'a [diff::Result<Item>],
@ -14,19 +15,16 @@ where
impl<'a, Item> ChunkIterator<'a, Item> impl<'a, Item> ChunkIterator<'a, Item>
where where
Item: 'a + Debug + PartialEq + Eq, Item: 'a + Debug + PartialEq + Eq
{ {
pub fn new( pub fn new(left: &'a [diff::Result<Item>], right: &'a [diff::Result<Item>]) -> ChunkIterator<'a, Item> {
left: &'a [diff::Result<Item>],
right: &'a [diff::Result<Item>],
) -> ChunkIterator<'a, Item> {
ChunkIterator { left, right } ChunkIterator { left, right }
} }
} }
impl<'a, Item> Iterator for ChunkIterator<'a, Item> impl<'a, Item> Iterator for ChunkIterator<'a, Item>
where where
Item: 'a + Debug + PartialEq + Copy, Item: 'a + Debug + PartialEq + Copy
{ {
type Item = Chunk<'a, Item>; type Item = Chunk<'a, Item>;
@ -48,18 +46,18 @@ where
match (self.left.get(li), self.right.get(ri)) { match (self.left.get(li), self.right.get(ri)) {
(Some(&Right(_)), _) => { (Some(&Right(_)), _) => {
li += 1; li += 1;
} },
(_, Some(&Right(_))) => { (_, Some(&Right(_))) => {
ri += 1; ri += 1;
} },
(Some(&Left(_)), Some(_)) => { (Some(&Left(_)), Some(_)) => {
li += 1; li += 1;
ri += 1; ri += 1;
} },
(Some(_), Some(&Left(_))) => { (Some(_), Some(&Left(_))) => {
li += 1; li += 1;
ri += 1; ri += 1;
} },
(Some(&Both(..)), Some(&Both(..))) => { (Some(&Both(..)), Some(&Both(..))) => {
let chunk = Chunk(&self.left[..li], &self.right[..ri]); let chunk = Chunk(&self.left[..li], &self.right[..ri]);
self.left = &self.left[li..]; self.left = &self.left[li..];
@ -67,7 +65,7 @@ where
return Some(chunk); return Some(chunk);
} }
_ => { _ => {
if !self.left.is_empty() || !self.right.is_empty() { if self.left.len() > 0 || self.right.len() > 0 {
let chunk = Chunk(self.left, self.right); let chunk = Chunk(self.left, self.right);
self.left = &self.left[self.left.len()..]; self.left = &self.left[self.left.len()..];
self.right = &self.right[self.right.len()..]; self.right = &self.right[self.right.len()..];
@ -83,6 +81,7 @@ where
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use diff;
#[test] #[test]
fn simple_case() { fn simple_case() {
@ -95,16 +94,13 @@ mod test {
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!( assert_eq!(vec![
vec![ Chunk(&oa[0.. 3], &ob[0.. 3]),
Chunk(&oa[0..3], &ob[0..3]), Chunk(&oa[3.. 6], &ob[3.. 3]),
Chunk(&oa[3..6], &ob[3..3]), Chunk(&oa[6.. 9], &ob[3.. 6]),
Chunk(&oa[6..9], &ob[3..6]), Chunk(&oa[9.. 9], &ob[6.. 9]),
Chunk(&oa[9..9], &ob[6..9]),
Chunk(&oa[9..12], &ob[9..12]), Chunk(&oa[9..12], &ob[9..12]),
], ], chunks);
chunks
);
} }
#[test] #[test]
@ -117,14 +113,11 @@ mod test {
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!( assert_eq!(vec![
vec![ Chunk(&oa[0.. 3], &ob[0.. 3]),
Chunk(&oa[0..3], &ob[0..3]), Chunk(&oa[3.. 9], &ob[3.. 9]),
Chunk(&oa[3..9], &ob[3..9]),
Chunk(&oa[9..12], &ob[9..12]), Chunk(&oa[9..12], &ob[9..12]),
], ], chunks);
chunks
);
} }
#[test] #[test]
@ -137,10 +130,10 @@ mod test {
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!( assert_eq!(vec![
vec![Chunk(&oa[0..9], &ob[0..9]), Chunk(&oa[9..9], &ob[9..12]),], Chunk(&oa[0..9], &ob[0.. 9]),
chunks Chunk(&oa[9..9], &ob[9..12]),
); ], chunks);
} }
#[test] #[test]
@ -153,10 +146,10 @@ mod test {
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!( assert_eq!(vec![
vec![Chunk(&oa[0..6], &ob[0..6]), Chunk(&oa[6..9], &ob[6..12]),], Chunk(&oa[0..6], &ob[0.. 6]),
chunks Chunk(&oa[6..9], &ob[6..12]),
); ], chunks);
} }
#[test] #[test]
@ -169,6 +162,8 @@ mod test {
let ob = diff::chars(o, b); let ob = diff::chars(o, b);
let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>(); let chunks = ChunkIterator::new(&oa, &ob).collect::<Vec<_>>();
assert_eq!(vec![Chunk(&oa[0..6], &ob[0..6]),], chunks); assert_eq!(vec![
Chunk(&oa[0..6], &ob[0..6]),
], chunks);
} }
} }
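
A minimal sketch of driving ChunkIterator, assuming the diff crate's chars helper and the Chunk/ChunkIterator types defined above: two diffs against the same base text are grouped into Chunks that cover matching spans of the base.

// Illustration only: group two diffs against a common base into chunks.
fn chunk_example() {
    let o = "aaabbbccc";    // common base
    let a = "aaaxxxbbbccc"; // one edit
    let b = "aaabbbyyyccc"; // another edit

    let oa = diff::chars(o, a); // Vec<diff::Result<char>>
    let ob = diff::chars(o, b);

    // Each Chunk borrows the slices of both diffs that cover the same
    // region of the base, so a later pass can resolve them in lockstep.
    for Chunk(left, right) in ChunkIterator::new(&oa, &ob) {
        println!("left: {:?} / right: {:?}", left, right);
    }
}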


@ -1,12 +1,14 @@
mod chunk;
mod chunk_iterator; mod chunk_iterator;
mod chunk;
mod output; mod output;
use std::fmt::Debug; use std::fmt::Debug;
use diff;
use self::chunk_iterator::ChunkIterator; use self::chunk_iterator::ChunkIterator;
use self::output::Output::Resolved;
use self::output::*; use self::output::*;
use self::output::Output::Resolved;
pub use self::output::Output; pub use self::output::Output;
@ -17,12 +19,12 @@ pub enum MergeResult<Item: Debug + PartialEq> {
} }
impl<'a> MergeResult<&'a str> { impl<'a> MergeResult<&'a str> {
pub fn into_strings(self) -> MergeResult<String> { pub fn to_strings(self) -> MergeResult<String> {
match self { match self {
MergeResult::Clean(x) => MergeResult::Clean(x), MergeResult::Clean(x) => MergeResult::Clean(x),
MergeResult::Conflicted(x) => { MergeResult::Conflicted(x) => MergeResult::Conflicted(
MergeResult::Conflicted(x.into_iter().map(Output::into_strings).collect()) x.into_iter().map(Output::to_strings).collect()
} )
} }
} }
} }
@ -31,8 +33,8 @@ impl MergeResult<String> {
pub fn flatten(self) -> String { pub fn flatten(self) -> String {
match self { match self {
MergeResult::Clean(x) => x, MergeResult::Clean(x) => x,
MergeResult::Conflicted(x) => x MergeResult::Conflicted(x) => {
.into_iter() x.into_iter()
.flat_map(|out| match out { .flat_map(|out| match out {
Output::Conflict(a, _o, b) => { Output::Conflict(a, _o, b) => {
let mut x: Vec<String> = vec![]; let mut x: Vec<String> = vec![];
@ -42,10 +44,12 @@ impl MergeResult<String> {
x.extend(b.into_iter().map(|x| format!("{}\n", x))); x.extend(b.into_iter().map(|x| format!("{}\n", x)));
x.push(">>>>>>> Conflict ends here\n".into()); x.push(">>>>>>> Conflict ends here\n".into());
x x
} },
Output::Resolved(x) => x.into_iter().map(|x| format!("{}\n", x)).collect(), Output::Resolved(x) =>
x.into_iter().map(|x| format!("{}\n", x)).collect(),
}) })
.collect(), .collect()
}
} }
} }
} }
@ -54,8 +58,8 @@ impl MergeResult<char> {
pub fn flatten(self) -> String { pub fn flatten(self) -> String {
match self { match self {
MergeResult::Clean(x) => x, MergeResult::Clean(x) => x,
MergeResult::Conflicted(x) => x MergeResult::Conflicted(x) => {
.into_iter() x.into_iter()
.flat_map(|out| match out { .flat_map(|out| match out {
Output::Conflict(a, _o, b) => { Output::Conflict(a, _o, b) => {
let mut x: Vec<char> = vec![]; let mut x: Vec<char> = vec![];
@ -65,10 +69,11 @@ impl MergeResult<char> {
x.extend(b); x.extend(b);
x.push('>'); x.push('>');
x x
} },
Output::Resolved(x) => x, Output::Resolved(x) => x,
}) })
.collect(), .collect()
}
} }
} }
} }
@ -80,7 +85,7 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
let chunks = ChunkIterator::new(&oa, &ob); let chunks = ChunkIterator::new(&oa, &ob);
let hunks: Vec<_> = chunks.map(resolve).collect(); let hunks: Vec<_> = chunks.map(resolve).collect();
let clean = hunks.iter().all(|x| matches!(x, Resolved(..))); let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
if clean { if clean {
MergeResult::Clean( MergeResult::Clean(
@ -88,10 +93,10 @@ pub fn merge_lines<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<&'a st
.into_iter() .into_iter()
.flat_map(|x| match x { .flat_map(|x| match x {
Resolved(y) => y.into_iter(), Resolved(y) => y.into_iter(),
_ => unreachable!(), _ => unreachable!()
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join("\n"), .join("\n")
) )
} else { } else {
MergeResult::Conflicted(hunks) MergeResult::Conflicted(hunks)
@ -105,7 +110,7 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
let chunks = ChunkIterator::new(&oa, &ob); let chunks = ChunkIterator::new(&oa, &ob);
let hunks: Vec<_> = chunks.map(resolve).collect(); let hunks: Vec<_> = chunks.map(resolve).collect();
let clean = hunks.iter().all(|x| matches!(x, Resolved(..))); let clean = hunks.iter().all(|x| match x { &Resolved(..) => true, _ => false });
if clean { if clean {
MergeResult::Clean( MergeResult::Clean(
@ -113,9 +118,9 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
.into_iter() .into_iter()
.flat_map(|x| match x { .flat_map(|x| match x {
Resolved(y) => y.into_iter(), Resolved(y) => y.into_iter(),
_ => unreachable!(), _ => unreachable!()
}) })
.collect(), .collect()
) )
} else { } else {
MergeResult::Conflicted(hunks) MergeResult::Conflicted(hunks)
@ -124,11 +129,11 @@ pub fn merge_chars<'a>(a: &'a str, o: &'a str, b: &'a str) -> MergeResult<char>
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use indoc::indoc; use diff;
use super::output::Output::*;
use super::output::*;
use super::*; use super::*;
use super::output::*;
use super::output::Output::*;
#[test] #[test]
fn simple_case() { fn simple_case() {
@ -140,141 +145,106 @@ mod test {
chunks.map(resolve).collect() chunks.map(resolve).collect()
} }
assert_eq!( assert_eq!(vec![
vec![
Resolved("aaa".chars().collect()), Resolved("aaa".chars().collect()),
Resolved("xxx".chars().collect()), Resolved("xxx".chars().collect()),
Resolved("bbb".chars().collect()), Resolved("bbb".chars().collect()),
Resolved("yyy".chars().collect()), Resolved("yyy".chars().collect()),
Resolved("ccc".chars().collect()), Resolved("ccc".chars().collect()),
], ], merge_chars(
merge_chars("aaaxxxbbbccc", "aaabbbccc", "aaabbbyyyccc",) "aaaxxxbbbccc",
); "aaabbbccc",
"aaabbbyyyccc",
));
} }
#[test] #[test]
fn clean_case() { fn clean_case() {
assert_eq!( assert_eq!(MergeResult::Clean(indoc!("
MergeResult::Clean(
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
yyy yyy
ccc ccc
" ").into()), merge_lines(
) indoc!("
.into()
),
merge_lines(
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
bbb bbb
yyy yyy
ccc ccc
" "),
), ));
)
);
} }
#[test] #[test]
fn clean_case_chars() { fn clean_case_chars() {
assert_eq!( assert_eq!(MergeResult::Clean("Title".into()), merge_chars(
MergeResult::Clean("Title".into()), "Titlle",
merge_chars("Titlle", "titlle", "title",) "titlle",
); "title",
));
} }
#[test] #[test]
fn false_conflict() { fn false_conflict() {
assert_eq!( assert_eq!(MergeResult::Clean(indoc!("
MergeResult::Clean(
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
" ").into()), merge_lines(
) indoc!("
.into()
),
merge_lines(
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
xxx xxx
ccc ccc
" "),
), ));
)
);
} }
#[test] #[test]
fn true_conflict() { fn true_conflict() {
assert_eq!( assert_eq!(MergeResult::Conflicted(vec![
MergeResult::Conflicted(vec![
Resolved(vec!["aaa"]), Resolved(vec!["aaa"]),
Conflict(vec!["xxx"], vec![], vec!["yyy"]), Conflict(vec!["xxx"], vec![], vec!["yyy"]),
Resolved(vec!["bbb", "ccc", ""]), Resolved(vec!["bbb", "ccc", ""]),
]), ]), merge_lines(
merge_lines( indoc!("
indoc!(
"
aaa aaa
xxx xxx
bbb bbb
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
bbb bbb
ccc ccc
" "),
), indoc!("
indoc!(
"
aaa aaa
yyy yyy
bbb bbb
ccc ccc
" "),
), ));
)
);
} }
} }
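
A minimal sketch of a three-way merge with the functions above, assuming MergeResult keeps the Clean(String)/Conflicted(Vec<Output<_>>) shape used here; the conflicted branch goes through into_strings (to_strings in the older naming) and flatten to render conflict markers inline.

// Illustration only: three-way merge of two concurrent edits over a base.
fn merge_example(base: &str, ours: &str, theirs: &str) -> String {
    match merge_lines(ours, base, theirs) {
        // Both edits could be reconciled without overlap.
        MergeResult::Clean(text) => text,
        // Otherwise render <<<<<<< ... >>>>>>> conflict markers inline,
        // mirroring how the wiki surfaces a rebase conflict to the editor.
        other => other.into_strings().flatten(),
    }
}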


@ -1,5 +1,6 @@
use std::fmt::Debug; use std::fmt::Debug;
use diff;
use diff::Result::*; use diff::Result::*;
use super::chunk::Chunk; use super::chunk::Chunk;
@ -11,7 +12,7 @@ pub enum Output<Item: Debug + PartialEq> {
} }
impl<'a> Output<&'a str> { impl<'a> Output<&'a str> {
pub fn into_strings(self) -> Output<String> { pub fn to_strings(self) -> Output<String> {
match self { match self {
Output::Resolved(x) => Output::Resolved(x.into_iter().map(str::to_string).collect()), Output::Resolved(x) => Output::Resolved(x.into_iter().map(str::to_string).collect()),
Output::Conflict(a, o, b) => Output::Conflict( Output::Conflict(a, o, b) => Output::Conflict(
@ -26,10 +27,10 @@ impl<'a> Output<&'a str> {
fn choose_left<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> { fn choose_left<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> {
operations operations
.iter() .iter()
.filter_map(|x| match *x { .filter_map(|x| match x {
Both(y, _) => Some(y), &Both(y, _) => Some(y),
Left(y) => Some(y), &Left(y) => Some(y),
Right(_) => None, &Right(_) => None,
}) })
.collect() .collect()
} }
@ -37,16 +38,21 @@ fn choose_left<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> {
fn choose_right<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> { fn choose_right<Item: Copy>(operations: &[diff::Result<Item>]) -> Vec<Item> {
operations operations
.iter() .iter()
.filter_map(|x| match *x { .filter_map(|x| match x {
Both(_, y) => Some(y), &Both(_, y) => Some(y),
Left(_) => None, &Left(_) => None,
Right(y) => Some(y), &Right(y) => Some(y),
}) })
.collect() .collect()
} }
fn no_change<Item>(operations: &[diff::Result<Item>]) -> bool { fn no_change<Item>(operations: &[diff::Result<Item>]) -> bool {
operations.iter().all(|x| matches!(x, Both(..))) operations
.iter()
.all(|x| match x {
&Both(..) => true,
_ => false,
})
} }
pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>) -> Output<Item> { pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>) -> Output<Item> {
@ -63,51 +69,92 @@ pub fn resolve<'a, Item: 'a + Debug + PartialEq + Copy>(chunk: Chunk<'a, Item>)
return Output::Resolved(choose_right(chunk.0)); return Output::Resolved(choose_right(chunk.0));
} }
Output::Conflict( return Output::Conflict(
choose_right(chunk.0), choose_right(chunk.0),
choose_left(chunk.0), choose_left(chunk.0),
choose_right(chunk.1), choose_right(chunk.1),
) );
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use diff::Result::*;
use super::*; use super::*;
#[test] #[test]
fn empty() { fn empty() {
assert_eq!(Output::Resolved(vec![]), resolve::<i32>(Chunk(&[], &[]))); assert_eq!(
Output::Resolved(vec![]),
resolve::<i32>(Chunk(&[], &[]))
);
} }
#[test] #[test]
fn same() { fn same() {
assert_eq!( assert_eq!(
Output::Resolved(vec![1]), Output::Resolved(vec![
resolve::<i32>(Chunk(&[Both(1, 1)], &[Both(1, 1)])) 1
]),
resolve::<i32>(Chunk(
&[Both(1, 1)],
&[Both(1, 1)]
))
); );
} }
#[test] #[test]
fn only_left() { fn only_left() {
assert_eq!( assert_eq!(
Output::Resolved(vec![2]), Output::Resolved(vec![
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[])) 2
]),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[]
))
); );
} }
#[test] #[test]
fn false_conflict() { fn false_conflict() {
assert_eq!( assert_eq!(
Output::Resolved(vec![2]), Output::Resolved(vec![
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(2)],)) 2
]),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[
Left(1),
Right(2)
],
))
); );
} }
#[test] #[test]
fn real_conflict() { fn real_conflict() {
assert_eq!( assert_eq!(
Output::Conflict(vec![2], vec![1], vec![3],), Output::Conflict(
resolve::<i32>(Chunk(&[Left(1), Right(2)], &[Left(1), Right(3)],)) vec![2],
vec![1],
vec![3],
),
resolve::<i32>(Chunk(
&[
Left(1),
Right(2)
],
&[
Left(1),
Right(3)
],
))
); );
} }
} }
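
The no_change helper above uses the std matches! macro as a boolean predicate; a small sketch of that idiom over the diff crate's Result variants, for illustration only.

// Illustration only: matches! as a boolean predicate over diff operations.
fn no_change_demo() {
    use diff::Result::{Both, Left, Right};

    let unchanged = vec![Both('a', 'a'), Both('b', 'b')];
    assert!(unchanged.iter().all(|x| matches!(x, Both(..))));

    let changed = vec![Both('a', 'a'), Left('x'), Right('y')];
    assert!(!changed.iter().all(|x| matches!(x, Both(..))));
}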

View file

@ -1,4 +1,4 @@
use crate::theme::Theme; use chrono;
fn slug_link(slug: &str) -> &str { fn slug_link(slug: &str) -> &str {
if slug.is_empty() { if slug.is_empty() {
@ -23,14 +23,10 @@ pub struct ArticleRevision {
pub latest: bool, pub latest: bool,
pub author: Option<String>, pub author: Option<String>,
pub theme: Theme,
} }
impl ArticleRevision { impl ArticleRevision {
pub fn link(&self) -> &str { pub fn link(&self) -> &str { slug_link(&self.slug) }
slug_link(&self.slug)
}
} }
#[derive(Debug, PartialEq, Queryable)] #[derive(Debug, PartialEq, Queryable)]
@ -47,14 +43,10 @@ pub struct ArticleRevisionStub {
pub latest: bool, pub latest: bool,
pub author: Option<String>, pub author: Option<String>,
pub theme: Theme,
} }
impl ArticleRevisionStub { impl ArticleRevisionStub {
pub fn link(&self) -> &str { pub fn link(&self) -> &str { slug_link(&self.slug) }
slug_link(&self.slug)
}
} }
use diesel::sql_types::Text; use diesel::sql_types::Text;
@ -71,7 +63,5 @@ pub struct SearchResult {
} }
impl SearchResult { impl SearchResult {
pub fn link(&self) -> &str { pub fn link(&self) -> &str { slug_link(&self.slug) }
slug_link(&self.slug)
}
} }


@ -8,11 +8,15 @@ pub struct Error;
impl fmt::Display for Error { impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "`after` and `before` are mutually exclusive") write!(f, "{}", (self as &error::Error).description())
} }
} }
impl error::Error for Error {} impl error::Error for Error {
fn description(&self) -> &str {
"`after` and `before` are mutually exclusive"
}
}
#[derive(Deserialize)] #[derive(Deserialize)]
struct PaginationStruct<T> { struct PaginationStruct<T> {
@ -33,16 +37,16 @@ impl<T> PaginationStruct<T> {
(Some(x), None) => Ok(Pagination::After(x)), (Some(x), None) => Ok(Pagination::After(x)),
(None, Some(x)) => Ok(Pagination::Before(x)), (None, Some(x)) => Ok(Pagination::Before(x)),
(None, None) => Ok(Pagination::None), (None, None) => Ok(Pagination::None),
_ => Err(Error), _ => Err(Error)
} }
} }
} }
pub fn _from_str<'a, T: serde::Deserialize<'a>>(s: &'a str) -> Result<Pagination<T>, Error> { pub fn _from_str<'a, T: serde::Deserialize<'a>>(s: &'a str) -> Result<Pagination<T>, Error> {
let pagination: PaginationStruct<T> = serde_urlencoded::from_str(s).map_err(|_| Error)?; // TODO Proper error reporting let pagination: PaginationStruct<T> = serde_urlencoded::from_str(s).map_err(|_| Error)?; // TODO Proper error reporting
pagination.into_enum() Ok(pagination.into_enum()?)
} }
pub fn from_fields<T>(after: Option<T>, before: Option<T>) -> Result<Pagination<T>, Error> { pub fn from_fields<T>(after: Option<T>, before: Option<T>) -> Result<Pagination<T>, Error> {
PaginationStruct { after, before }.into_enum() Ok(PaginationStruct { after, before }.into_enum()?)
} }


@ -1,18 +1,9 @@
use pulldown_cmark::Event::{End, Text}; use pulldown_cmark::{Parser, Tag, html, OPTION_ENABLE_TABLES, OPTION_DISABLE_HTML};
use pulldown_cmark::{html, Parser, Tag, OPTION_DISABLE_HTML, OPTION_ENABLE_TABLES}; use pulldown_cmark::Event::{Text, End};
use slug::slugify;
fn slugify_link(text: &str, title: &str) -> Option<(String, String)> {
Some((slugify(text), title.to_owned()))
}
fn parser(src: &str) -> Parser {
let opts = OPTION_ENABLE_TABLES | OPTION_DISABLE_HTML;
Parser::new_with_broken_link_callback(src, opts, Some(&slugify_link))
}
pub fn render_markdown(src: &str) -> String { pub fn render_markdown(src: &str) -> String {
let p = parser(src); let opts = OPTION_ENABLE_TABLES | OPTION_DISABLE_HTML;
let p = Parser::new_ext(src, opts);
let mut buf = String::new(); let mut buf = String::new();
html::push_html(&mut buf, p); html::push_html(&mut buf, p);
buf buf
@ -23,43 +14,22 @@ fn is_html_special(c: char) -> bool {
} }
pub fn render_markdown_for_fts(src: &str) -> String { pub fn render_markdown_for_fts(src: &str) -> String {
let p = parser(src); let opts = OPTION_ENABLE_TABLES | OPTION_DISABLE_HTML;
let p = Parser::new_ext(src, opts);
let mut buf = String::new(); let mut buf = String::new();
for event in p { for event in p {
match event { match event {
// As far as I understand this is a basic Text(text) =>
// sanitizing to prevent HTML from buf.push_str(&text.replace(is_html_special, " ")),
// appearing in page.
Text(text) => buf.push_str(&text.replace(is_html_special, " ")),
// Footnote links maybe?
End(Tag::Link(uri, _title)) => { End(Tag::Link(uri, _title)) => {
buf.push_str(" ("); buf.push_str(" (");
buf.push_str(&uri.replace(is_html_special, " ")); buf.push_str(&uri.replace(is_html_special, " "));
buf.push_str(") "); buf.push_str(") ");
} }
_ => buf.push(' '), _ => buf.push_str(" "),
} }
} }
buf buf
} }
#[cfg(test)]
mod test {
use super::*;
#[test]
fn slug_link() {
let actual = render_markdown("[Slug link]");
let expected = "<p><a href=\"slug-link\" title=\"Slug link\">Slug link</a></p>\n";
assert_eq!(actual, expected);
}
#[test]
fn footnote_links() {
let actual = render_markdown("[Link]\n\n[Link]: target");
let expected = "<p><a href=\"target\">Link</a></p>\n";
assert_eq!(actual, expected);
}
}
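
A small sketch of what the FTS rendering above produces: plain text with HTML-special characters blanked out, and link targets appended in parentheses so they stay searchable. The URL is a placeholder.

// Illustration only, based on render_markdown_for_fts above.
fn fts_example() {
    let fts = render_markdown_for_fts("See the [docs](https://example.com/)");
    assert!(fts.contains("docs"));                   // link text survives
    assert!(fts.contains("(https://example.com/)")); // link target is kept
}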


@ -1,12 +1,12 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
use crate::build_config; use build_config;
use crate::mimes::*; use mimes::*;
use crate::site::system_page; use site::system_page;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
#[derive(Licenses)] #[derive(Licenses)]
pub struct AboutResource; pub struct AboutResource;
@ -28,20 +28,20 @@ impl License {
fn link(&self) -> &'static str { fn link(&self) -> &'static str {
use self::License::*; use self::License::*;
match self { match self {
Bsd3Clause => "bsd-3-clause", &Bsd3Clause => "bsd-3-clause",
Mit => "mit", &Mit => "mit",
Mpl2 => "mpl2", &Mpl2 => "mpl2",
Ofl11 => "sil-ofl-1.1", &Ofl11 => "sil-ofl-1.1",
} }
} }
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
use self::License::*; use self::License::*;
match self { match self {
Bsd3Clause => "BSD-3-Clause", &Bsd3Clause => "BSD-3-Clause",
Mit => "MIT", &Mit => "MIT",
Mpl2 => "MPL2", &Mpl2 => "MPL2",
Ofl11 => "OFL-1.1", &Ofl11 => "OFL-1.1",
} }
} }
} }
@ -54,15 +54,13 @@ struct LicenseInfo {
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/about.html"] #[template="templates/about.html"]
struct Template<'a> { struct Template<'a> {
deps: &'a [LicenseInfo], deps: &'a [LicenseInfo]
} }
impl<'a> Template<'a> { impl<'a> Template<'a> {
fn version(&self) -> &str { fn version(&self) -> &str { &build_config::VERSION }
&build_config::VERSION
}
} }
impl Resource for AboutResource { impl Resource for AboutResource {
@ -72,27 +70,24 @@ impl Resource for AboutResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let head = self.head(); let head = self.head();
Box::new(head.and_then(move |head| { Box::new(head
Ok(head.with_body( .and_then(move |head| {
system_page( Ok(head.with_body(system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"About Sausagewiki", "About Sausagewiki",
Template { Template {
deps: *LICENSE_INFOS, deps: &*LICENSE_INFOS
}, },
) ).to_string()))
.to_string(),
))
})) }))
} }
} }


@ -1,26 +1,22 @@
use chrono::{DateTime, Local, TimeZone}; use chrono::{TimeZone, DateTime, Local};
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::{ContentType, Location}; use hyper::header::{ContentType, Location};
use hyper::server::*; use hyper::server::*;
use serde_json;
use serde_urlencoded;
use crate::assets::ScriptJs; use assets::ScriptJs;
use crate::mimes::*; use mimes::*;
use crate::rendering::render_markdown; use rendering::render_markdown;
use crate::site::Layout; use site::Layout;
use crate::state::{RebaseConflict, State, UpdateResult}; use state::{State, UpdateResult, RebaseConflict};
use crate::theme::{self, Theme}; use web::{Resource, ResponseFuture};
use crate::web::{Resource, ResponseFuture};
use super::changes_resource::QueryParameters; use components::changes::QueryParameters;
struct SelectableTheme {
theme: Theme,
selected: bool,
}
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/article.html"] #[template="templates/article.html"]
struct Template<'a> { struct Template<'a> {
revision: i32, revision: i32,
last_updated: Option<&'a str>, last_updated: Option<&'a str>,
@ -30,7 +26,6 @@ struct Template<'a> {
title: &'a str, title: &'a str,
raw: &'a str, raw: &'a str,
rendered: String, rendered: String,
themes: &'a [SelectableTheme],
} }
impl<'a> Template<'a> { impl<'a> Template<'a> {
@ -44,7 +39,6 @@ struct UpdateArticle {
base_revision: i32, base_revision: i32,
title: String, title: String,
body: String, body: String,
theme: Option<Theme>,
} }
pub struct ArticleResource { pub struct ArticleResource {
@ -56,12 +50,7 @@ pub struct ArticleResource {
impl ArticleResource { impl ArticleResource {
pub fn new(state: State, article_id: i32, revision: i32, edit: bool) -> Self { pub fn new(state: State, article_id: i32, revision: i32, edit: bool) -> Self {
Self { Self { state, article_id, revision, edit }
state,
article_id,
revision,
edit,
}
} }
} }
@ -81,23 +70,12 @@ pub fn last_updated(article_id: i32, created: &DateTime<Local>, author: Option<&
Template { Template {
created: &created.to_rfc2822(), created: &created.to_rfc2822(),
article_history: &format!( article_history: &format!("_changes{}", QueryParameters::default().article_id(Some(article_id)).into_link()),
"_changes{}",
QueryParameters::default()
.article_id(Some(article_id))
.into_link()
),
author: author.map(|author| Author { author: author.map(|author| Author {
author, author: &author,
history: format!( history: format!("_changes{}", QueryParameters::default().author(Some(author.to_owned())).into_link()),
"_changes{}",
QueryParameters::default()
.author(Some(author.to_owned()))
.into_link()
),
}), }),
} }.to_string()
.to_string()
} }
impl Resource for ArticleResource { impl Resource for ArticleResource {
@ -107,49 +85,37 @@ impl Resource for ArticleResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let data = self let data = self.state.get_article_revision(self.article_id, self.revision)
.state
.get_article_revision(self.article_id, self.revision)
.map(|x| x.expect("Data model guarantees that this exists")); .map(|x| x.expect("Data model guarantees that this exists"));
let head = self.head(); let head = self.head();
Box::new(data.join(head).and_then(move |(data, head)| { Box::new(data.join(head)
Ok(head.with_body( .and_then(move |(data, head)| {
Layout { Ok(head
.with_body(Layout {
base: None, // Hmm, should perhaps accept `base` as argument base: None, // Hmm, should perhaps accept `base` as argument
title: &data.title, title: &data.title,
theme: data.theme,
body: &Template { body: &Template {
revision: data.revision, revision: data.revision,
last_updated: Some(&last_updated( last_updated: Some(&last_updated(
data.article_id, data.article_id,
&Local.from_utc_datetime(&data.created), &Local.from_utc_datetime(&data.created),
data.author.as_deref(), data.author.as_ref().map(|x| &**x)
)), )),
edit: self.edit, edit: self.edit,
cancel_url: Some(data.link()), cancel_url: Some(data.link()),
title: &data.title, title: &data.title,
raw: &data.body, raw: &data.body,
rendered: render_markdown(&data.body), rendered: render_markdown(&data.body),
themes: &theme::THEMES
.iter()
.map(|&x| SelectableTheme {
theme: x,
selected: x == data.theme,
})
.collect::<Vec<_>>(),
}, },
} }.to_string()))
.to_string(),
))
})) }))
} }
@ -159,7 +125,7 @@ impl Resource for ArticleResource {
use futures::Stream; use futures::Stream;
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/article_contents.html"] #[template="templates/article_contents.html"]
struct Template<'a> { struct Template<'a> {
title: &'a str, title: &'a str,
rendered: String, rendered: String,
@ -172,84 +138,69 @@ impl Resource for ArticleResource {
revision: i32, revision: i32,
title: &'a str, title: &'a str,
body: &'a str, body: &'a str,
theme: Theme,
rendered: &'a str, rendered: &'a str,
last_updated: &'a str, last_updated: &'a str,
} }
Box::new( Box::new(body
body.concat2() .concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into)) .and_then(|body| {
serde_urlencoded::from_bytes(&body)
.map_err(Into::into)
})
.and_then(move |update: UpdateArticle| { .and_then(move |update: UpdateArticle| {
self.state.update_article( self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity)
self.article_id,
update.base_revision,
update.title,
update.body,
identity,
update.theme,
)
}) })
.and_then(|updated| match updated { .and_then(|updated| match updated {
UpdateResult::Success(updated) => Ok(Response::new() UpdateResult::Success(updated) =>
Ok(Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone())) .with_header(ContentType(APPLICATION_JSON.clone()))
.with_body( .with_body(serde_json::to_string(&PutResponse {
serde_json::to_string(&PutResponse {
conflict: false, conflict: false,
slug: &updated.slug, slug: &updated.slug,
revision: updated.revision, revision: updated.revision,
title: &updated.title, title: &updated.title,
body: &updated.body, body: &updated.body,
theme: updated.theme,
rendered: &Template { rendered: &Template {
title: &updated.title, title: &updated.title,
rendered: render_markdown(&updated.body), rendered: render_markdown(&updated.body),
} }.to_string(),
.to_string(),
last_updated: &last_updated( last_updated: &last_updated(
updated.article_id, updated.article_id,
&Local.from_utc_datetime(&updated.created), &Local.from_utc_datetime(&updated.created),
updated.author.as_deref(), updated.author.as_ref().map(|x| &**x)
),
}).expect("Should never fail"))
), ),
})
.expect("Should never fail"),
)),
UpdateResult::RebaseConflict(RebaseConflict { UpdateResult::RebaseConflict(RebaseConflict {
base_article, base_article, title, body
title,
body,
theme,
}) => { }) => {
let title = title.flatten(); let title = title.flatten();
let body = body.flatten(); let body = body.flatten();
Ok(Response::new() Ok(Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone())) .with_header(ContentType(APPLICATION_JSON.clone()))
.with_body( .with_body(serde_json::to_string(&PutResponse {
serde_json::to_string(&PutResponse {
conflict: true, conflict: true,
slug: &base_article.slug, slug: &base_article.slug,
revision: base_article.revision, revision: base_article.revision,
title: &title, title: &title,
body: &body, body: &body,
theme,
rendered: &Template { rendered: &Template {
title: &title, title: &title,
rendered: render_markdown(&body), rendered: render_markdown(&body),
} }.to_string(),
.to_string(),
last_updated: &last_updated( last_updated: &last_updated(
base_article.article_id, base_article.article_id,
&Local.from_utc_datetime(&base_article.created), &Local.from_utc_datetime(&base_article.created),
base_article.author.as_deref(), base_article.author.as_ref().map(|x| &**x)
), ),
}) }).expect("Should never fail"))
.expect("Should never fail"), )
))
} }
}), })
) )
} }
@ -258,67 +209,53 @@ impl Resource for ArticleResource {
use futures::Stream; use futures::Stream;
Box::new( Box::new(body
body.concat2() .concat2()
.map_err(Into::into)
.and_then(|body| {
serde_urlencoded::from_bytes(&body)
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into))
.and_then(move |update: UpdateArticle| {
self.state.update_article(
self.article_id,
update.base_revision,
update.title,
update.body,
identity,
update.theme,
)
}) })
.and_then(|updated| match updated { .and_then(move |update: UpdateArticle| {
self.state.update_article(self.article_id, update.base_revision, update.title, update.body, identity)
})
.and_then(|updated| {
match updated {
UpdateResult::Success(updated) => Ok(Response::new() UpdateResult::Success(updated) => Ok(Response::new()
.with_status(hyper::StatusCode::SeeOther) .with_status(hyper::StatusCode::SeeOther)
.with_header(ContentType(TEXT_PLAIN.clone())) .with_header(ContentType(TEXT_PLAIN.clone()))
.with_header(Location::new(updated.link().to_owned())) .with_header(Location::new(updated.link().to_owned()))
.with_body("See other")), .with_body("See other")
),
UpdateResult::RebaseConflict(RebaseConflict { UpdateResult::RebaseConflict(RebaseConflict {
base_article, base_article, title, body
title,
body,
theme,
}) => { }) => {
let title = title.flatten(); let title = title.flatten();
let body = body.flatten(); let body = body.flatten();
Ok(Response::new() Ok(Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body( .with_body(Layout {
Layout {
base: None, base: None,
title: &title, title: &title,
theme,
body: &Template { body: &Template {
revision: base_article.revision, revision: base_article.revision,
last_updated: Some(&last_updated( last_updated: Some(&last_updated(
base_article.article_id, base_article.article_id,
&Local.from_utc_datetime(&base_article.created), &Local.from_utc_datetime(&base_article.created),
base_article.author.as_deref(), base_article.author.as_ref().map(|x| &**x)
)), )),
edit: true, edit: true,
cancel_url: Some(base_article.link()), cancel_url: Some(base_article.link()),
title: &title, title: &title,
raw: &body, raw: &body,
rendered: render_markdown(&body), rendered: render_markdown(&body),
themes: &theme::THEMES
.iter()
.map(|&x| SelectableTheme {
theme: x,
selected: x == theme,
})
.collect::<Vec<_>>(),
}, },
}.to_string())
)
} }
.to_string(),
))
} }
}), })
) )
} }
} }


@ -1,18 +1,18 @@
use chrono::{DateTime, Local, TimeZone}; use chrono::{TimeZone, DateTime, Local};
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
use crate::mimes::*; use components::changes::QueryParameters;
use crate::models; use mimes::*;
use crate::rendering::render_markdown; use models;
use crate::site::system_page; use pagination::Pagination;
use crate::web::{Resource, ResponseFuture}; use rendering::render_markdown;
use site::system_page;
use web::{Resource, ResponseFuture};
use super::changes_resource::QueryParameters;
use super::diff_resource; use super::diff_resource;
use super::pagination::Pagination;
pub struct ArticleRevisionResource { pub struct ArticleRevisionResource {
data: models::ArticleRevision, data: models::ArticleRevision,
@ -24,12 +24,7 @@ impl ArticleRevisionResource {
} }
} }
pub fn timestamp_and_author( pub fn timestamp_and_author(sequence_number: i32, article_id: i32, created: &DateTime<Local>, author: Option<&str>) -> String {
sequence_number: i32,
article_id: i32,
created: &DateTime<Local>,
author: Option<&str>,
) -> String {
struct Author<'a> { struct Author<'a> {
author: &'a str, author: &'a str,
history: String, history: String,
@ -47,17 +42,15 @@ pub fn timestamp_and_author(
Template { Template {
created: &created.to_rfc2822(), created: &created.to_rfc2822(),
article_history: &format!( article_history: &format!("_changes{}",
"_changes{}",
QueryParameters::default() QueryParameters::default()
.pagination(pagination) .pagination(pagination)
.article_id(Some(article_id)) .article_id(Some(article_id))
.into_link() .into_link()
), ),
author: author.map(|author| Author { author: author.map(|author| Author {
author, author: &author,
history: format!( history: format!("_changes{}",
"_changes{}",
QueryParameters::default() QueryParameters::default()
.pagination(pagination) .pagination(pagination)
.article_id(Some(article_id)) .article_id(Some(article_id))
@ -65,8 +58,7 @@ pub fn timestamp_and_author(
.into_link() .into_link()
), ),
}), }),
} }.to_string()
.to_string()
} }
impl Resource for ArticleRevisionResource { impl Resource for ArticleRevisionResource {
@ -76,16 +68,15 @@ impl Resource for ArticleRevisionResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/article_revision.html"] #[template="templates/article_revision.html"]
struct Template<'a> { struct Template<'a> {
link_current: &'a str, link_current: &'a str,
timestamp_and_author: &'a str, timestamp_and_author: &'a str,
@ -96,9 +87,9 @@ impl Resource for ArticleRevisionResource {
let head = self.head(); let head = self.head();
let data = self.data; let data = self.data;
Box::new(head.and_then(move |head| { Box::new(head
Ok(head.with_body( .and_then(move |head|
system_page( Ok(head.with_body(system_page(
Some("../../"), // Hmm, should perhaps accept `base` as argument Some("../../"), // Hmm, should perhaps accept `base` as argument
&data.title, &data.title,
&Template { &Template {
@ -107,11 +98,11 @@ impl Resource for ArticleRevisionResource {
data.sequence_number, data.sequence_number,
data.article_id, data.article_id,
&Local.from_utc_datetime(&data.created), &Local.from_utc_datetime(&data.created),
data.author.as_deref(), data.author.as_ref().map(|x| &**x)
), ),
diff_link: if data.revision > 1 { diff_link:
Some(format!( if data.revision > 1 {
"_diff/{}?{}", Some(format!("_diff/{}?{}",
data.article_id, data.article_id,
diff_resource::QueryParameters::new( diff_resource::QueryParameters::new(
data.revision as u32 - 1, data.revision as u32 - 1,
@ -123,9 +114,7 @@ impl Resource for ArticleRevisionResource {
}, },
rendered: render_markdown(&data.body), rendered: render_markdown(&data.body),
}, },
) ).to_string()))
.to_string(),
)) ))
}))
} }
} }


@ -1,437 +0,0 @@
use futures::future::{done, finished};
use futures::{self, Future};
use hyper::header::ContentType;
use hyper::server::*;
use crate::mimes::*;
use crate::schema::article_revisions;
use crate::site::system_page;
use crate::state::State;
use crate::web::{Resource, ResponseFuture};
use super::diff_resource;
use super::pagination::Pagination;
use super::TemporaryRedirectResource;
const DEFAULT_LIMIT: i32 = 30;
type BoxResource = Box<dyn Resource + Sync + Send>;
#[derive(Clone)]
pub struct ChangesLookup {
state: State,
show_authors: bool,
}
#[derive(Serialize, Deserialize, Default)]
pub struct QueryParameters {
after: Option<i32>,
before: Option<i32>,
article_id: Option<i32>,
author: Option<String>,
limit: Option<i32>,
}
impl QueryParameters {
pub fn pagination(self, pagination: Pagination<i32>) -> Self {
Self {
after: if let Pagination::After(x) = pagination {
Some(x)
} else {
None
},
before: if let Pagination::Before(x) = pagination {
Some(x)
} else {
None
},
..self
}
}
pub fn article_id(self, article_id: Option<i32>) -> Self {
Self { article_id, ..self }
}
pub fn author(self, author: Option<String>) -> Self {
Self { author, ..self }
}
pub fn limit(self, limit: i32) -> Self {
Self {
limit: if limit != DEFAULT_LIMIT {
Some(limit)
} else {
None
},
..self
}
}
pub fn into_link(self) -> String {
let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail");
if !args.is_empty() {
format!("?{}", args)
} else {
"_changes".to_owned()
}
}
}
fn apply_query_config(
mut query: article_revisions::BoxedQuery<diesel::sqlite::Sqlite>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
) -> article_revisions::BoxedQuery<diesel::sqlite::Sqlite> {
use diesel::prelude::*;
if let Some(article_id) = article_id {
query = query.filter(article_revisions::article_id.eq(article_id));
}
if let Some(author) = author {
query = query.filter(article_revisions::author.eq(author));
}
query.limit(limit as i64 + 1)
}
impl ChangesLookup {
pub fn new(state: State, show_authors: bool) -> ChangesLookup {
Self {
state,
show_authors,
}
}
pub fn lookup(
&self,
query: Option<&str>,
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
use super::pagination;
let state = self.state.clone();
let show_authors = self.show_authors;
Box::new(
done((|| {
let params: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?;
let pagination = pagination::from_fields(params.after, params.before)?;
let limit = match params.limit {
None => Ok(DEFAULT_LIMIT),
Some(x) if (1..=100).contains(&x) => Ok(x),
_ => Err("`limit` argument must be in range [1, 100]"),
}?;
Ok((pagination, params.article_id, params.author, limit))
})())
.and_then(move |(pagination, article_id, author, limit)| {
match pagination {
Pagination::After(x) => {
let author2 = author.clone();
Box::new(
state
.query_article_revision_stubs(move |query| {
use diesel::prelude::*;
apply_query_config(query, article_id, author2, limit)
.filter(article_revisions::sequence_number.gt(x))
.order(article_revisions::sequence_number.asc())
})
.and_then(move |mut data| {
let extra_element = if data.len() > limit as usize {
data.pop()
} else {
None
};
let args = QueryParameters {
after: None,
before: None,
article_id,
author,
limit: None,
}
.limit(limit);
Ok(Some(match extra_element {
Some(x) => Box::new(TemporaryRedirectResource::new(
args.pagination(Pagination::Before(x.sequence_number))
.into_link(),
))
as BoxResource,
None => Box::new(TemporaryRedirectResource::new(
args.into_link(),
))
as BoxResource,
}))
}),
)
as Box<
dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>,
>
}
Pagination::Before(x) => {
Box::new(finished(Some(Box::new(ChangesResource::new(
state,
show_authors,
Some(x),
article_id,
author,
limit,
)) as BoxResource)))
}
Pagination::None => Box::new(finished(Some(Box::new(ChangesResource::new(
state,
show_authors,
None,
article_id,
author,
limit,
))
as BoxResource))),
}
}),
)
}
}
pub struct ChangesResource {
state: State,
show_authors: bool,
before: Option<i32>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
}
impl ChangesResource {
pub fn new(
state: State,
show_authors: bool,
before: Option<i32>,
article_id: Option<i32>,
author: Option<String>,
limit: i32,
) -> Self {
Self {
state,
show_authors,
before,
article_id,
author,
limit,
}
}
fn query_args(&self) -> QueryParameters {
QueryParameters {
after: None,
before: self.before,
article_id: self.article_id,
author: self.author.clone(),
..QueryParameters::default()
}
.limit(self.limit)
}
}
impl Resource for ChangesResource {
fn allow(&self) -> Vec<hyper::Method> {
use hyper::Method::*;
vec![Options, Head, Get]
}
fn head(&self) -> ResponseFuture {
Box::new(futures::finished(
Response::new()
.with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())),
))
}
fn get(self: Box<Self>) -> ResponseFuture {
use chrono::{Local, TimeZone};
struct Row<'a> {
resource: &'a ChangesResource,
sequence_number: i32,
article_id: i32,
revision: i32,
created: String,
author: Option<String>,
_slug: String,
title: String,
_latest: bool,
diff_link: Option<String>,
}
impl<'a> Row<'a> {
fn author_link(&self) -> String {
self.resource
.query_args()
.pagination(Pagination::After(self.sequence_number))
.author(self.author.clone())
.into_link()
}
}
struct NavLinks {
more: String,
end: String,
}
#[derive(BartDisplay)]
#[template = "templates/changes.html"]
struct Template<'a> {
resource: &'a ChangesResource,
show_authors: bool,
newer: Option<NavLinks>,
older: Option<NavLinks>,
changes: &'a [Row<'a>],
}
impl<'a> Template<'a> {
fn subject_clause(&self) -> String {
match self.resource.article_id {
Some(x) => format!(" <a href=\"_by_id/{}\">this article</a>", x),
None => " the wiki".to_string(),
}
}
fn author(&self) -> Option<String> {
self.resource.author.clone()
}
fn all_articles_link(&self) -> Option<String> {
self.resource
.article_id
.map(|_| self.resource.query_args().article_id(None).into_link())
}
fn all_authors_link(&self) -> Option<String> {
self.resource
.author
.as_ref()
.map(|_| self.resource.query_args().author(None).into_link())
}
}
let (before, article_id, author, limit) = (
self.before,
self.article_id,
self.author.clone(),
self.limit,
);
let data = self.state.query_article_revision_stubs(move |query| {
use diesel::prelude::*;
let query = apply_query_config(query, article_id, author, limit)
.order(article_revisions::sequence_number.desc());
match before {
Some(x) => query.filter(article_revisions::sequence_number.lt(x)),
None => query,
}
});
let head = self.head();
Box::new(data.join(head).and_then(move |(mut data, head)| {
let extra_element = if data.len() > self.limit as usize {
data.pop()
} else {
None
};
let (newer, older) = match self.before {
Some(x) => (
Some(NavLinks {
more: self
.query_args()
.pagination(Pagination::After(x - 1))
.into_link(),
end: self.query_args().pagination(Pagination::None).into_link(),
}),
extra_element.map(|_| NavLinks {
more: self
.query_args()
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
.into_link(),
end: self
.query_args()
.pagination(Pagination::After(0))
.into_link(),
}),
),
None => (
None,
extra_element.map(|_| NavLinks {
more: self
.query_args()
.pagination(Pagination::Before(data.last().unwrap().sequence_number))
.into_link(),
end: self
.query_args()
.pagination(Pagination::After(0))
.into_link(),
}),
),
};
let changes = &data
.into_iter()
.map(|x| Row {
resource: &self,
sequence_number: x.sequence_number,
article_id: x.article_id,
revision: x.revision,
created: Local.from_utc_datetime(&x.created).to_rfc2822(),
author: x.author,
_slug: x.slug,
title: x.title,
_latest: x.latest,
diff_link: if x.revision > 1 {
Some(format!(
"_diff/{}?{}",
x.article_id,
diff_resource::QueryParameters::new(
x.revision as u32 - 1,
x.revision as u32,
)
))
} else {
None
},
})
.collect::<Vec<_>>();
Ok(head.with_body(
system_page(
None, // Hmm, should perhaps accept `base` as argument
"Changes",
Template {
resource: &self,
show_authors: self.show_authors,
newer,
older,
changes,
},
)
.to_string(),
))
}))
}
}
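
A minimal sketch of the links the QueryParameters builder above produces, assuming serde_urlencoded omits the None fields (the empty-string fallback above relies on exactly that).

// Illustration only: query links built by QueryParameters::into_link.
fn changes_links_example() {
    // With no filters the empty query string falls back to the bare endpoint.
    assert_eq!(QueryParameters::default().into_link(), "_changes");

    // A single filter serializes just that field.
    assert_eq!(
        QueryParameters::default().article_id(Some(7)).into_link(),
        "?article_id=7"
    );

    // Pagination sets exactly one of after/before.
    assert_eq!(
        QueryParameters::default()
            .pagination(Pagination::Before(100))
            .into_link(),
        "?before=100"
    );
}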


@ -1,22 +1,22 @@
use std::fmt; use std::fmt;
use futures::future::done; use diff;
use futures::{self, Future}; use futures::{self, Future};
use futures::future::done;
use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
use serde_urlencoded;
use crate::mimes::*; use components::changes;
use crate::models::ArticleRevision; use mimes::*;
use crate::site::Layout; use models::ArticleRevision;
use crate::state::State; use pagination::Pagination;
use crate::theme; use site::Layout;
use crate::web::{Resource, ResponseFuture}; use state::State;
use web::{Resource, ResponseFuture};
use super::changes_resource; type BoxResource = Box<Resource + Sync + Send>;
use super::pagination::Pagination;
type BoxResource = Box<dyn Resource + Sync + Send>;
#[derive(Clone)] #[derive(Clone)]
pub struct DiffLookup { pub struct DiffLookup {
@ -46,28 +46,25 @@ impl DiffLookup {
Self { state } Self { state }
} }
pub fn lookup( pub fn lookup(&self, article_id: u32, query: Option<&str>) -> Box<Future<Item=Option<BoxResource>, Error=::web::Error>> {
&self,
article_id: u32,
query: Option<&str>,
) -> Box<dyn Future<Item = Option<BoxResource>, Error = crate::web::Error>> {
let state = self.state.clone(); let state = self.state.clone();
Box::new( Box::new(done(
done(serde_urlencoded::from_str(query.unwrap_or("")).map_err(Into::into)) serde_urlencoded::from_str(query.unwrap_or(""))
.and_then(move |params: QueryParameters| { .map_err(Into::into)
).and_then(move |params: QueryParameters| {
let from = state.get_article_revision(article_id as i32, params.from as i32); let from = state.get_article_revision(article_id as i32, params.from as i32);
let to = state.get_article_revision(article_id as i32, params.to as i32); let to = state.get_article_revision(article_id as i32, params.to as i32);
from.join(to) from.join(to)
}) }).and_then(move |(from, to)| {
.and_then(move |(from, to)| match (from, to) { match (from, to) {
(Some(from), Some(to)) => { (Some(from), Some(to)) =>
Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource)) Ok(Some(Box::new(DiffResource::new(from, to)) as BoxResource)),
_ =>
Ok(None),
} }
_ => Ok(None), }))
}),
)
} }
} }
@ -90,10 +87,9 @@ impl Resource for DiffResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
@ -103,8 +99,6 @@ impl Resource for DiffResource {
struct Template<'a> { struct Template<'a> {
consecutive: bool, consecutive: bool,
article_id: u32, article_id: u32,
author: Option<&'a str>,
author_link: &'a str,
article_history_link: &'a str, article_history_link: &'a str,
from_link: &'a str, from_link: &'a str,
to_link: &'a str, to_link: &'a str,
@ -121,88 +115,41 @@ impl Resource for DiffResource {
let head = self.head(); let head = self.head();
Box::new(head.and_then(move |head| { Box::new(head
let consecutive = self.to.revision - self.from.revision == 1; .and_then(move |head| {
Ok(head
let author = match consecutive { .with_body(Layout {
true => self.to.author.as_deref(),
false => None,
};
let author_link = &format!(
"_changes{}",
changes_resource::QueryParameters::default()
.author(author.map(|x| x.to_owned()))
.pagination(Pagination::After(self.from.sequence_number))
.into_link()
);
let article_history_link = &format!(
"_changes{}",
changes_resource::QueryParameters::default()
.article_id(Some(self.from.article_id))
.pagination(Pagination::After(self.from.sequence_number))
.into_link()
);
let title = &diff::chars(&self.from.title, &self.to.title)
.into_iter()
.map(|x| match x {
diff::Result::Left(x) => Diff {
removed: Some(x),
..Default::default()
},
diff::Result::Both(x, _) => Diff {
same: Some(x),
..Default::default()
},
diff::Result::Right(x) => Diff {
added: Some(x),
..Default::default()
},
})
.collect::<Vec<_>>();
let lines = &diff::lines(&self.from.body, &self.to.body)
.into_iter()
.map(|x| match x {
diff::Result::Left(x) => Diff {
removed: Some(x),
..Default::default()
},
diff::Result::Both(x, _) => Diff {
same: Some(x),
..Default::default()
},
diff::Result::Right(x) => Diff {
added: Some(x),
..Default::default()
},
})
.collect::<Vec<_>>();
Ok(head.with_body(
Layout {
base: Some("../"), // Hmm, should perhaps accept `base` as argument base: Some("../"), // Hmm, should perhaps accept `base` as argument
title: "Difference", title: "Difference",
theme: theme::theme_from_str_hash("Difference"),
body: &Template { body: &Template {
consecutive, consecutive: self.to.revision - self.from.revision == 1,
article_id: self.from.article_id as u32, article_id: self.from.article_id as u32,
author, article_history_link: &format!("_changes{}",
author_link, changes::QueryParameters::default()
article_history_link, .article_id(Some(self.from.article_id))
from_link: &format!( .pagination(Pagination::After(self.from.revision))
"_revisions/{}/{}", .into_link()
self.from.article_id, self.from.revision
), ),
from_link: &format!("_revisions/{}/{}", self.from.article_id, self.from.revision),
to_link: &format!("_revisions/{}/{}", self.to.article_id, self.to.revision), to_link: &format!("_revisions/{}/{}", self.to.article_id, self.to.revision),
title, title: &diff::chars(&self.from.title, &self.to.title)
lines, .into_iter()
.map(|x| match x {
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() },
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() },
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() },
})
.collect::<Vec<_>>(),
lines: &diff::lines(&self.from.body, &self.to.body)
.into_iter()
.map(|x| match x {
diff::Result::Left(x) => Diff { removed: Some(x), ..Default::default() },
diff::Result::Both(x, _) => Diff { same: Some(x), ..Default::default() },
diff::Result::Right(x) => Diff { added: Some(x), ..Default::default() },
})
.collect::<Vec<_>>()
}, },
} }.to_string()))
.to_string(),
))
})) }))
} }
} }


@ -1,11 +1,11 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
use crate::mimes::*; use mimes::*;
use crate::site::system_page; use site::system_page;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
pub struct HtmlResource { pub struct HtmlResource {
base: Option<&'static str>, base: Option<&'static str>,
@ -15,11 +15,7 @@ pub struct HtmlResource {
impl HtmlResource { impl HtmlResource {
pub fn new(base: Option<&'static str>, title: &'static str, html_body: &'static str) -> Self { pub fn new(base: Option<&'static str>, title: &'static str, html_body: &'static str) -> Self {
HtmlResource { HtmlResource { base, title, html_body }
base,
title,
html_body,
}
} }
} }
@ -30,18 +26,22 @@ impl Resource for HtmlResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
let head = self.head(); let head = self.head();
Box::new(head.and_then(move |head| { Box::new(head
Ok(head.with_body(system_page(self.base, self.title, self.html_body).to_string())) .and_then(move |head| {
Ok(head.with_body(system_page(
self.base,
self.title,
self.html_body
).to_string()))
})) }))
} }
} }


@ -1,9 +1,6 @@
pub mod pagination;
mod about_resource; mod about_resource;
mod article_resource;
mod article_revision_resource; mod article_revision_resource;
mod changes_resource; mod article_resource;
mod diff_resource; mod diff_resource;
mod html_resource; mod html_resource;
mod new_article_resource; mod new_article_resource;
@ -13,10 +10,10 @@ mod sitemap_resource;
mod temporary_redirect_resource; mod temporary_redirect_resource;
pub use self::about_resource::AboutResource; pub use self::about_resource::AboutResource;
pub use self::article_resource::ArticleResource;
pub use self::article_revision_resource::ArticleRevisionResource; pub use self::article_revision_resource::ArticleRevisionResource;
pub use self::changes_resource::{ChangesLookup, ChangesResource}; pub use self::article_resource::ArticleResource;
pub use self::diff_resource::{DiffLookup, DiffResource}; pub use self::diff_resource::{DiffLookup, DiffResource};
pub use self::diff_resource::QueryParameters as DiffQueryParameters;
pub use self::html_resource::HtmlResource; pub use self::html_resource::HtmlResource;
pub use self::new_article_resource::NewArticleResource; pub use self::new_article_resource::NewArticleResource;
pub use self::read_only_resource::ReadOnlyResource; pub use self::read_only_resource::ReadOnlyResource;


@ -1,15 +1,16 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::{ContentType, Location}; use hyper::header::{ContentType, Location};
use hyper::server::*; use hyper::server::*;
use serde_json;
use serde_urlencoded;
use crate::assets::ScriptJs; use assets::ScriptJs;
use crate::mimes::*; use mimes::*;
use crate::rendering::render_markdown; use rendering::render_markdown;
use crate::site::Layout; use site::Layout;
use crate::state::State; use state::State;
use crate::theme::{self, Theme}; use web::{Resource, ResponseFuture};
use crate::web::{Resource, ResponseFuture};
const NEW: &str = "NEW"; const NEW: &str = "NEW";
@ -26,7 +27,6 @@ fn title_from_slug(slug: &str) -> String {
pub struct NewArticleResource { pub struct NewArticleResource {
state: State, state: State,
slug: Option<String>, slug: Option<String>,
edit: bool,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -34,12 +34,11 @@ struct CreateArticle {
base_revision: String, base_revision: String,
title: String, title: String,
body: String, body: String,
theme: Option<Theme>,
} }
impl NewArticleResource { impl NewArticleResource {
pub fn new(state: State, slug: Option<String>, edit: bool) -> Self { pub fn new(state: State, slug: Option<String>) -> Self {
Self { state, slug, edit } Self { state, slug }
} }
} }
@ -50,22 +49,15 @@ impl Resource for NewArticleResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::NotFound) .with_status(hyper::StatusCode::NotFound)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
// TODO Remove duplication with article_resource.rs:
struct SelectableTheme {
theme: Theme,
selected: bool,
}
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/article.html"] #[template="templates/article.html"]
struct Template<'a> { struct Template<'a> {
revision: &'a str, revision: &'a str,
last_updated: Option<&'a str>, last_updated: Option<&'a str>,
@ -75,7 +67,6 @@ impl Resource for NewArticleResource {
title: &'a str, title: &'a str,
raw: &'a str, raw: &'a str,
rendered: &'a str, rendered: &'a str,
themes: &'a [SelectableTheme],
} }
impl<'a> Template<'a> { impl<'a> Template<'a> {
fn script_js(&self) -> &'static str { fn script_js(&self) -> &'static str {
@ -83,36 +74,29 @@ impl Resource for NewArticleResource {
} }
} }
let title = self let title = self.slug.as_ref()
.slug
.as_ref()
.map_or("".to_owned(), |x| title_from_slug(x)); .map_or("".to_owned(), |x| title_from_slug(x));
Box::new(self.head().and_then(move |head| { Box::new(self.head()
Ok(head.with_body( .and_then(move |head| {
Layout { Ok(head
.with_body(Layout {
base: None, // Hmm, should perhaps accept `base` as argument base: None, // Hmm, should perhaps accept `base` as argument
title: &title, title: &title,
theme: theme::Theme::Gray,
body: &Template { body: &Template {
revision: NEW, revision: NEW,
last_updated: None, last_updated: None,
edit: self.edit,
cancel_url: self.slug.as_deref(), // Implicitly start in edit-mode when no slug is given. This
// currently directly corresponds to the /_new endpoint
edit: self.slug.is_none(),
cancel_url: self.slug.as_ref().map(|x| &**x),
title: &title, title: &title,
raw: "", raw: "",
rendered: EMPTY_ARTICLE_MESSAGE, rendered: EMPTY_ARTICLE_MESSAGE,
themes: &theme::THEMES
.iter()
.map(|&x| SelectableTheme {
theme: x,
selected: false,
})
.collect::<Vec<_>>(),
}, },
} }.to_string()))
.to_string(),
))
})) }))
} }
@ -120,11 +104,11 @@ impl Resource for NewArticleResource {
// TODO Check incoming Content-Type // TODO Check incoming Content-Type
// TODO Refactor? Reduce duplication with ArticleResource::put? // TODO Refactor? Reduce duplication with ArticleResource::put?
use chrono::{Local, TimeZone}; use chrono::{TimeZone, Local};
use futures::Stream; use futures::Stream;
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/article_contents.html"] #[template="templates/article_contents.html"]
struct Template<'a> { struct Template<'a> {
title: &'a str, title: &'a str,
rendered: String, rendered: String,
@ -137,56 +121,45 @@ impl Resource for NewArticleResource {
revision: i32, revision: i32,
title: &'a str, title: &'a str,
body: &'a str, body: &'a str,
theme: Theme,
rendered: &'a str, rendered: &'a str,
last_updated: &'a str, last_updated: &'a str,
} }
Box::new( Box::new(body
body.concat2() .concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into)) .and_then(|body| {
serde_urlencoded::from_bytes(&body)
.map_err(Into::into)
})
.and_then(move |arg: CreateArticle| { .and_then(move |arg: CreateArticle| {
if arg.base_revision != NEW { if arg.base_revision != NEW {
unimplemented!("Version update conflict"); unimplemented!("Version update conflict");
} }
let theme = arg.theme.unwrap_or_else(theme::random); self.state.create_article(self.slug.clone(), arg.title, arg.body, identity)
self.state.create_article(
self.slug.clone(),
arg.title,
arg.body,
identity,
theme,
)
}) })
.and_then(|updated| { .and_then(|updated| {
futures::finished( futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(APPLICATION_JSON.clone())) .with_header(ContentType(APPLICATION_JSON.clone()))
.with_body( .with_body(serde_json::to_string(&PutResponse {
serde_json::to_string(&PutResponse {
slug: &updated.slug, slug: &updated.slug,
article_id: updated.article_id, article_id: updated.article_id,
revision: updated.revision, revision: updated.revision,
title: &updated.title, title: &updated.title,
body: &updated.body, body: &updated.body,
theme: updated.theme,
rendered: &Template { rendered: &Template {
title: &updated.title, title: &updated.title,
rendered: render_markdown(&updated.body), rendered: render_markdown(&updated.body),
} }.to_string(),
.to_string(),
last_updated: &super::article_resource::last_updated( last_updated: &super::article_resource::last_updated(
updated.article_id, updated.article_id,
&Local.from_utc_datetime(&updated.created), &Local.from_utc_datetime(&updated.created),
updated.author.as_deref(), updated.author.as_ref().map(|x| &**x)
),
})
.expect("Should never fail"),
), ),
}).expect("Should never fail"))
) )
}), })
) )
} }
@ -196,32 +169,27 @@ impl Resource for NewArticleResource {
use futures::Stream; use futures::Stream;
Box::new( Box::new(body
body.concat2() .concat2()
.map_err(Into::into) .map_err(Into::into)
.and_then(|body| serde_urlencoded::from_bytes(&body).map_err(Into::into)) .and_then(|body| {
serde_urlencoded::from_bytes(&body)
.map_err(Into::into)
})
.and_then(move |arg: CreateArticle| { .and_then(move |arg: CreateArticle| {
if arg.base_revision != NEW { if arg.base_revision != NEW {
unimplemented!("Version update conflict"); unimplemented!("Version update conflict");
} }
let theme = arg.theme.unwrap_or_else(theme::random); self.state.create_article(self.slug.clone(), arg.title, arg.body, identity)
self.state.create_article(
self.slug.clone(),
arg.title,
arg.body,
identity,
theme,
)
}) })
.and_then(|updated| { .and_then(|updated| {
futures::finished( futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::SeeOther) .with_status(hyper::StatusCode::SeeOther)
.with_header(ContentType(TEXT_PLAIN.clone())) .with_header(ContentType(TEXT_PLAIN.clone()))
.with_header(Location::new(updated.link().to_owned())) .with_header(Location::new(updated.link().to_owned()))
.with_body("See other"), .with_body("See other")
) )
}), })
) )
} }
} }


@ -1,9 +1,9 @@
use futures::Future; use futures::Future;
use hyper::header::{CacheControl, CacheDirective, ContentLength, ContentType}; use hyper::header::{ContentType, ContentLength, CacheControl, CacheDirective};
use hyper::server::*; use hyper::server::*;
use hyper::StatusCode; use hyper::StatusCode;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
#[allow(unused)] #[allow(unused)]
pub struct ReadOnlyResource { pub struct ReadOnlyResource {
@ -18,21 +18,21 @@ impl Resource for ReadOnlyResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(::futures::finished( Box::new(::futures::finished(Response::new()
Response::new()
.with_status(StatusCode::Ok) .with_status(StatusCode::Ok)
.with_header(ContentType(self.content_type.clone())) .with_header(ContentType(self.content_type.clone()))
.with_header(CacheControl(vec![ .with_header(CacheControl(vec![
CacheDirective::MustRevalidate, CacheDirective::MustRevalidate,
CacheDirective::NoStore, CacheDirective::NoStore,
])), ]))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
Box::new(self.head().map(move |head| { Box::new(self.head().map(move |head|
head.with_header(ContentLength(self.body.len() as u64)) head
.with_header(ContentLength(self.body.len() as u64))
.with_body(self.body.clone()) .with_body(self.body.clone())
})) ))
} }
} }


@ -1,18 +1,20 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::{Accept, ContentType}; use hyper::header::{Accept, ContentType};
use hyper::server::*; use hyper::server::*;
use serde_json;
use serde_urlencoded;
use crate::mimes::*; use mimes::*;
use crate::models::SearchResult; use models::SearchResult;
use crate::site::system_page; use site::system_page;
use crate::state::State; use state::State;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
const DEFAULT_LIMIT: u32 = 10; const DEFAULT_LIMIT: u32 = 10;
const DEFAULT_SNIPPET_SIZE: u32 = 30; const DEFAULT_SNIPPET_SIZE: u32 = 30;
type BoxResource = Box<dyn Resource + Sync + Send>; type BoxResource = Box<Resource + Sync + Send>;
#[derive(Serialize, Deserialize, Default)] #[derive(Serialize, Deserialize, Default)]
pub struct QueryParameters { pub struct QueryParameters {
@ -32,29 +34,21 @@ impl QueryParameters {
pub fn limit(self, limit: u32) -> Self { pub fn limit(self, limit: u32) -> Self {
Self { Self {
limit: if limit != DEFAULT_LIMIT { limit: if limit != DEFAULT_LIMIT { Some(limit) } else { None },
Some(limit)
} else {
None
},
..self ..self
} }
} }
pub fn snippet_size(self, snippet_size: u32) -> Self { pub fn snippet_size(self, snippet_size: u32) -> Self {
Self { Self {
snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE { snippet_size: if snippet_size != DEFAULT_SNIPPET_SIZE { Some(snippet_size) } else { None },
Some(snippet_size)
} else {
None
},
..self ..self
} }
} }
pub fn into_link(self) -> String { pub fn into_link(self) -> String {
let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail"); let args = serde_urlencoded::to_string(self).expect("Serializing to String cannot fail");
if !args.is_empty() { if args.len() > 0 {
format!("_search?{}", args) format!("_search?{}", args)
} else { } else {
"_search".to_owned() "_search".to_owned()
@ -72,16 +66,18 @@ impl SearchLookup {
Self { state } Self { state }
} }
pub fn lookup(&self, query: Option<&str>) -> Result<Option<BoxResource>, crate::web::Error> { pub fn lookup(&self, query: Option<&str>) -> Result<Option<BoxResource>, ::web::Error> {
let args: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?; let args: QueryParameters = serde_urlencoded::from_str(query.unwrap_or(""))?;
Ok(Some(Box::new(SearchResource::new( Ok(Some(Box::new(
SearchResource::new(
self.state.clone(), self.state.clone(),
args.q, args.q,
args.limit.unwrap_or(DEFAULT_LIMIT), args.limit.unwrap_or(DEFAULT_LIMIT),
args.offset.unwrap_or(0), args.offset.unwrap_or(0),
args.snippet_size.unwrap_or(DEFAULT_SNIPPET_SIZE), args.snippet_size.unwrap_or(DEFAULT_SNIPPET_SIZE),
)))) )
)))
} }
} }
@ -102,21 +98,8 @@ pub enum ResponseType {
} }
impl SearchResource { impl SearchResource {
pub fn new( pub fn new(state: State, query: Option<String>, limit: u32, offset: u32, snippet_size: u32) -> Self {
state: State, Self { state, response_type: ResponseType::Html, query, limit, offset, snippet_size }
query: Option<String>,
limit: u32,
offset: u32,
snippet_size: u32,
) -> Self {
Self {
state,
response_type: ResponseType::Html,
query,
limit,
offset,
snippet_size,
}
} }
fn query_args(&self) -> QueryParameters { fn query_args(&self) -> QueryParameters {
@ -143,24 +126,21 @@ impl Resource for SearchResource {
self.response_type = match accept.first() { self.response_type = match accept.first() {
Some(&QualityItem { item: ref mime, .. }) Some(&QualityItem { item: ref mime, .. })
if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON => if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON
{ => ResponseType::Json,
ResponseType::Json
}
_ => ResponseType::Html, _ => ResponseType::Html,
}; };
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
let content_type = match self.response_type { let content_type = match &self.response_type {
ResponseType::Json => ContentType(APPLICATION_JSON.clone()), &ResponseType::Json => ContentType(APPLICATION_JSON.clone()),
ResponseType::Html => ContentType(TEXT_HTML.clone()), &ResponseType::Html => ContentType(TEXT_HTML.clone()),
}; };
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(content_type), .with_header(content_type)
)) ))
} }
@ -174,7 +154,7 @@ impl Resource for SearchResource {
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/search.html"] #[template="templates/search.html"]
struct Template<'a> { struct Template<'a> {
query: &'a str, query: &'a str,
hits: &'a [(usize, &'a SearchResult)], hits: &'a [(usize, &'a SearchResult)],
@ -183,26 +163,17 @@ impl Resource for SearchResource {
} }
// TODO: Show a search "front page" when no query is given: // TODO: Show a search "front page" when no query is given:
let query = self let query = self.query.as_ref().map(|x| x.clone()).unwrap_or("".to_owned());
.query
.as_ref()
.cloned()
.unwrap_or_else(|| "".to_owned());
let data = self.state.search_query( let data = self.state.search_query(query, (self.limit + 1) as i32, self.offset as i32, self.snippet_size as i32);
query,
(self.limit + 1) as i32,
self.offset as i32,
self.snippet_size as i32,
);
let head = self.head(); let head = self.head();
Box::new(data.join(head).and_then(move |(mut data, head)| { Box::new(data.join(head)
.and_then(move |(mut data, head)| {
let prev = if self.offset > 0 { let prev = if self.offset > 0 {
Some( Some(self.query_args()
self.query_args()
.offset(self.offset.saturating_sub(self.limit)) .offset(self.offset.saturating_sub(self.limit))
.into_link(), .into_link()
) )
} else { } else {
None None
@ -210,38 +181,35 @@ impl Resource for SearchResource {
let next = if data.len() > self.limit as usize { let next = if data.len() > self.limit as usize {
data.pop(); data.pop();
Some( Some(self.query_args()
self.query_args()
.offset(self.offset + self.limit) .offset(self.offset + self.limit)
.into_link(), .into_link()
) )
} else { } else {
None None
}; };
match self.response_type { match &self.response_type {
ResponseType::Json => Ok(head.with_body( &ResponseType::Json => Ok(head
serde_json::to_string(&JsonResponse { .with_body(serde_json::to_string(&JsonResponse {
query: self.query.as_deref().unwrap_or(""), query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
hits: &data, hits: &data,
prev, prev,
next, next,
}) }).expect("Should never fail"))
.expect("Should never fail"), ),
)), &ResponseType::Html => Ok(head.with_body(system_page(
ResponseType::Html => Ok(head.with_body(
system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"Search", "Search",
&Template { &Template {
query: self.query.as_deref().unwrap_or(""), query: self.query.as_ref().map(|x| &**x).unwrap_or(""),
hits: &data.iter().enumerate().collect::<Vec<_>>(), hits: &data.iter()
.enumerate()
.collect::<Vec<_>>(),
prev, prev,
next, next,
}, },
) ).to_string())),
.to_string(),
)),
} }
})) }))
} }


@ -1,13 +1,13 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::ContentType; use hyper::header::ContentType;
use hyper::server::*; use hyper::server::*;
use crate::mimes::*; use mimes::*;
use crate::models::ArticleRevisionStub; use models::ArticleRevisionStub;
use crate::site::system_page; use site::system_page;
use crate::state::State; use state::State;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
pub struct SitemapResource { pub struct SitemapResource {
state: State, state: State,
@ -26,16 +26,15 @@ impl Resource for SitemapResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::Ok) .with_status(hyper::StatusCode::Ok)
.with_header(ContentType(TEXT_HTML.clone())), .with_header(ContentType(TEXT_HTML.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/sitemap.html"] #[template="templates/sitemap.html"]
struct Template<'a> { struct Template<'a> {
articles: &'a [ArticleRevisionStub], articles: &'a [ArticleRevisionStub],
} }
@ -43,17 +42,15 @@ impl Resource for SitemapResource {
let data = self.state.get_latest_article_revision_stubs(); let data = self.state.get_latest_article_revision_stubs();
let head = self.head(); let head = self.head();
Box::new(data.join(head).and_then(move |(articles, head)| { Box::new(data.join(head)
Ok(head.with_body( .and_then(move |(articles, head)| {
system_page( Ok(head.with_body(system_page(
None, // Hmm, should perhaps accept `base` as argument None, // Hmm, should perhaps accept `base` as argument
"Sitemap", "Sitemap",
Template { Template {
articles: &articles, articles: &articles,
}, },
) ).to_string()))
.to_string(),
))
})) }))
} }
} }


@ -1,9 +1,9 @@
use futures::{self, Future}; use futures::{self, Future};
use hyper;
use hyper::header::Location; use hyper::header::Location;
use hyper::server::*; use hyper::server::*;
use crate::web::{Resource, ResponseFuture}; use web::{Resource, ResponseFuture};
pub struct TemporaryRedirectResource { pub struct TemporaryRedirectResource {
location: String, location: String,
@ -14,17 +14,14 @@ impl TemporaryRedirectResource {
Self { location } Self { location }
} }
pub fn from_slug<S: AsRef<str>>(slug: S, edit: bool) -> Self { pub fn from_slug<S: AsRef<str>>(slug: S) -> Self {
let base = if slug.as_ref().is_empty() { Self {
location:
if slug.as_ref().is_empty() {
"." "."
} else { } else {
slug.as_ref() slug.as_ref()
}; }.to_owned()
let tail = if edit { "?edit" } else { "" };
Self {
location: format!("{}{}", base, tail),
} }
} }
} }
@ -36,18 +33,18 @@ impl Resource for TemporaryRedirectResource {
} }
fn head(&self) -> ResponseFuture { fn head(&self) -> ResponseFuture {
Box::new(futures::finished( Box::new(futures::finished(Response::new()
Response::new()
.with_status(hyper::StatusCode::TemporaryRedirect) .with_status(hyper::StatusCode::TemporaryRedirect)
.with_header(Location::new(self.location.clone())), .with_header(Location::new(self.location.clone()))
)) ))
} }
fn get(self: Box<Self>) -> ResponseFuture { fn get(self: Box<Self>) -> ResponseFuture {
Box::new( Box::new(self.head()
self.head() .and_then(move |head| {
.and_then(move |head| Ok(head.with_body(format!("Moved to {}", self.location)))), Ok(head
) .with_body(format!("Moved to {}", self.location)))
}))
} }
fn put(self: Box<Self>, _body: hyper::Body, _identity: Option<String>) -> ResponseFuture { fn put(self: Box<Self>, _body: hyper::Body, _identity: Option<String>) -> ResponseFuture {


@ -4,20 +4,24 @@
use std::fmt; use std::fmt;
use futures::{self, Future}; use futures::{self, Future};
use hyper::header::{Accept, ContentType, Server}; use hyper::header::{Accept, ContentType, Server};
use hyper::mime; use hyper::mime;
use hyper::server::*; use hyper::server::*;
use hyper;
use crate::assets::{SearchJs, StyleCss, ThemesCss}; use assets::{ThemesCss, StyleCss, SearchJs};
use crate::build_config; use build_config;
use crate::theme; use web::Scope;
use crate::web::Lookup; use wiki_lookup::WikiLookup;
use crate::wiki_lookup::WikiLookup;
const THEMES: [&str; 19] = ["red", "pink", "purple", "deep-purple", "indigo",
"blue", "light-blue", "cyan", "teal", "green", "light-green", "lime",
"yellow", "amber", "orange", "deep-orange", "brown", "gray", "blue-gray"];
lazy_static! { lazy_static! {
static ref TEXT_HTML: mime::Mime = "text/html;charset=utf-8".parse().unwrap(); static ref TEXT_HTML: mime::Mime = "text/html;charset=utf-8".parse().unwrap();
static ref SERVER: Server = Server::new(build_config::HTTP_SERVER.as_str()); static ref SERVER: Server =
Server::new(build_config::HTTP_SERVER.as_str());
} }
header! { (XIdentity, "X-Identity") => [String] } header! { (XIdentity, "X-Identity") => [String] }
@ -27,48 +31,39 @@ header! { (XIdentity, "X-Identity") => [String] }
pub struct Layout<'a, T: 'a + fmt::Display> { pub struct Layout<'a, T: 'a + fmt::Display> {
pub base: Option<&'a str>, pub base: Option<&'a str>,
pub title: &'a str, pub title: &'a str,
pub theme: theme::Theme,
pub body: T, pub body: T,
} }
impl<'a, T: 'a + fmt::Display> Layout<'a, T> { impl<'a, T: 'a + fmt::Display> Layout<'a, T> {
pub fn themes_css(&self) -> &str { pub fn theme(&self) -> &str {
ThemesCss::resource_name() let hash = ::seahash::hash(self.title.as_bytes()) as usize;
} let choice = hash % THEMES.len();
pub fn style_css(&self) -> &str { THEMES[choice]
StyleCss::resource_name()
}
pub fn search_js(&self) -> &str {
SearchJs::resource_name()
} }
pub fn project_name(&self) -> &str { pub fn themes_css(&self) -> &str { ThemesCss::resource_name() }
build_config::PROJECT_NAME pub fn style_css(&self) -> &str { StyleCss::resource_name() }
} pub fn search_js(&self) -> &str { SearchJs::resource_name() }
pub fn version(&self) -> &str {
build_config::VERSION.as_str() pub fn project_name(&self) -> &str { build_config::PROJECT_NAME }
} pub fn version(&self) -> &str { build_config::VERSION.as_str() }
} }
#[derive(BartDisplay)] #[derive(BartDisplay)]
#[template = "templates/system_page_layout.html"] #[template="templates/system_page_layout.html"]
pub struct SystemPageLayout<'a, T: 'a + fmt::Display> { pub struct SystemPageLayout<'a, T: 'a + fmt::Display> {
title: &'a str, title: &'a str,
html_body: T, html_body: T,
} }
pub fn system_page<'a, T>( pub fn system_page<'a, T>(base: Option<&'a str>, title: &'a str, body: T)
base: Option<&'a str>, -> Layout<'a, SystemPageLayout<'a, T>>
title: &'a str,
body: T,
) -> Layout<'a, SystemPageLayout<'a, T>>
where where
T: 'a + fmt::Display, T: 'a + fmt::Display
{ {
Layout { Layout {
base, base,
title, title,
theme: theme::theme_from_str_hash(title),
body: SystemPageLayout { body: SystemPageLayout {
title, title,
html_body: body, html_body: body,
@ -91,39 +86,43 @@ pub struct Site {
impl Site { impl Site {
pub fn new(root: WikiLookup, trust_identity: bool) -> Site { pub fn new(root: WikiLookup, trust_identity: bool) -> Site {
Site { Site { root, trust_identity }
root,
trust_identity,
}
} }
fn not_found(base: Option<&str>) -> Response { fn not_found(base: Option<&str>) -> Response {
Response::new() Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body(system_page(base, "Not found", NotFound).to_string()) .with_body(system_page(
base,
"Not found",
NotFound,
).to_string())
.with_status(hyper::StatusCode::NotFound) .with_status(hyper::StatusCode::NotFound)
} }
fn internal_server_error( fn internal_server_error(base: Option<&str>, err: Box<::std::error::Error + Send + Sync>) -> Response {
base: Option<&str>,
err: Box<dyn ::std::error::Error + Send + Sync>,
) -> Response {
eprintln!("Internal Server Error:\n{:#?}", err); eprintln!("Internal Server Error:\n{:#?}", err);
Response::new() Response::new()
.with_header(ContentType(TEXT_HTML.clone())) .with_header(ContentType(TEXT_HTML.clone()))
.with_body(system_page(base, "Internal server error", InternalServerError).to_string()) .with_body(system_page(
base,
"Internal server error",
InternalServerError,
).to_string())
.with_status(hyper::StatusCode::InternalServerError) .with_status(hyper::StatusCode::InternalServerError)
} }
} }
fn root_base_from_request_uri(path: &str) -> Option<String> { fn root_base_from_request_uri(path: &str) -> Option<String> {
assert!(path.starts_with('/')); use std::iter::repeat;
assert!(path.starts_with("/"));
let slashes = path[1..].matches('/').count(); let slashes = path[1..].matches('/').count();
match slashes { match slashes {
0 => None, 0 => None,
n => Some("../".repeat(n)), n => Some(repeat("../").take(n).collect())
} }
} }
@ -131,7 +130,7 @@ impl Service for Site {
type Request = Request; type Request = Request;
type Response = Response; type Response = Response;
type Error = hyper::Error; type Error = hyper::Error;
type Future = Box<dyn futures::Future<Item = Response, Error = Self::Error>>; type Future = Box<futures::Future<Item = Response, Error = Self::Error>>;
fn call(&self, req: Request) -> Self::Future { fn call(&self, req: Request) -> Self::Future {
let (method, uri, _http_version, headers, body) = req.deconstruct(); let (method, uri, _http_version, headers, body) = req.deconstruct();
@ -143,14 +142,12 @@ impl Service for Site {
false => None, false => None,
}; };
let accept_header = headers.get().cloned().unwrap_or_else(|| Accept(vec![])); let accept_header = headers.get().map(|x: &Accept| x.clone()).unwrap_or(Accept(vec![]));
let base = root_base_from_request_uri(uri.path()); let base = root_base_from_request_uri(uri.path());
let base2 = base.clone(); // Bah, stupid clone let base2 = base.clone(); // Bah, stupid clone
Box::new( Box::new(self.root.scope_lookup(uri.path(), uri.query())
self.root
.lookup(uri.path(), uri.query())
.and_then(move |resource| match resource { .and_then(move |resource| match resource {
Some(mut resource) => { Some(mut resource) => {
use hyper::Method::*; use hyper::Method::*;
@ -161,13 +158,13 @@ impl Service for Site {
Get => resource.get(), Get => resource.get(),
Put => resource.put(body, identity), Put => resource.put(body, identity),
Post => resource.post(body, identity), Post => resource.post(body, identity),
_ => Box::new(futures::finished(resource.method_not_allowed())), _ => Box::new(futures::finished(resource.method_not_allowed()))
} }
} },
None => Box::new(futures::finished(Self::not_found(base.as_deref()))), None => Box::new(futures::finished(Self::not_found(base.as_ref().map(|x| &**x))))
}) })
.or_else(move |err| Ok(Self::internal_server_error(base2.as_deref(), err))) .or_else(move |err| Ok(Self::internal_server_error(base2.as_ref().map(|x| &**x), err)))
.map(|response| response.with_header(SERVER.clone())), .map(|response| response.with_header(SERVER.clone()))
) )
} }
} }


@ -1,13 +1,15 @@
use diesel::prelude::*; use std;
use diesel;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use diesel::prelude::*;
use futures_cpupool::{self, CpuFuture}; use futures_cpupool::{self, CpuFuture};
use r2d2::Pool; use r2d2::Pool;
use r2d2_diesel::ConnectionManager; use r2d2_diesel::ConnectionManager;
use crate::merge; use merge;
use crate::models; use models;
use crate::schema::*; use schema::*;
use crate::theme::Theme;
#[derive(Clone)] #[derive(Clone)]
pub struct State { pub struct State {
@ -15,16 +17,19 @@ pub struct State {
cpu_pool: futures_cpupool::CpuPool, cpu_pool: futures_cpupool::CpuPool,
} }
pub type Error = Box<dyn std::error::Error + Send + Sync>; pub type Error = Box<std::error::Error + Send + Sync>;
pub enum SlugLookup { pub enum SlugLookup {
Miss, Miss,
Hit { article_id: i32, revision: i32 }, Hit {
article_id: i32,
revision: i32,
},
Redirect(String), Redirect(String),
} }
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "article_revisions"] #[table_name="article_revisions"]
struct NewRevision<'a> { struct NewRevision<'a> {
article_id: i32, article_id: i32,
revision: i32, revision: i32,
@ -33,7 +38,6 @@ struct NewRevision<'a> {
body: &'a str, body: &'a str,
author: Option<&'a str>, author: Option<&'a str>,
latest: bool, latest: bool,
theme: Theme,
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@ -41,16 +45,11 @@ pub struct RebaseConflict {
pub base_article: models::ArticleRevisionStub, pub base_article: models::ArticleRevisionStub,
pub title: merge::MergeResult<char>, pub title: merge::MergeResult<char>,
pub body: merge::MergeResult<String>, pub body: merge::MergeResult<String>,
pub theme: Theme,
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
enum RebaseResult { enum RebaseResult {
Clean { Clean { title: String, body: String },
title: String,
body: String,
theme: Theme,
},
Conflict(RebaseConflict), Conflict(RebaseConflict),
} }
@ -59,17 +58,11 @@ pub enum UpdateResult {
RebaseConflict(RebaseConflict), RebaseConflict(RebaseConflict),
} }
fn decide_slug( fn decide_slug(conn: &SqliteConnection, article_id: i32, prev_title: &str, title: &str, prev_slug: Option<&str>) -> Result<String, Error> {
conn: &SqliteConnection,
article_id: i32,
prev_title: &str,
title: &str,
prev_slug: Option<&str>,
) -> Result<String, Error> {
let base_slug = ::slug::slugify(title); let base_slug = ::slug::slugify(title);
if let Some(prev_slug) = prev_slug { if let Some(prev_slug) = prev_slug {
if prev_slug.is_empty() { if prev_slug == "" {
// Never give a non-empty slug to the front page // Never give a non-empty slug to the front page
return Ok(String::new()); return Ok(String::new());
} }
@ -83,11 +76,9 @@ fn decide_slug(
} }
} }
let base_slug = if base_slug.is_empty() { let base_slug = if base_slug.is_empty() { "article" } else { &base_slug };
"article"
} else { use schema::article_revisions;
&base_slug
};
let mut slug = base_slug.to_owned(); let mut slug = base_slug.to_owned();
let mut disambiguator = 1; let mut disambiguator = 1;
@ -98,8 +89,7 @@ fn decide_slug(
.filter(article_revisions::slug.eq(&slug)) .filter(article_revisions::slug.eq(&slug))
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
.count() .count()
.first::<i64>(conn)? .first::<i64>(conn)? != 0;
!= 0;
if !slug_in_use { if !slug_in_use {
break Ok(slug); break Ok(slug);
@ -120,6 +110,8 @@ impl<'a> SyncState<'a> {
} }
pub fn get_article_slug(&self, article_id: i32) -> Result<Option<String>, Error> { pub fn get_article_slug(&self, article_id: i32) -> Result<Option<String>, Error> {
use schema::article_revisions;
Ok(article_revisions::table Ok(article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
@ -128,11 +120,9 @@ impl<'a> SyncState<'a> {
.optional()?) .optional()?)
} }
pub fn get_article_revision( pub fn get_article_revision(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevision>, Error> {
&self, use schema::article_revisions;
article_id: i32,
revision: i32,
) -> Result<Option<models::ArticleRevision>, Error> {
Ok(article_revisions::table Ok(article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(revision)) .filter(article_revisions::revision.eq(revision))
@ -140,17 +130,14 @@ impl<'a> SyncState<'a> {
.optional()?) .optional()?)
} }
pub fn query_article_revision_stubs<F>( pub fn query_article_revision_stubs<F>(&self, f: F) -> Result<Vec<models::ArticleRevisionStub>, Error>
&self,
f: F,
) -> Result<Vec<models::ArticleRevisionStub>, Error>
where where
F: 'static + Send + Sync, F: 'static + Send + Sync,
for<'x> F: FnOnce( for <'x> F:
FnOnce(article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>) ->
article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>, article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
) -> article_revisions::BoxedQuery<'x, diesel::sqlite::Sqlite>,
{ {
use crate::schema::article_revisions::dsl::*; use schema::article_revisions::dsl::*;
Ok(f(article_revisions.into_boxed()) Ok(f(article_revisions.into_boxed())
.select(( .select((
@ -162,24 +149,20 @@ impl<'a> SyncState<'a> {
title, title,
latest, latest,
author, author,
theme,
)) ))
.load(self.db_connection)?) .load(self.db_connection)?
)
} }
fn get_article_revision_stub( fn get_article_revision_stub(&self, article_id: i32, revision: i32) -> Result<Option<models::ArticleRevisionStub>, Error> {
&self, use schema::article_revisions;
article_id: i32,
revision: i32, Ok(self.query_article_revision_stubs(move |query| {
) -> Result<Option<models::ArticleRevisionStub>, Error> {
Ok(self
.query_article_revision_stubs(move |query| {
query query
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(revision)) .filter(article_revisions::revision.eq(revision))
.limit(1) .limit(1)
})? })?.pop())
.pop())
} }
pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> { pub fn lookup_slug(&self, slug: String) -> Result<SlugLookup, Error> {
@ -191,8 +174,9 @@ impl<'a> SyncState<'a> {
} }
self.db_connection.transaction(|| { self.db_connection.transaction(|| {
Ok( use schema::article_revisions;
match article_revisions::table
Ok(match article_revisions::table
.filter(article_revisions::slug.eq(slug)) .filter(article_revisions::slug.eq(slug))
.order(article_revisions::sequence_number.desc()) .order(article_revisions::sequence_number.desc())
.select(( .select((
@ -213,154 +197,100 @@ impl<'a> SyncState<'a> {
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
.filter(article_revisions::article_id.eq(stub.article_id)) .filter(article_revisions::article_id.eq(stub.article_id))
.select(article_revisions::slug) .select(article_revisions::slug)
.first::<String>(self.db_connection)?, .first::<String>(self.db_connection)?
),
},
) )
}) })
})
} }
fn rebase_update( fn rebase_update(&self, article_id: i32, target_base_revision: i32, existing_base_revision: i32, title: String, body: String)
&self, -> Result<RebaseResult, Error>
article_id: i32, {
target_base_revision: i32,
existing_base_revision: i32,
title: String,
body: String,
theme: Theme,
) -> Result<RebaseResult, Error> {
let mut title_a = title; let mut title_a = title;
let mut body_a = body; let mut body_a = body;
let mut theme_a = theme;
// TODO: Improve this implementation.
// Weakness: If the range of revisions is big, _one_ request from the
// client can cause _many_ database requests, cheaply causing lots
// of work for the server. Possible attack vector.
// Weakness: When the range is larger than just one iteration, the
// same title and body are retrieved from the database multiple
// times. Unnecessary extra copies.
for revision in existing_base_revision..target_base_revision { for revision in existing_base_revision..target_base_revision {
let mut stored = article_revisions::table let mut stored = article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.ge(revision)) .filter(article_revisions::revision.ge(revision))
.filter(article_revisions::revision.le(revision + 1)) .filter(article_revisions::revision.le(revision+1))
.order(article_revisions::revision.asc()) .order(article_revisions::revision.asc())
.select(( .select((
article_revisions::title, article_revisions::title,
article_revisions::body, article_revisions::body,
article_revisions::theme,
)) ))
.load::<(String, String, Theme)>(self.db_connection)?; .load::<(String, String)>(self.db_connection)?;
let (title_b, body_b, theme_b) = stored.pop().expect("Application layer guarantee"); let (title_b, body_b) = stored.pop().expect("Application layer guarantee");
let (title_o, body_o, theme_o) = stored.pop().expect("Application layer guarantee"); let (title_o, body_o) = stored.pop().expect("Application layer guarantee");
use crate::merge::MergeResult::*; use merge::MergeResult::*;
fn merge_themes(a: Theme, o: Theme, b: Theme) -> Theme {
// Last change wins
if a != o {
a
} else {
b
}
}
let update = { let update = {
let title_merge = merge::merge_chars(&title_a, &title_o, &title_b); let title_merge = merge::merge_chars(&title_a, &title_o, &title_b);
let body_merge = merge::merge_lines(&body_a, &body_o, &body_b); let body_merge = merge::merge_lines(&body_a, &body_o, &body_b);
let theme = merge_themes(theme_a, theme_o, theme_b);
match (title_merge, body_merge) { match (title_merge, body_merge) {
(Clean(title), Clean(body)) => (title, body, theme), (Clean(title), Clean(body)) => (title, body),
(title_merge, body_merge) => { (title_merge, body_merge) => {
return Ok(RebaseResult::Conflict(RebaseConflict { return Ok(RebaseResult::Conflict(RebaseConflict {
base_article: self base_article: self.get_article_revision_stub(article_id, revision+1)?.expect("Application layer guarantee"),
.get_article_revision_stub(article_id, revision + 1)?
.expect("Application layer guarantee"),
title: title_merge, title: title_merge,
body: body_merge.into_strings(), body: body_merge.to_strings(),
theme,
})); }));
} },
} }
}; };
title_a = update.0; title_a = update.0;
body_a = update.1; body_a = update.1;
theme_a = update.2;
} }
Ok(RebaseResult::Clean { Ok(RebaseResult::Clean { title: title_a, body: body_a })
title: title_a,
body: body_a,
theme: theme_a,
})
} }
pub fn update_article( pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
&self, -> Result<UpdateResult, Error>
article_id: i32, {
base_revision: i32,
title: String,
body: String,
author: Option<String>,
theme: Option<Theme>,
) -> Result<UpdateResult, Error> {
if title.is_empty() { if title.is_empty() {
return Err("title cannot be empty".into()); Err("title cannot be empty")?;
} }
self.db_connection.transaction(|| { self.db_connection.transaction(|| {
let (latest_revision, prev_title, prev_slug, prev_theme) = article_revisions::table use schema::article_revisions;
let (latest_revision, prev_title, prev_slug) = article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.order(article_revisions::revision.desc()) .order(article_revisions::revision.desc())
.select(( .select((
article_revisions::revision, article_revisions::revision,
article_revisions::title, article_revisions::title,
article_revisions::slug, article_revisions::slug,
article_revisions::theme,
)) ))
.first::<(i32, String, String, Theme)>(self.db_connection)?; .first::<(i32, String, String)>(self.db_connection)?;
// TODO: If this is an historic edit repeated, just respond OK // TODO: If this is an historic edit repeated, just respond OK
// This scheme would make POST idempotent. // This scheme would make POST idempotent.
if base_revision > latest_revision { if base_revision > latest_revision {
return Err("This edit is based on a future version of the article".into()); Err("This edit is based on a future version of the article")?;
} }
let theme = theme.unwrap_or(prev_theme); let rebase_result = self.rebase_update(article_id, latest_revision, base_revision, title, body)?;
let rebase_result = self.rebase_update(
article_id,
latest_revision,
base_revision,
title,
body,
theme,
)?;
let (title, body, theme) = match rebase_result { let (title, body) = match rebase_result {
RebaseResult::Clean { title, body, theme } => (title, body, theme), RebaseResult::Clean { title, body } => (title, body),
RebaseResult::Conflict(x) => return Ok(UpdateResult::RebaseConflict(x)), RebaseResult::Conflict(x) => return Ok(UpdateResult::RebaseConflict(x)),
}; };
let new_revision = latest_revision + 1; let new_revision = latest_revision + 1;
let slug = decide_slug( let slug = decide_slug(self.db_connection, article_id, &prev_title, &title, Some(&prev_slug))?;
self.db_connection,
article_id,
&prev_title,
&title,
Some(&prev_slug),
)?;
diesel::update( diesel::update(
article_revisions::table article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(latest_revision)), .filter(article_revisions::revision.eq(latest_revision))
) )
.set(article_revisions::latest.eq(false)) .set(article_revisions::latest.eq(false))
.execute(self.db_connection)?; .execute(self.db_connection)?;
@ -372,58 +302,44 @@ impl<'a> SyncState<'a> {
slug: &slug, slug: &slug,
title: &title, title: &title,
body: &body, body: &body,
author: author.as_deref(), author: author.as_ref().map(|x| &**x),
latest: true, latest: true,
theme,
}) })
.execute(self.db_connection)?; .execute(self.db_connection)?;
Ok(UpdateResult::Success( Ok(UpdateResult::Success(article_revisions::table
article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(new_revision)) .filter(article_revisions::revision.eq(new_revision))
.first::<models::ArticleRevision>(self.db_connection)?, .first::<models::ArticleRevision>(self.db_connection)?
)) ))
}) })
} }
pub fn create_article( pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
&self, -> Result<models::ArticleRevision, Error>
target_slug: Option<String>, {
title: String,
body: String,
author: Option<String>,
theme: Theme,
) -> Result<models::ArticleRevision, Error> {
if title.is_empty() { if title.is_empty() {
return Err("title cannot be empty".into()); Err("title cannot be empty")?;
} }
self.db_connection.transaction(|| { self.db_connection.transaction(|| {
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "articles"] #[table_name="articles"]
struct NewArticle { struct NewArticle {
id: Option<i32>, id: Option<i32>
} }
let article_id = { let article_id = {
use diesel::expression::sql_literal::sql; use diesel::expression::sql_literal::sql;
// Diesel and SQLite are a bit in disagreement for how this should look: // Diesel and SQLite are a bit in disagreement for how this should look:
sql::<diesel::sql_types::Integer>("INSERT INTO articles VALUES (null)") sql::<(diesel::sql_types::Integer)>("INSERT INTO articles VALUES (null)")
.execute(self.db_connection)?; .execute(self.db_connection)?;
sql::<diesel::sql_types::Integer>("SELECT LAST_INSERT_ROWID()") sql::<(diesel::sql_types::Integer)>("SELECT LAST_INSERT_ROWID()")
.load::<i32>(self.db_connection)? .load::<i32>(self.db_connection)?
.pop() .pop().expect("Statement must evaluate to an integer")
.expect("Statement must evaluate to an integer")
}; };
let slug = decide_slug( let slug = decide_slug(self.db_connection, article_id, "", &title, target_slug.as_ref().map(|x| &**x))?;
self.db_connection,
article_id,
"",
&title,
target_slug.as_deref(),
)?;
let new_revision = 1; let new_revision = 1;
@ -434,26 +350,20 @@ impl<'a> SyncState<'a> {
slug: &slug, slug: &slug,
title: &title, title: &title,
body: &body, body: &body,
author: author.as_deref(), author: author.as_ref().map(|x| &**x),
latest: true, latest: true,
theme,
}) })
.execute(self.db_connection)?; .execute(self.db_connection)?;
Ok(article_revisions::table Ok(article_revisions::table
.filter(article_revisions::article_id.eq(article_id)) .filter(article_revisions::article_id.eq(article_id))
.filter(article_revisions::revision.eq(new_revision)) .filter(article_revisions::revision.eq(new_revision))
.first::<models::ArticleRevision>(self.db_connection)?) .first::<models::ArticleRevision>(self.db_connection)?
)
}) })
} }
pub fn search_query( pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> Result<Vec<models::SearchResult>, Error> {
&self,
query_string: String,
limit: i32,
offset: i32,
snippet_size: i32,
) -> Result<Vec<models::SearchResult>, Error> {
use diesel::sql_query; use diesel::sql_query;
use diesel::sql_types::{Integer, Text}; use diesel::sql_types::{Integer, Text};
@ -491,10 +401,7 @@ impl<'a> SyncState<'a> {
} }
impl State { impl State {
pub fn new( pub fn new(connection_pool: Pool<ConnectionManager<SqliteConnection>>, cpu_pool: futures_cpupool::CpuPool) -> State {
connection_pool: Pool<ConnectionManager<SqliteConnection>>,
cpu_pool: futures_cpupool::CpuPool,
) -> State {
State { State {
connection_pool, connection_pool,
cpu_pool, cpu_pool,
@ -504,7 +411,7 @@ impl State {
fn execute<F, T>(&self, f: F) -> CpuFuture<T, Error> fn execute<F, T>(&self, f: F) -> CpuFuture<T, Error>
where where
F: 'static + Sync + Send, F: 'static + Sync + Send,
for<'a> F: FnOnce(SyncState<'a>) -> Result<T, Error>, for <'a> F: FnOnce(SyncState<'a>) -> Result<T, Error>,
T: 'static + Send, T: 'static + Send,
{ {
let connection_pool = self.connection_pool.clone(); let connection_pool = self.connection_pool.clone();
@ -520,30 +427,21 @@ impl State {
self.execute(move |state| state.get_article_slug(article_id)) self.execute(move |state| state.get_article_slug(article_id))
} }
pub fn get_article_revision( pub fn get_article_revision(&self, article_id: i32, revision: i32) -> CpuFuture<Option<models::ArticleRevision>, Error> {
&self,
article_id: i32,
revision: i32,
) -> CpuFuture<Option<models::ArticleRevision>, Error> {
self.execute(move |state| state.get_article_revision(article_id, revision)) self.execute(move |state| state.get_article_revision(article_id, revision))
} }
pub fn query_article_revision_stubs<F>( pub fn query_article_revision_stubs<F>(&self, f: F) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
&self,
f: F,
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error>
where where
F: 'static + Send + Sync, F: 'static + Send + Sync,
for<'a> F: FnOnce( for <'a> F:
FnOnce(article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>) ->
article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>, article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
) -> article_revisions::BoxedQuery<'a, diesel::sqlite::Sqlite>,
{ {
self.execute(move |state| state.query_article_revision_stubs(f)) self.execute(move |state| state.query_article_revision_stubs(f))
} }
pub fn get_latest_article_revision_stubs( pub fn get_latest_article_revision_stubs(&self) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
&self,
) -> CpuFuture<Vec<models::ArticleRevisionStub>, Error> {
self.query_article_revision_stubs(|query| { self.query_article_revision_stubs(|query| {
query query
.filter(article_revisions::latest.eq(true)) .filter(article_revisions::latest.eq(true))
@ -555,38 +453,19 @@ impl State {
self.execute(move |state| state.lookup_slug(slug)) self.execute(move |state| state.lookup_slug(slug))
} }
pub fn update_article( pub fn update_article(&self, article_id: i32, base_revision: i32, title: String, body: String, author: Option<String>)
&self, -> CpuFuture<UpdateResult, Error>
article_id: i32, {
base_revision: i32, self.execute(move |state| state.update_article(article_id, base_revision, title, body, author))
title: String,
body: String,
author: Option<String>,
theme: Option<Theme>,
) -> CpuFuture<UpdateResult, Error> {
self.execute(move |state| {
state.update_article(article_id, base_revision, title, body, author, theme)
})
} }
pub fn create_article( pub fn create_article(&self, target_slug: Option<String>, title: String, body: String, author: Option<String>)
&self, -> CpuFuture<models::ArticleRevision, Error>
target_slug: Option<String>, {
title: String, self.execute(move |state| state.create_article(target_slug, title, body, author))
body: String,
author: Option<String>,
theme: Theme,
) -> CpuFuture<models::ArticleRevision, Error> {
self.execute(move |state| state.create_article(target_slug, title, body, author, theme))
} }
pub fn search_query( pub fn search_query(&self, query_string: String, limit: i32, offset: i32, snippet_size: i32) -> CpuFuture<Vec<models::SearchResult>, Error> {
&self,
query_string: String,
limit: i32,
offset: i32,
snippet_size: i32,
) -> CpuFuture<Vec<models::SearchResult>, Error> {
self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size)) self.execute(move |state| state.search_query(query_string, limit, offset, snippet_size))
} }
} }
@ -594,13 +473,13 @@ impl State {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use crate::db; use db;
impl UpdateResult { impl UpdateResult {
pub fn unwrap(self) -> models::ArticleRevision { pub fn unwrap(self) -> models::ArticleRevision {
match self { match self {
UpdateResult::Success(x) => x, UpdateResult::Success(x) => x,
_ => panic!("Expected success"), _ => panic!("Expected success")
} }
} }
} }
@ -609,7 +488,7 @@ mod test {
($state:ident) => { ($state:ident) => {
let db = db::test_connection(); let db = db::test_connection();
let $state = SyncState::new(&db); let $state = SyncState::new(&db);
}; }
} }
#[test] #[test]
@ -621,27 +500,16 @@ mod test {
#[test] #[test]
fn create_article() { fn create_article() {
init!(state); init!(state);
let article_revision = state let article_revision = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
assert_eq!("title", article_revision.slug); assert_eq!("title", article_revision.slug);
assert!(article_revision.latest); assert_eq!(true, article_revision.latest);
assert_eq!(Theme::Cyan, article_revision.theme);
} }
#[test] #[test]
fn create_article_when_empty_slug_then_empty_slug() { fn create_article_when_empty_slug_then_empty_slug() {
// Front page gets to keep its empty slug // Front page gets to keep its empty slug
init!(state); init!(state);
let article_revision = state let article_revision = state.create_article(Some("".into()), "Title".into(), "Body".into(), None).unwrap();
.create_article(
Some("".into()),
"Title".into(),
"Body".into(),
None,
Theme::Cyan,
)
.unwrap();
assert_eq!("", article_revision.slug); assert_eq!("", article_revision.slug);
} }
@ -649,21 +517,9 @@ mod test {
fn update_article() { fn update_article() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
let new_revision = state let new_revision = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None).unwrap().unwrap();
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"New body".into(),
None,
Some(Theme::BlueGray),
)
.unwrap()
.unwrap();
assert_eq!(article.article_id, new_revision.article_id); assert_eq!(article.article_id, new_revision.article_id);
@ -676,135 +532,46 @@ mod test {
assert_eq!(article.slug, new_revision.slug); assert_eq!(article.slug, new_revision.slug);
assert_eq!("New body", new_revision.body); assert_eq!("New body", new_revision.body);
assert_eq!(Theme::BlueGray, new_revision.theme);
} }
#[test] #[test]
fn update_article_when_sequential_edits_then_last_wins() { fn update_article_when_sequential_edits_then_last_wins() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "Title".into(), "Body".into(), None).unwrap();
.create_article(None, "Title".into(), "Body".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "New body".into(), None).unwrap().unwrap();
.update_article( let second_edit = state.update_article(article.article_id, first_edit.revision, article.title.clone(), "Newer body".into(), None).unwrap().unwrap();
article.article_id,
article.revision,
article.title.clone(),
"New body".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
first_edit.revision,
article.title,
"Newer body".into(),
None,
Some(Theme::Amber),
)
.unwrap()
.unwrap();
assert_eq!("Newer body", second_edit.body); assert_eq!("Newer body", second_edit.body);
assert_eq!(Theme::Amber, second_edit.theme);
} }
#[test] #[test]
fn update_article_when_edit_conflict_then_merge() { fn update_article_when_edit_conflict_then_merge() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None).unwrap();
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx\nb\nc\n".into(), None).unwrap().unwrap();
.update_article( let second_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None).unwrap().unwrap();
article.article_id,
article.revision,
article.title.clone(),
"a\nx\nb\nc\n".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nb\ny\nc\n".into(),
None,
Some(Theme::Amber),
)
.unwrap()
.unwrap();
assert!(article.revision < first_edit.revision); assert!(article.revision < first_edit.revision);
assert!(first_edit.revision < second_edit.revision); assert!(first_edit.revision < second_edit.revision);
assert_eq!("a\nx\nb\ny\nc\n", second_edit.body); assert_eq!("a\nx\nb\ny\nc\n", second_edit.body);
assert_eq!(Theme::Amber, second_edit.theme);
} }
#[test] #[test]
fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() { fn update_article_when_edit_conflict_then_rebase_over_multiple_revisions() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "Title".into(), "a\nb\nc\n".into(), None).unwrap();
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let edit = state let edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nx1\nb\nc\n".into(), None).unwrap().unwrap();
.update_article( let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nb\nc\n".into(), None).unwrap().unwrap();
article.article_id, let edit = state.update_article(article.article_id, edit.revision, article.title.clone(), "a\nx1\nx2\nx3\nb\nc\n".into(), None).unwrap().unwrap();
article.revision,
article.title.clone(),
"a\nx1\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let edit = state
.update_article(
article.article_id,
edit.revision,
article.title.clone(),
"a\nx1\nx2\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let edit = state
.update_article(
article.article_id,
edit.revision,
article.title.clone(),
"a\nx1\nx2\nx3\nb\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let rebase_edit = state let rebase_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "a\nb\ny\nc\n".into(), None).unwrap().unwrap();
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nb\ny\nc\n".into(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
assert!(article.revision < edit.revision); assert!(article.revision < edit.revision);
assert!(edit.revision < rebase_edit.revision); assert!(edit.revision < rebase_edit.revision);
@ -816,32 +583,10 @@ mod test {
fn update_article_when_title_edit_conflict_then_merge_title() { fn update_article_when_title_edit_conflict_then_merge_title() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "titlle".into(), "".into(), None).unwrap();
.create_article(None, "titlle".into(), "".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state let first_edit = state.update_article(article.article_id, article.revision, "Titlle".into(), article.body.clone(), None).unwrap().unwrap();
.update_article( let second_edit = state.update_article(article.article_id, article.revision, "title".into(), article.body.clone(), None).unwrap().unwrap();
article.article_id,
article.revision,
"Titlle".into(),
article.body.clone(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
"title".into(),
article.body.clone(),
None,
Some(article.theme),
)
.unwrap()
.unwrap();
assert!(article.revision < first_edit.revision); assert!(article.revision < first_edit.revision);
assert!(first_edit.revision < second_edit.revision); assert!(first_edit.revision < second_edit.revision);
@ -853,110 +598,20 @@ mod test {
fn update_article_when_merge_conflict() { fn update_article_when_merge_conflict() {
init!(state); init!(state);
let article = state let article = state.create_article(None, "Title".into(), "a".into(), None).unwrap();
.create_article(None, "Title".into(), "a".into(), None, Theme::Cyan)
.unwrap();
let first_edit = state let first_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "b".into(), None).unwrap().unwrap();
.update_article( let conflict_edit = state.update_article(article.article_id, article.revision, article.title.clone(), "c".into(), None).unwrap();
article.article_id,
article.revision,
article.title.clone(),
"b".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let conflict_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"c".into(),
None,
Some(Theme::Amber),
)
.unwrap();
match conflict_edit { match conflict_edit {
UpdateResult::Success(..) => panic!("Expected conflict"), UpdateResult::Success(..) => panic!("Expected conflict"),
UpdateResult::RebaseConflict(RebaseConflict { UpdateResult::RebaseConflict(RebaseConflict { base_article, title, body }) => {
base_article,
title,
body,
theme,
}) => {
assert_eq!(first_edit.revision, base_article.revision); assert_eq!(first_edit.revision, base_article.revision);
assert_eq!(title, merge::MergeResult::Clean(article.title)); assert_eq!(title, merge::MergeResult::Clean(article.title.clone()));
assert_eq!( assert_eq!(body, merge::MergeResult::Conflicted(vec![
body, merge::Output::Conflict(vec!["c"], vec!["a"], vec!["b"]),
merge::MergeResult::Conflicted(vec![merge::Output::Conflict( ]).to_strings());
vec!["c"],
vec!["a"],
vec!["b"]
),])
.into_strings()
);
assert_eq!(Theme::Amber, theme);
} }
}; };
} }
#[test]
fn update_article_when_theme_conflict_then_ignore_unchanged() {
init!(state);
let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let _first_edit = state
.update_article(
article.article_id,
article.revision,
article.title.clone(),
"a\nx\nb\nc\n".into(),
None,
Some(Theme::Blue),
)
.unwrap()
.unwrap();
let second_edit = state
.update_article(
article.article_id,
article.revision,
article.title,
"a\nb\ny\nc\n".into(),
None,
Some(Theme::Cyan),
)
.unwrap()
.unwrap();
assert_eq!(Theme::Blue, second_edit.theme);
}
#[test]
fn update_article_with_no_given_theme_then_theme_unchanged() {
init!(state);
let article = state
.create_article(None, "Title".into(), "a\nb\nc\n".into(), None, Theme::Cyan)
.unwrap();
let edit = state
.update_article(
article.article_id,
article.revision,
article.title,
article.body,
None,
None,
)
.unwrap()
.unwrap();
assert_eq!(Theme::Cyan, edit.theme);
}
} }


@ -1,232 +0,0 @@
use std::io::Write;
use diesel::backend::Backend;
use diesel::deserialize::{self, FromSql};
use diesel::serialize::{self, Output, ToSql};
use diesel::sql_types::Text;
use diesel::sqlite::Sqlite;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)] // Serde
#[serde(rename_all = "kebab-case")]
#[derive(AsExpression, FromSqlRow)] // Diesel
#[sql_type = "Text"]
pub enum Theme {
Red,
Pink,
Purple,
DeepPurple,
Indigo,
Blue,
LightBlue,
Cyan,
Teal,
Green,
LightGreen,
Lime,
Yellow,
Amber,
Orange,
DeepOrange,
Brown,
Gray,
BlueGray,
}
use self::Theme::*;
forward_display_to_serde!(Theme);
forward_from_str_to_serde!(Theme);
pub const THEMES: [Theme; 19] = [
Red, Pink, Purple, DeepPurple, Indigo, Blue, LightBlue, Cyan, Teal, Green, LightGreen, Lime,
Yellow, Amber, Orange, DeepOrange, Brown, Gray, BlueGray,
];
pub fn theme_from_str_hash(x: &str) -> Theme {
let hash = seahash::hash(x.as_bytes()) as usize;
let choice = hash % THEMES.len();
THEMES[choice]
}
pub fn random() -> Theme {
use rand::Rng;
*rand::thread_rng()
.choose(&THEMES)
.expect("Could only fail for an empty slice")
}
impl ToSql<Text, Sqlite> for Theme {
fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
ToSql::<Text, Sqlite>::to_sql(&self.to_string(), out)
}
}
impl FromSql<Text, Sqlite> for Theme {
fn from_sql(value: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
// See Diesel's documentation on how to implement FromSql for Sqlite,
// especially with regards to the unsafe conversion below.
// http://docs.diesel.rs/diesel/deserialize/trait.FromSql.html
let text_ptr = <*const str as FromSql<Text, Sqlite>>::from_sql(value)?;
let text = unsafe { &*text_ptr };
text.parse().map_err(Into::into)
}
}
pub struct CssClass(Theme);
impl Theme {
pub fn css_class(self) -> CssClass {
CssClass(self)
}
}
use std::fmt::{self, Display, Formatter};
impl Display for CssClass {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "theme-{}", self.0)
}
}
#[cfg(test)]
mod test {
use std::error::Error;
use diesel::prelude::*;
use diesel::sql_query;
use diesel::sql_types::Text;
use super::*;
#[test]
fn basic_serialize() {
assert_eq!(serde_plain::to_string(&Theme::Red).unwrap(), "red");
}
#[test]
fn serialize_kebab_case() {
assert_eq!(
serde_plain::to_string(&Theme::LightGreen).unwrap(),
"light-green"
);
}
#[test]
fn serialize_json() {
#[derive(Serialize)]
struct Test {
x: Theme,
}
assert_eq!(
serde_json::to_string(&Test { x: Theme::Red }).unwrap(),
"{\"x\":\"red\"}"
);
}
#[test]
fn deserialize_json() {
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test {
x: Theme,
}
assert_eq!(
serde_json::from_str::<Test>("{\"x\":\"red\"}").unwrap(),
Test { x: Theme::Red }
);
}
#[test]
fn serialize_urlencoded() {
#[derive(Serialize)]
struct Test {
x: Theme,
}
assert_eq!(
serde_urlencoded::to_string(&Test { x: Theme::Red }).unwrap(),
"x=red"
);
}
#[test]
fn deserialize_urlencoded() {
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct Test {
x: Theme,
}
assert_eq!(
serde_urlencoded::from_str::<Test>("x=red").unwrap(),
Test { x: Theme::Red }
);
}
#[test]
fn basic_display() {
assert_eq!(&Theme::Red.to_string(), "red");
}
#[test]
fn display_kebab_case() {
assert_eq!(&Theme::LightGreen.to_string(), "light-green");
}
#[test]
fn basic_from_str() {
let indigo: Theme = "indigo".parse().unwrap();
assert_eq!(indigo, Theme::Indigo);
}
#[test]
fn to_number() {
assert_eq!(Theme::Red as i32, 0);
assert_eq!(Theme::LightGreen as i32, 10);
assert_eq!(Theme::BlueGray as i32, 18);
}
#[test]
fn from_str_hash() {
assert_eq!(theme_from_str_hash("Bartefjes"), Theme::Orange);
}
#[test]
fn css_class_display() {
assert_eq!(&Theme::Red.css_class().to_string(), "theme-red");
}
#[test]
fn basic_db_roundtrip() -> Result<(), Box<dyn Error>> {
let conn = SqliteConnection::establish(":memory:")?;
#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row {
#[sql_type = "Text"]
theme: Theme,
}
let res = sql_query("SELECT ? as theme")
.bind::<Text, _>(DeepPurple)
.load::<Row>(&conn)?;
assert_eq!(&[Row { theme: DeepPurple }], res.as_slice());
Ok(())
}
#[test]
fn db_invalid_value_gives_error() -> Result<(), Box<dyn Error>> {
let conn = SqliteConnection::establish(":memory:")?;
#[derive(QueryableByName, PartialEq, Eq, Debug)]
struct Row {
#[sql_type = "Text"]
theme: Theme,
}
let res = sql_query("SELECT 'green' as theme").load::<Row>(&conn);
assert!(res.is_ok());
let res = sql_query("SELECT 'blueish-yellow' as theme").load::<Row>(&conn);
assert!(res.is_err());
Ok(())
}
}
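As a hedged aside (not part of this changeset): the two helpers above, theme_from_str_hash and random, lend themselves to a small default-theme policy. The function name default_theme and its call pattern below are illustrative assumptions, not code from this repository.

// Illustrative sketch only: pick a stable colour from the slug when one
// exists, otherwise fall back to a random theme.
fn default_theme(slug: Option<&str>) -> Theme {
    match slug {
        Some(slug) => theme_from_str_hash(slug),
        None => random(),
    }
}

Because seahash is deterministic, the same slug always maps to the same Theme, so re-rendering an article never shuffles its colour.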


@ -1,7 +0,0 @@
pub trait Lookup {
type Resource;
type Error;
type Future: futures::Future<Item = Option<Self::Resource>, Error = Self::Error>;
fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future;
}


@ -1,5 +1,5 @@
-mod lookup;
mod resource;
+mod scope;

-pub use self::lookup::*;
pub use self::resource::*;
+pub use self::scope::*;


@ -1,13 +1,15 @@
+use futures;
use futures::{Future, Stream};
-use hyper::server::Response;
use hyper::{self, header, mime, server};
+use hyper::server::Response;
+use std;

lazy_static! {
    static ref TEXT_PLAIN: mime::Mime = "text/plain;charset=utf-8".parse().unwrap();
}

-pub type Error = Box<dyn std::error::Error + Send + Sync>;
-pub type ResponseFuture = Box<dyn futures::Future<Item = server::Response, Error = Error>>;
+pub type Error = Box<std::error::Error + Send + Sync>;
+pub type ResponseFuture = Box<futures::Future<Item = server::Response, Error = Error>>;

pub trait Resource {
    fn allow(&self) -> Vec<hyper::Method>;
@ -21,24 +23,22 @@ pub trait Resource {
    }

    fn put(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
-    where
-        Self: 'static,
+        where Self: 'static
    {
-        Box::new(
-            body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
-                .map_err(Into::into)
-                .and_then(move |_| futures::finished(self.method_not_allowed())),
-        )
+        Box::new(body
+            .fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
+            .map_err(Into::into)
+            .and_then(move |_| futures::finished(self.method_not_allowed()))
+        )
    }

    fn post(self: Box<Self>, body: hyper::Body, _identity: Option<String>) -> ResponseFuture
-    where
-        Self: 'static,
+        where Self: 'static
    {
-        Box::new(
-            body.fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
-                .map_err(Into::into)
-                .and_then(move |_| futures::finished(self.method_not_allowed())),
-        )
+        Box::new(body
+            .fold((), |_, _| -> Result<(), hyper::Error> { Ok(()) })
+            .map_err(Into::into)
+            .and_then(move |_| futures::finished(self.method_not_allowed()))
+        )
    }

src/web/scope.rs Normal file

@ -0,0 +1,9 @@
use futures;
pub trait Scope {
type Resource;
type Error;
type Future: futures::Future<Item=Option<Self::Resource>, Error=Self::Error>;
fn scope_lookup(&self, path: &str, query: Option<&str>) -> Self::Future;
}
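To make the new trait concrete, here is a hedged sketch of a trivial implementor (not part of this changeset; HelloScope and its string resource are invented for illustration, written against the same futures 0.1 API the trait uses):

use futures::future::{self, FutureResult};

struct HelloScope;

impl Scope for HelloScope {
    type Resource = String;
    type Error = Box<::std::error::Error + Send + Sync>;
    type Future = FutureResult<Option<Self::Resource>, Self::Error>;

    // Resolve immediately: Some(resource) on a hit, None on a miss.
    fn scope_lookup(&self, path: &str, _query: Option<&str>) -> Self::Future {
        future::ok(match path {
            "/hello" => Some("Hello, world!".to_string()),
            _ => None,
        })
    }
}

A composite scope such as WikiLookup can then delegate sub-paths to inner scopes and box the results behind a single resource type.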


@ -2,23 +2,24 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::str::Utf8Error;

+use futures::{Future, finished, failed, done};
use futures::future::FutureResult;
-use futures::{done, failed, finished, Future};
use percent_encoding::percent_decode;
use slug::slugify;

-use crate::resources::*;
-use crate::state::State;
-use crate::web::{Lookup, Resource};
+use resources::*;
+use components::*;
+use state::State;
+use web::{Scope, Resource};

#[allow(unused)]
-use crate::assets::*;
+use assets::*;

-type BoxResource = Box<dyn Resource + Sync + Send>;
-type ResourceFn = Box<dyn Fn() -> BoxResource + Sync + Send>;
+type BoxResource = Box<Resource + Sync + Send>;
+type ResourceFn = Box<Fn() -> BoxResource + Sync + Send>;

lazy_static! {
-    static ref LICENSES_MAP: HashMap<&'static str, ResourceFn> = hashmap! {
+    static ref LICENSES_MAP: HashMap<&'static str, ResourceFn> = hashmap!{
        "bsd-3-clause" => Box::new(|| Box::new(
            HtmlResource::new(Some("../"), "The 3-Clause BSD License", include_str!("licenses/bsd-3-clause.html"))
        ) as BoxResource) as ResourceFn,
@ -40,7 +41,7 @@ lazy_static! {
#[derive(Clone)]
pub struct WikiLookup {
    state: State,
-    changes_lookup: ChangesLookup,
+    changes_lookup: changes::Scope,
    diff_lookup: DiffLookup,
    search_lookup: SearchLookup,
}
@ -54,10 +55,9 @@ fn split_one(path: &str) -> Result<(Cow<str>, Option<&str>), Utf8Error> {
    Ok((head, tail))
}

-fn map_lookup(
-    map: &HashMap<&str, ResourceFn>,
-    path: &str,
-) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
+fn map_lookup(map: &HashMap<&str, ResourceFn>, path: &str) ->
+    FutureResult<Option<BoxResource>, Box<::std::error::Error + Send + Sync>>
+{
    let (head, tail) = match split_one(path) {
        Ok(x) => x,
        Err(x) => return failed(x.into()),
@ -74,14 +74,13 @@ fn map_lookup(
}

#[allow(unused)]
-fn fs_lookup(
-    root: &str,
-    path: &str,
-) -> FutureResult<Option<BoxResource>, Box<dyn ::std::error::Error + Send + Sync>> {
+fn fs_lookup(root: &str, path: &str) ->
+    FutureResult<Option<BoxResource>, Box<::std::error::Error + Send + Sync>>
+{
    use std::fs::File;
    use std::io::prelude::*;

-    let extension = path.rsplit_once('.').map(|x| x.1);
+    let extension = path.rsplitn(2, ".").next();

    let content_type = match extension {
        Some("html") => "text/html",
@ -89,37 +88,32 @@ fn fs_lookup(
Some("js") => "application/javascript", Some("js") => "application/javascript",
Some("woff") => "application/font-woff", Some("woff") => "application/font-woff",
_ => "application/binary", _ => "application/binary",
} }.parse().unwrap();
.parse()
.unwrap();
let mut filename = root.to_string(); let mut filename = root.to_string();
filename.push_str(path); filename.push_str(path);
let mut f = File::open(&filename).unwrap_or_else(|_| panic!("Not found: {}", filename)); let mut f = File::open(&filename)
.unwrap_or_else(|_| panic!(format!("Not found: {}", filename)));
let mut body = Vec::new(); let mut body = Vec::new();
f.read_to_end(&mut body).expect("Unable to read file"); f.read_to_end(&mut body)
.expect("Unable to read file");
finished(Some(Box::new(ReadOnlyResource { content_type, body }))) finished(Some(Box::new(ReadOnlyResource { content_type, body })))
} }
impl WikiLookup { impl WikiLookup {
pub fn new(state: State, show_authors: bool) -> WikiLookup { pub fn new(state: State, show_authors: bool) -> WikiLookup {
let changes_lookup = ChangesLookup::new(state.clone(), show_authors); let changes_lookup = changes::Scope::new(state.clone(), show_authors);
let diff_lookup = DiffLookup::new(state.clone()); let diff_lookup = DiffLookup::new(state.clone());
let search_lookup = SearchLookup::new(state.clone()); let search_lookup = SearchLookup::new(state.clone());
WikiLookup { WikiLookup { state, changes_lookup, diff_lookup, search_lookup }
state,
changes_lookup,
diff_lookup,
search_lookup,
}
} }
fn revisions_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Lookup>::Future { fn revisions_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Scope>::Future {
let (article_id, revision): (i32, i32) = match (|| -> Result<_, <Self as Lookup>::Error> { let (article_id, revision): (i32, i32) = match (|| -> Result<_, <Self as Scope>::Error> {
let (article_id, tail) = split_one(path)?; let (article_id, tail) = split_one(path)?;
let (revision, tail) = split_one(tail.ok_or("Not found")?)?; let (revision, tail) = split_one(tail.ok_or("Not found")?)?;
if tail.is_some() { if tail.is_some() {
@ -133,17 +127,17 @@ impl WikiLookup {
        };

        Box::new(
-            self.state
-                .get_article_revision(article_id, revision)
-                .and_then(|article_revision| {
-                    Ok(article_revision
-                        .map(move |x| Box::new(ArticleRevisionResource::new(x)) as BoxResource))
-                }),
+            self.state.get_article_revision(article_id, revision)
+                .and_then(|article_revision|
+                    Ok(article_revision.map(move |x| Box::new(
+                        ArticleRevisionResource::new(x)
+                    ) as BoxResource))
+                )
        )
    }

-    fn by_id_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Lookup>::Future {
-        let article_id: i32 = match (|| -> Result<_, <Self as Lookup>::Error> {
+    fn by_id_lookup(&self, path: &str, _query: Option<&str>) -> <Self as Scope>::Future {
+        let article_id: i32 = match (|| -> Result<_, <Self as Scope>::Error> {
            let (article_id, tail) = split_one(path)?;
            if tail.is_some() {
                return Err("Not found".into());
@ -155,15 +149,18 @@ impl WikiLookup {
            Err(_) => return Box::new(finished(None)),
        };

-        Box::new(self.state.get_article_slug(article_id).and_then(|slug| {
-            Ok(slug.map(|slug| {
-                Box::new(TemporaryRedirectResource::new(format!("../{}", slug))) as BoxResource
-            }))
-        }))
+        Box::new(
+            self.state.get_article_slug(article_id)
+                .and_then(|slug|
+                    Ok(slug.map(|slug| Box::new(
+                        TemporaryRedirectResource::new(format!("../{}", slug))
+                    ) as BoxResource))
+                )
+        )
    }

-    fn diff_lookup_f(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future {
-        let article_id: u32 = match (|| -> Result<_, <Self as Lookup>::Error> {
+    fn diff_lookup_f(&self, path: &str, query: Option<&str>) -> <Self as Scope>::Future {
+        let article_id: u32 = match (|| -> Result<_, <Self as Scope>::Error> {
            let (article_id, tail) = split_one(path)?;
            if tail.is_some() {
                return Err("Not found".into());
@ -178,42 +175,42 @@ impl WikiLookup {
        Box::new(self.diff_lookup.lookup(article_id, query))
    }

-    fn reserved_lookup(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future {
+    fn reserved_lookup(&self, path: &str, query: Option<&str>) -> <Self as Scope>::Future {
        let (head, tail) = match split_one(path) {
            Ok(x) => x,
            Err(x) => return Box::new(failed(x.into())),
        };

        match (head.as_ref(), tail) {
-            ("_about", None) => Box::new(finished(Some(
-                Box::new(AboutResource::new()) as BoxResource
-            ))),
-            ("_about", Some(license)) => Box::new(map_lookup(&LICENSES_MAP, license)),
-            #[cfg(feature = "dynamic-assets")]
-            ("_assets", Some(asset)) => Box::new(fs_lookup(
-                concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"),
-                asset,
-            )),
-            #[cfg(not(feature = "dynamic-assets"))]
-            ("_assets", Some(asset)) => Box::new(map_lookup(&ASSETS_MAP, asset)),
-            ("_by_id", Some(tail)) => self.by_id_lookup(tail, query),
-            ("_changes", None) => Box::new(self.changes_lookup.lookup(query)),
-            ("_diff", Some(tail)) => self.diff_lookup_f(tail, query),
-            ("_new", None) => Box::new(finished(Some(Box::new(NewArticleResource::new(
-                self.state.clone(),
-                None,
-                true,
-            )) as BoxResource))),
-            ("_revisions", Some(tail)) => self.revisions_lookup(tail, query),
-            ("_search", None) => Box::new(done(self.search_lookup.lookup(query))),
-            ("_sitemap", None) => Box::new(finished(Some(Box::new(SitemapResource::new(
-                self.state.clone(),
-            )) as BoxResource))),
+            ("_about", None) =>
+                Box::new(finished(Some(Box::new(AboutResource::new()) as BoxResource))),
+            ("_about", Some(license)) =>
+                Box::new(map_lookup(&LICENSES_MAP, license)),
+            #[cfg(feature="dynamic-assets")]
+            ("_assets", Some(asset)) =>
+                Box::new(fs_lookup(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/"), asset)),
+            #[cfg(not(feature="dynamic-assets"))]
+            ("_assets", Some(asset)) =>
+                Box::new(map_lookup(&ASSETS_MAP, asset)),
+            ("_by_id", Some(tail)) =>
+                self.by_id_lookup(tail, query),
+            ("_changes", None) =>
+                Box::new(self.changes_lookup.lookup(query)),
+            ("_diff", Some(tail)) =>
+                self.diff_lookup_f(tail, query),
+            ("_new", None) =>
+                Box::new(finished(Some(Box::new(NewArticleResource::new(self.state.clone(), None)) as BoxResource))),
+            ("_revisions", Some(tail)) =>
+                self.revisions_lookup(tail, query),
+            ("_search", None) =>
+                Box::new(done(self.search_lookup.lookup(query))),
+            ("_sitemap", None) =>
+                Box::new(finished(Some(Box::new(SitemapResource::new(self.state.clone())) as BoxResource))),
            _ => Box::new(finished(None)),
        }
    }

-    fn article_lookup(&self, path: &str, query: Option<&str>) -> <Self as Lookup>::Future {
+    fn article_lookup(&self, path: &str, query: Option<&str>) -> <Self as Scope>::Future {
        let (slug, tail) = match split_one(path) {
            Ok(x) => x,
            Err(x) => return Box::new(failed(x.into())),
@ -224,49 +221,42 @@ impl WikiLookup {
            return Box::new(finished(None));
        }

-        let edit = query == Some("edit");
-
        // Normalize all user-generated slugs:
        let slugified_slug = slugify(&slug);
        if slugified_slug != slug {
            return Box::new(finished(Some(
-                Box::new(TemporaryRedirectResource::from_slug(slugified_slug, edit)) as BoxResource,
+                Box::new(TemporaryRedirectResource::from_slug(slugified_slug)) as BoxResource
            )));
        }

        let state = self.state.clone();
+        let edit = query == Some("edit");
        let slug = slug.into_owned();

-        use crate::state::SlugLookup;
-        Box::new(self.state.lookup_slug(slug.clone()).and_then(move |x| {
-            Ok(Some(match x {
-                SlugLookup::Miss => {
-                    Box::new(NewArticleResource::new(state, Some(slug), edit)) as BoxResource
-                }
-                SlugLookup::Hit {
-                    article_id,
-                    revision,
-                } => {
-                    Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource
-                }
-                SlugLookup::Redirect(slug) => {
-                    Box::new(TemporaryRedirectResource::from_slug(slug, edit)) as BoxResource
-                }
-            }))
-        }))
+        use state::SlugLookup;
+        Box::new(self.state.lookup_slug(slug.clone())
+            .and_then(move |x| Ok(Some(match x {
+                SlugLookup::Miss =>
+                    Box::new(NewArticleResource::new(state, Some(slug))) as BoxResource,
+                SlugLookup::Hit { article_id, revision } =>
+                    Box::new(ArticleResource::new(state, article_id, revision, edit)) as BoxResource,
+                SlugLookup::Redirect(slug) =>
+                    Box::new(TemporaryRedirectResource::from_slug(slug)) as BoxResource,
+            })))
+        )
    }
}

-impl Lookup for WikiLookup {
+impl Scope for WikiLookup {
    type Resource = BoxResource;
-    type Error = Box<dyn ::std::error::Error + Send + Sync>;
-    type Future = Box<dyn Future<Item = Option<Self::Resource>, Error = Self::Error>>;
+    type Error = Box<::std::error::Error + Send + Sync>;
+    type Future = Box<Future<Item = Option<Self::Resource>, Error = Self::Error>>;

-    fn lookup(&self, path: &str, query: Option<&str>) -> Self::Future {
-        assert!(path.starts_with('/'));
+    fn scope_lookup(&self, path: &str, query: Option<&str>) -> Self::Future {
+        assert!(path.starts_with("/"));
        let path = &path[1..];

-        if path.starts_with('_') {
+        if path.starts_with("_") {
            self.reserved_lookup(path, query)
        } else {
            self.article_lookup(path, query)
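For orientation, a hedged sketch (not from this changeset) of the slug normalization rule used in article_lookup above; slugify is the same helper from the slug crate that this file already imports, and the literal values are illustrative:

#[test]
fn non_canonical_slugs_redirect_to_their_canonical_form() {
    // "Sausage Wiki" is not in canonical slug form, so article_lookup would
    // answer with a redirect to the slugified path instead of content.
    assert_eq!(slugify("Sausage Wiki"), "sausage-wiki");
    // An already canonical slug is left unchanged and is served directly.
    assert_eq!(slugify("sausage-wiki"), "sausage-wiki");
}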


@ -5,7 +5,7 @@
{{>article_contents.html}}
</div>
-<form autocomplete="off" id="article-editor" action="" method="POST">
+<form id="article-editor" action="" method="POST">
<div class="editor">
<div class="hero">
@ -14,12 +14,6 @@
</header>
</div>
-<div class="theme-picker">
-{{#themes}}
-<input autocomplete="off" type="radio" name="theme" value="{{.theme}}"{{#.selected?}} checked{{/.selected}} class="theme-picker--option {{.theme.css_class()}} themed">
-{{/themes}}
-</div>
<article>
<p>
<input autocomplete=off type=hidden name=base_revision value="{{revision}}">
@ -31,20 +25,10 @@
</div>
<div class="editor-controls">
-{{#edit?}}
-<div class="cancel-interaction-group {{#cancel_url}}interaction-group--root--enabled{{/cancel_url}}{{^cancel_url}}interaction-group--root--disabled{{/cancel_url}}">
-<a class="interaction-group--enabled button button-cancel cancel" href="{{#cancel_url}}{{.}}{{/cancel_url}}">Cancel</a>
-<button class="interaction-group--disabled button button-cancel" disabled>Cancel</a>
-</div>
-<button class="button button-default" type=submit {{^edit?}}disabled{{/edit}}>Save</button>
-{{/edit}}
-{{^edit?}}
-<div class="cancel-interaction-group interaction-group--root--disabled">
-<a class="interaction-group--enabled button button-cancel cancel" href="{{#cancel_url}}{{.}}{{/cancel_url}}">Cancel</a>
-<button class="interaction-group--disabled button button-cancel" disabled>Cancel</a>
-</div>
-<button class="button button-default" type=submit disabled>Save</button>
-{{/edit}}
+{{#cancel_url}}
+<a class="cancel" href="{{.}}">Cancel</a>
+{{/cancel_url}}
+<button type=submit>Save</button>
</div>
</form>


@ -10,7 +10,6 @@
<p>
You are viewing the difference between two {{#consecutive?}}consecutive{{/consecutive}}
revisions of <a href="_by_id/{{article_id}}">this article</a>.
-{{#author}}This changeset was authored by <a href="{{..author_link}}">{{.}}</a>.{{/author}}
</p>
<p>


@ -9,7 +9,7 @@
<link href="_assets/{{style_css()}}" rel="stylesheet">
<meta name="generator" content="{{project_name()}} {{version()}}" />
</head>
-<body class="{{theme.css_class()}}">
+<body class="theme-{{theme()}}">
{{>search_input.html}}
{{{body}}}
</body>


@ -1,7 +1,7 @@
<div class="search-container"> <div class="search-container">
<form class="search keyboard-focus-control" action=_search method=GET> <form class="search keyboard-focus-control" action=_search method=GET>
<div class="search-widget-container"> <div class="search-widget-container">
<input data-focusindex="0" type=search name=q placeholder=Search autocomplete=off> <input data-focusindex="0" type=search name=q placeholder=search autocomplete=off>
<ul class="live-results search-results"> <ul class="live-results search-results">
</ul> </ul>
</div> </div>