Compare commits

...

17 Commits

Author SHA1 Message Date
b6de727883 page rewrite
Some checks failed
CI / Check typos (push) Successful in 1m3s
CI / Check links (push) Failing after 1m14s
CI / Clippy (push) Successful in 1m43s
CI / Build and test (push) Successful in 1m31s
CI / Build container (push) Successful in 1m45s
CI / Deploy on waypoint (push) Failing after 1m23s
2025-11-16 12:58:20 -08:00
04d98462dd Return 404 for 404 page
Some checks failed
CI / Check typos (push) Successful in 8s
CI / Check links (push) Failing after 13s
CI / Clippy (push) Successful in 54s
CI / Build and test (push) Successful in 1m20s
CI / Build container (push) Has been skipped
CI / Deploy on waypoint (push) Has been skipped
2025-11-14 09:46:18 -08:00
991eb92562 Tweaks
Some checks failed
CI / Check typos (push) Successful in 28s
CI / Check links (push) Failing after 31s
CI / Clippy (push) Successful in 1m2s
CI / Build and test (push) Successful in 1m21s
CI / Build container (push) Successful in 1m5s
CI / Deploy on waypoint (push) Successful in 44s
2025-11-12 14:18:44 -08:00
529dfc468e README 2025-11-12 14:18:41 -08:00
6493476565 TTL 2025-11-12 13:59:40 -08:00
d5067ff381 404 2025-11-12 13:59:38 -08:00
532cfe58ba Minor tweaks
Some checks failed
CI / Check typos (push) Successful in 8s
CI / Clippy (push) Successful in 59s
CI / Check links (push) Failing after 1m32s
CI / Build and test (push) Successful in 1m21s
CI / Build container (push) Has been skipped
CI / Deploy on waypoint (push) Has been skipped
2025-11-09 21:20:47 -08:00
c13618e958 Transform images + placeholders
Some checks failed
CI / Check typos (push) Successful in 8s
CI / Check links (push) Failing after 10s
CI / Clippy (push) Successful in 56s
CI / Build and test (push) Successful in 1m22s
CI / Build container (push) Successful in 1m4s
CI / Deploy on waypoint (push) Successful in 46s
2025-11-08 13:12:25 -08:00
1329539059 Add pixel-transform 2025-11-08 13:12:23 -08:00
e70170ee5b Merge asset and page server 2025-11-08 09:33:12 -08:00
6cb54c2300 Generic servable
Some checks failed
CI / Check typos (push) Successful in 8s
CI / Check links (push) Failing after 11s
CI / Clippy (push) Successful in 53s
CI / Build and test (push) Successful in 1m10s
CI / Build container (push) Successful in 54s
CI / Deploy on waypoint (push) Successful in 43s
2025-11-07 10:31:48 -08:00
a3ff195de9 Footnotes
Some checks failed
CI / Check typos (push) Successful in 8s
CI / Check links (push) Failing after 12s
CI / Clippy (push) Successful in 54s
CI / Build and test (push) Successful in 1m11s
CI / Build container (push) Successful in 52s
CI / Deploy on waypoint (push) Successful in 44s
2025-11-06 22:20:01 -08:00
d508a0d031 Bacon 2025-11-06 22:16:15 -08:00
dc4260e147 Add md-footnote 2025-11-06 21:16:09 -08:00
4d8093c4a3 Env config
All checks were successful
CI / Check typos (push) Successful in 8s
CI / Check links (push) Successful in 6s
CI / Clippy (push) Successful in 51s
CI / Build and test (push) Successful in 1m7s
CI / Build container (push) Successful in 45s
CI / Deploy on waypoint (push) Successful in 42s
2025-11-06 20:45:44 -08:00
4091d8b486 Generate backlinks
All checks were successful
CI / Check typos (push) Successful in 8s
CI / Check links (push) Successful in 33s
CI / Clippy (push) Successful in 1m1s
CI / Build and test (push) Successful in 1m8s
CI / Build container (push) Successful in 45s
CI / Deploy on waypoint (push) Successful in 45s
2025-11-06 08:56:58 -08:00
5554aafc44 Redirect trailing slashes 2025-11-06 08:32:20 -08:00
85 changed files with 6222 additions and 1883 deletions

1541
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -64,12 +64,13 @@ type_complexity = "allow"
#
[workspace.dependencies]
macro-assets = { path = "crates/macro/macro-assets" }
macro-sass = { path = "crates/macro/macro-sass" }
assetserver = { path = "crates/lib/assetserver" }
libservice = { path = "crates/lib/libservice" }
toolbox = { path = "crates/lib/toolbox" }
page = { path = "crates/lib/page" }
md-footnote = { path = "crates/lib/md-footnote" }
md-dev = { path = "crates/lib/md-dev" }
pixel-transform = { path = "crates/lib/pixel-transform" }
service-webpage = { path = "crates/service/service-webpage" }
@@ -79,6 +80,8 @@ service-webpage = { path = "crates/service/service-webpage" }
#
axum = { version = "0.8.6", features = ["macros", "multipart"] }
tower-http = { version = "0.6.6", features = ["trace", "compression-full"] }
tower = { version = "0.5.2" }
serde_urlencoded = { version = "0.7.1" }
utoipa = "5.4.0"
utoipa-swagger-ui = { version = "9.0.2", features = [
"axum",
@@ -92,6 +95,7 @@ emojis = "0.8.0"
reqwest = { version = "0.12.24", default-features = false, features = [
"http2",
"rustls-tls",
"rustls-tls-webpki-roots", # Need to recompile to update
"cookies",
"gzip",
"stream",
@@ -117,6 +121,8 @@ tracing-loki = { version = "0.2.6", features = [
], default-features = false }
clap = { version = "4.5.51", features = ["derive"] }
anstyle = { version = "1.0.13" }
envy = "0.4.2"
dotenvy = "0.15.7"
#
# MARK: Serialization & formats
@@ -140,6 +146,14 @@ chrono = "0.4.42"
lru = "0.16.2"
parking_lot = "0.12.5"
lazy_static = "1.5.0"
image = "0.25.8"
scraper = "0.24.0"
futures = "0.3.31"
tempfile = "3.23.0"
# md_* test utilities
prettydiff = "0.9.0"
testing = "18.0.0"
#
# Macro utilities

23
README.md Normal file
View File

@@ -0,0 +1,23 @@
[utoipa]: https://docs.rs/utoipa/latest/utoipa/
[axum]: https://docs.rs/axum/latest/axum/
[betalupi.com]: https://betalupi.com
# Mark's webpage
This is the source code behind [betalupi.com], featuring a very efficient mini web framework written from scratch in Rust. It uses...
- [Axum](https://github.com/tokio-rs/axum) as an HTTP server
- [Maud](https://maud.lambda.xyz/) for html templates
- [Grass](https://github.com/connorskees/grass) to parse and compile [sass](https://sass-lang.com/)
- [markdown-it](https://github.com/markdown-it-rust/markdown-it) to convert md to html
## Overview & Architecture
- [`bin/webpage`](./crates/bin/webpage/): Simple cli that starts `service-webpage`
- [`lib/libservice`](./crates/lib/libservice): Provides the `Service` trait. A service is a group of http routes with an optional [utoipa] schema. \
This library decouples compiled binaries from the services they provide, and makes sure all services are self-contained.
- [`lib/page`](./crates/lib/page): Provides `PageServer`, which builds an [axum] router that adds caching and headers for resources served over HTTP.
- Also provides `Servable`, which is a trait for any resource that may be served.
- the `Page` servable serves html generated by a closure.
- the `StaticAsset` servable serves static assets (css, images, misc files), and provides transformation parameters for image assets (via [`pixel-transform`](./crates/lib/pixel-transform)).
- [`service/service-webpage`](./crates/service/service-webpage): A `Service` that runs a `PageServer` that provides the content on [betalupi.com]

125
bacon.toml Normal file
View File

@@ -0,0 +1,125 @@
# This is a configuration file for the bacon tool
#
# Complete help on configuration: https://dystroy.org/bacon/config/
#
# You may check the current default at
# https://github.com/Canop/bacon/blob/main/defaults/default-bacon.toml
default_job = "run"
env.CARGO_TERM_COLOR = "always"
[jobs.check]
command = ["cargo", "check"]
need_stdout = false
[jobs.check-all]
command = ["cargo", "check", "--all-targets"]
need_stdout = false
# Run clippy on the default target
[jobs.clippy]
command = ["cargo", "clippy"]
need_stdout = false
# Run clippy on all targets
# To disable some lints, you may change the job this way:
# [jobs.clippy-all]
# command = [
# "cargo", "clippy",
# "--all-targets",
# "--",
# "-A", "clippy::bool_to_int_with_if",
# "-A", "clippy::collapsible_if",
# "-A", "clippy::derive_partial_eq_without_eq",
# ]
# need_stdout = false
[jobs.clippy-all]
command = ["cargo", "clippy", "--all-targets"]
need_stdout = false
# Run clippy in pedantic mode
# The 'dismiss' feature may come handy
[jobs.pedantic]
command = [
"cargo", "clippy",
"--",
"-W", "clippy::pedantic",
]
need_stdout = false
# This job lets you run
# - all tests: bacon test
# - a specific test: bacon test -- config::test_default_files
# - the tests of a package: bacon test -- -- -p config
[jobs.test]
command = ["cargo", "test"]
need_stdout = true
[jobs.nextest]
command = [
"cargo", "nextest", "run",
"--hide-progress-bar", "--failure-output", "final"
]
need_stdout = true
analyzer = "nextest"
[jobs.doc]
command = ["cargo", "doc", "--no-deps"]
need_stdout = false
# If the doc compiles, then it opens in your browser and bacon switches
# to the previous job
[jobs.doc-open]
command = ["cargo", "doc", "--no-deps", "--open"]
need_stdout = false
on_success = "back" # so that we don't open the browser at each change
# You can run your application and have the result displayed in bacon,
# if it makes sense for this crate.
[jobs.run]
command = [
"cargo", "run", "serve", "0.0.0.0:3030",
]
need_stdout = true
allow_warnings = true
background = true
on_change_strategy = "kill_then_restart"
# Run your long-running application (eg server) and have the result displayed in bacon.
# For programs that never stop (eg a server), `background` is set to false
# to have the cargo run output immediately displayed instead of waiting for
# program's end.
# 'on_change_strategy' is set to `kill_then_restart` to have your program restart
# on every change (an alternative would be to use the 'F5' key manually in bacon).
# If you often use this job, it makes sense to override the 'r' key by adding
# a binding `r = job:run-long` at the end of this file .
# A custom kill command such as the one suggested below is frequently needed to kill
# long running programs (uncomment it if you need it)
[jobs.run-long]
command = [
"cargo", "run",
# put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"
# kill = ["pkill", "-TERM", "-P"]
# This parameterized job runs the example of your choice, as soon
# as the code compiles.
# Call it as
# bacon ex -- my-example
[jobs.ex]
command = ["cargo", "run", "--example"]
need_stdout = true
allow_warnings = true
# You may define here keybindings that would be specific to
# a project, for example a shortcut to launch a specific job.
# Shortcuts to internal functions (scrolling, toggling, etc.)
# should go in your personal global prefs.toml file instead.
[keybindings]
# alt-m = "job:my-job"
c = "job:clippy-all" # comment this to have 'c' run clippy on only the default target
p = "job:pedantic"

View File

@@ -17,3 +17,4 @@ tokio = { workspace = true }
axum = { workspace = true }
clap = { workspace = true }
anyhow = { workspace = true }
serde = { workspace = true }

View File

@@ -0,0 +1,106 @@
use serde::Deserialize;
use std::num::NonZeroUsize;
use toolbox::{
env::load_env,
logging::{LogFilterPreset, LoggingFormat, LoggingInitializer, LoggingTarget, LokiConfig},
};
use tracing::info;
/// Runtime configuration for the webpage service, deserialized from
/// environment variables (see [`WebpageConfig::load`]).
#[derive(Deserialize, Clone)]
pub struct WebpageConfig {
    /// Log filter preset; `#[serde(default)]` falls back to
    /// `LogFilterPreset::default()` when the variable is unset.
    #[serde(default)]
    pub loglevel: LogFilterPreset,
    /// Output format for stderr logging; defaults when unset.
    #[serde(default)]
    pub logformat: LoggingFormat,
    /// Optional Loki push target. Flattened, so its fields are read from
    /// top-level environment variables; `None` when they are absent.
    #[serde(flatten)]
    pub loki: Option<LokiConfig>,
    // How many threads tokio should use
    pub runtime_threads: Option<NonZeroUsize>,
    /// Cap on tokio's blocking-thread pool (see `make_runtime`).
    pub blocking_threads: Option<NonZeroUsize>,
}
impl WebpageConfig {
    /// Load configuration from the environment / `.env` file.
    ///
    /// On failure this prints the error and exits the process with
    /// status 1 rather than returning an error.
    pub fn load() -> Self {
        let loaded = match load_env::<WebpageConfig>() {
            Ok(x) => x,
            #[expect(clippy::print_stdout)]
            Err(err) => {
                println!("Error while loading .env: {err}");
                std::process::exit(1);
            }
        };
        let config = loaded.get_config().clone();
        // NOTE(review): `main` initializes logging *after* calling `load()`,
        // so this event fires before a subscriber exists — confirm it is not
        // silently dropped.
        info!(message = "Config loaded");
        return config;
    }

    /// Initialize logging (stderr, plus Loki when configured).
    ///
    /// Must be run inside a tokio context.
    /// Prints the error chain and exits with status 1 on failure.
    //
    // The commented-out `init_logging_noloki` variant that used to live here
    // was removed: it was dead code, and this doc comment referenced it as if
    // it were callable.
    pub async fn init_logging(&self) {
        let res = LoggingInitializer {
            app_name: "betalupi-webpage",
            loki: self.loki.clone(),
            preset: self.loglevel,
            target: LoggingTarget::Stderr {
                format: self.logformat,
            },
        }
        .initialize();
        if let Err(e) = res {
            #[expect(clippy::print_stderr)]
            for e in e.chain() {
                eprintln!("{e}");
            }
            std::process::exit(1);
        }
    }

    /// Build a multi-threaded tokio runtime, honoring the optional
    /// `runtime_threads` / `blocking_threads` limits from the config.
    pub fn make_runtime(&self) -> tokio::runtime::Runtime {
        let mut builder = tokio::runtime::Builder::new_multi_thread();
        builder.enable_all();
        if let Some(threads) = self.runtime_threads {
            builder.worker_threads(threads.into());
        }
        if let Some(threads) = self.blocking_threads {
            builder.max_blocking_threads(threads.into());
        }
        // Runtime construction only fails on OS-level resource exhaustion;
        // treat that as fatal.
        #[expect(clippy::unwrap_used)]
        let rt = builder.build().unwrap();
        return rt;
    }
}

View File

@@ -1,10 +1,11 @@
use clap::Parser;
use toolbox::logging::{LogCliVQ, LoggingFormat, LoggingInitializer, LoggingTarget};
use toolbox::logging::LogCliVQ;
use tracing::error;
use crate::cmd::Command;
use crate::{cmd::Command, config::WebpageConfig};
mod cmd;
mod config;
#[derive(Parser, Debug)]
#[command(version, about, long_about = None, styles=toolbox::cli::clap_styles())]
@@ -20,36 +21,19 @@ struct Cli {
command: Command,
}
#[derive(Debug)]
pub struct CmdContext {}
pub struct CmdContext {
config: WebpageConfig,
}
#[tokio::main]
async fn main() {
fn main() {
let cli = Cli::parse();
let ctx = CmdContext {
config: WebpageConfig::load(),
};
{
let res = LoggingInitializer {
app_name: "webpage",
loki: None,
preset: cli.vq.into_preset(),
target: LoggingTarget::Stderr {
format: LoggingFormat::Ansi,
},
}
.initialize();
if let Err(e) = res {
#[expect(clippy::print_stderr)]
for e in e.chain() {
eprintln!("{e}");
}
std::process::exit(1);
}
}
let ctx = CmdContext {};
let res = cli.command.run(ctx).await;
let rt = ctx.config.make_runtime();
rt.block_on(ctx.config.init_logging());
let res = rt.block_on(cli.command.run(ctx));
if let Err(e) = res {
for e in e.chain() {

View File

@@ -1,8 +0,0 @@
[package]
name = "assetserver"
version = { workspace = true }
rust-version = { workspace = true }
edition = { workspace = true }
[lints]
workspace = true

View File

@@ -1,14 +0,0 @@
/// A static asset with compile-time embedded data.
pub trait Asset {
/// The common URL prefix for all assets (e.g., "/assets")
const URL_PREFIX: &'static str;
/// The specific URL path for this asset (e.g., "/logo.png")
const URL_POSTFIX: &'static str;
/// The full URL for this asset (e.g., "/assets/logo.png")
const URL: &'static str;
/// The embedded file contents as a byte slice
const BYTES: &'static [u8];
}

View File

@@ -0,0 +1,15 @@
# Clone of
# https://github.com/markdown-it-rust/markdown-it-plugins.rs
[package]
name = "md-dev"
version = "0.2.0"
publish = false
rust-version = { workspace = true }
edition = { workspace = true }
[lints]
workspace = true
[dependencies]
prettydiff = { workspace = true }

View File

@@ -0,0 +1,109 @@
//! development utilities
//!
//! This contains shared code for reading test fixtures,
//! testing for differences, and regenerating expected output.
use prettydiff::diff_lines;
use std::path::PathBuf;
/// A single parsed test fixture: title, markdown input, expected output.
pub struct FixtureFile {
    /// Path the fixture was read from (used by `assert_no_diff` to regenerate).
    pub file: PathBuf,
    /// Human-readable test title (first section of the file).
    pub title: String,
    /// Input text (second section); surrounding blank lines stripped.
    pub input: String,
    /// Expected output (third section); leading blank lines stripped.
    pub expected: String,
}

/// Read a fixture file into a FixtureFile struct.
///
/// Fixture layout: a title, a separator line starting with `....`,
/// the input, another `....` separator, then the expected output.
///
/// # Panics
/// Panics if the file cannot be read or a `....` separator is missing.
pub fn read_fixture_file(file: PathBuf) -> FixtureFile {
    #[expect(clippy::unwrap_used)]
    let text = std::fs::read_to_string(&file).unwrap();
    let mut lines = text.lines();

    // Accumulate lines (each re-terminated with '\n') until a `....`
    // separator, panicking with `err` if the separator never appears.
    let mut section_until_separator = |err: &str| -> String {
        let mut section = String::new();
        loop {
            match lines.next() {
                None => panic!("{err}"),
                Some(line) if line.starts_with("....") => break,
                Some(line) => {
                    section.push_str(line);
                    section.push('\n');
                }
            }
        }
        section
    };

    let title = section_until_separator("no '....' line found to signal start of input");
    let input = section_until_separator("no '....' line found to signal start of expected output");

    // Everything after the second separator is the expected output.
    let mut expected = String::new();
    for line in lines {
        expected.push_str(line);
        expected.push('\n');
    }

    // Strip blank lines around the input and before the expected output
    // (trailing whitespace in `expected` is ignored by `assert_no_diff`).
    // `trim_*_matches` replaces the original's char-by-char loops, which
    // re-allocated the whole string per stripped newline (O(n^2)).
    let input = input
        .trim_start_matches('\n')
        .trim_end_matches('\n')
        .to_string();
    let expected = expected.trim_start_matches('\n').to_string();

    FixtureFile {
        file,
        title,
        input,
        expected,
    }
}
/// Assert that the actual output matches the fixture's expected output,
/// panicking with a line diff when it does not.
///
/// When the `FORCE_REGEN` environment variable is set, a mismatch instead
/// rewrites the fixture file with the actual output (and still panics so
/// the test run reports which fixtures were regenerated).
pub fn assert_no_diff(f: FixtureFile, actual: &str) {
    // Fast path: outputs agree modulo trailing whitespace.
    if actual.trim_end() == f.expected.trim_end() {
        return;
    }
    let diff = diff_lines(&f.expected, actual);

    if std::env::var("FORCE_REGEN").is_err() {
        panic!(
            "\n{}\nDiff:\n{}\nSet FORCE_REGEN=true to update fixture",
            f.title, diff
        );
    }

    // FORCE_REGEN is set: overwrite the fixture with the actual output.
    let regenerated = format!(
        "{}\n......\n\n{}\n\n......\n\n{}\n",
        f.title.trim_end(),
        f.input,
        actual.trim_end()
    );
    match std::fs::write(f.file, regenerated) {
        Ok(()) => panic!(
            "\n{}\nDiff:\n{}\n\nRegenerated expected output",
            f.title, diff
        ),
        Err(_) => panic!(
            "\n{}\nDiff:\n{}\n\nFailed to regenerate expected output",
            f.title, diff
        ),
    }
}

View File

@@ -0,0 +1,21 @@
# Clone of
# https://github.com/markdown-it-rust/markdown-it-plugins.rs
[package]
name = "md-footnote"
version = "0.2.0"
description = "A markdown-it plugin for parsing footnotes"
readme = "README.md"
license = "Apache-2.0"
rust-version = { workspace = true }
edition = { workspace = true }
[lints]
workspace = true
[dependencies]
markdown-it = { workspace = true }
[dev-dependencies]
md-dev = { workspace = true }
testing = { workspace = true }

View File

@@ -0,0 +1,59 @@
# markdown-it-footnote.rs
[<img alt="crates.io" src="https://img.shields.io/crates/v/markdown-it-footnote.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/markdown-it-footnote)
A [markdown-it.rs](https://crates.io/crates/markdown-it) plugin to process footnotes.
It is based on the [pandoc definition](http://johnmacfarlane.net/pandoc/README.html#footnotes):
```md
Normal footnote:
Here is a footnote reference,[^1] and another.[^longnote]
Here is an inline note.^[my note is here!]
[^1]: Here is the footnote.
[^longnote]: Here's one with multiple blocks.
Subsequent paragraphs are indented to show that they
belong to the previous footnote.
```
See the [tests](tests/fixtures) for more examples.
## Usage
To load the full plugin:
```rust
let parser = &mut markdown_it::MarkdownIt::new();
markdown_it::plugins::cmark::add(parser);
md_footnote::add(parser);
let ast = parser.parse("Example^[my note]");
let html = ast.render();
```
Alternatively, you can load the separate components:
```rust
let parser = &mut markdown_it::MarkdownIt::new();
markdown_it::plugins::cmark::add(parser);
md_footnote::definitions::add(parser);
md_footnote::references::add(parser);
md_footnote::inline::add(parser);
md_footnote::collect::add(parser);
md_footnote::back_refs::add(parser);
```
Which have the following roles:
- `definitions`: parse footnote definitions, e.g. `[^1]: foo`
- `references`: parse footnote references, e.g. `[^1]`
- `inline`: parse inline footnotes, e.g. `^[foo]`
- `collect`: collect footnote definitions (removing duplicate/unreferenced ones) and move them to be the last child of the root node.
- `back_refs`: add anchor(s) to footnote definitions, with links back to the reference(s)

View File

@@ -0,0 +1,107 @@
//! Plugin to add anchor(s) to footnote definitions,
//! with links back to the reference(s).
//!
//! ```rust
//! let parser = &mut markdown_it::MarkdownIt::new();
//! markdown_it::plugins::cmark::add(parser);
//! md_footnote::references::add(parser);
//! md_footnote::definitions::add(parser);
//! md_footnote::back_refs::add(parser);
//! let root = parser.parse("[^label]\n\n[^label]: This is a footnote");
//! let mut names = vec![];
//! root.walk(|node,_| { names.push(node.name()); });
//! assert_eq!(names, vec![
//! "markdown_it::parser::core::root::Root",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "md_footnote::references::FootnoteReference",
//! "md_footnote::definitions::FootnoteDefinition",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! "md_footnote::back_refs::FootnoteRefAnchor",
//! ]);
//! ```
use markdown_it::{
MarkdownIt, Node, NodeValue,
parser::core::{CoreRule, Root},
plugins::cmark::block::paragraph::Paragraph,
};
use crate::{FootnoteMap, definitions::FootnoteDefinition};
/// Add the backref plugin to the parser.
pub fn add(md: &mut MarkdownIt) {
    // insert this rule into parser
    md.add_rule::<FootnoteBackrefRule>();
}
/// AST node appended to a footnote definition, rendering one backlink
/// anchor per reference pointing at that definition.
#[derive(Debug)]
pub struct FootnoteRefAnchor {
    /// Ids of all references to this footnote definition.
    pub ref_ids: Vec<usize>,
}

impl NodeValue for FootnoteRefAnchor {
    fn render(&self, _: &Node, fmt: &mut dyn markdown_it::Renderer) {
        for &ref_id in &self.ref_ids {
            fmt.text_raw("&nbsp;");
            let attrs = [
                ("href", format!("#fnref{}", ref_id)),
                ("class", String::from("footnote-backref")),
            ];
            fmt.open("a", &attrs);
            // ↩ followed by a text variation selector so the arrow is not
            // displayed as an Apple emoji on iOS
            fmt.text_raw("back&nbsp;\u{21a9}\u{FE0E}");
            fmt.close("a");
        }
    }
}
// This is an extension for the markdown parser.
struct FootnoteBackrefRule;

impl CoreRule for FootnoteBackrefRule {
    /// Append a `FootnoteRefAnchor` to every referenced footnote definition
    /// so rendered footnotes link back to their reference site(s).
    fn run(root: &mut Node, _: &MarkdownIt) {
        // Temporarily take the root extensions so we can read the
        // FootnoteMap while mutably walking the tree (same approach as
        // markdown_it::InlineParserRule).
        #[expect(clippy::unwrap_used)]
        let data = root.cast_mut::<Root>().unwrap();
        let root_ext = std::mem::take(&mut data.ext);
        let map = match root_ext.get::<FootnoteMap>() {
            Some(map) => map,
            None => {
                // No footnotes were parsed. BUG FIX: restore the extensions
                // before bailing out — the original returned here without
                // putting them back, dropping all other root extensions.
                data.ext = root_ext;
                return;
            }
        };
        // walk through the AST and add backref anchors to footnote definitions
        root.walk_mut(|node, _| {
            let Some(def_node) = node.cast::<FootnoteDefinition>() else {
                return;
            };
            let ref_ids = match def_node.def_id {
                Some(def_id) => map.referenced_by(def_id),
                None => Vec::new(),
            };
            if ref_ids.is_empty() {
                return;
            }
            // If the final child is a paragraph node, append the anchor to
            // its children; otherwise append it to the node's own children.
            match node.children.last_mut() {
                Some(last) if last.is::<Paragraph>() => {
                    last.children.push(Node::new(FootnoteRefAnchor { ref_ids }));
                }
                _ => {
                    node.children.push(Node::new(FootnoteRefAnchor { ref_ids }));
                }
            }
        });
        // Put the extensions back.
        #[expect(clippy::unwrap_used)]
        let data = root.cast_mut::<Root>().unwrap();
        data.ext = root_ext;
    }
}

View File

@@ -0,0 +1,140 @@
//! Plugin to collect footnote definitions,
//! removing duplicate/unreferenced ones,
//! and move them to be the last child of the root node.
//!
//! ```rust
//! let parser = &mut markdown_it::MarkdownIt::new();
//! markdown_it::plugins::cmark::add(parser);
//! md_footnote::references::add(parser);
//! md_footnote::definitions::add(parser);
//! md_footnote::collect::add(parser);
//! let root = parser.parse("[^label]\n\n[^label]: This is a footnote\n\n> quote");
//! let mut names = vec![];
//! root.walk(|node,_| { names.push(node.name()); });
//! assert_eq!(names, vec![
//! "markdown_it::parser::core::root::Root",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "md_footnote::references::FootnoteReference",
//! "markdown_it::plugins::cmark::block::blockquote::Blockquote",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! "md_footnote::collect::FootnotesContainerNode",
//! "md_footnote::definitions::FootnoteDefinition",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! ]);
//! ```
use markdown_it::{
MarkdownIt, Node, NodeValue,
parser::core::{CoreRule, Root},
plugins::cmark::block::paragraph::Paragraph,
};
use crate::{FootnoteMap, definitions::FootnoteDefinition};
/// Add the collect plugin to the parser.
pub fn add(md: &mut MarkdownIt) {
    // insert this rule into parser
    md.add_rule::<FootnoteCollectRule>();
}
/// Temporary stand-in left where a footnote definition is extracted from
/// the tree; all placeholders are swept out before the rule finishes.
#[derive(Debug)]
struct PlaceholderNode;
impl NodeValue for PlaceholderNode {}
/// Container appended as the last child of the root, wrapping all collected
/// footnote definitions. Renders as an `<hr class="footnotes-sep">` followed
/// by `<section class="footnotes"><ol class="footnotes-list">…</ol></section>`.
#[derive(Debug)]
pub struct FootnotesContainerNode;

impl NodeValue for FootnotesContainerNode {
    fn render(&self, node: &Node, fmt: &mut dyn markdown_it::Renderer) {
        // Merge the node's own attributes with the footnotes class.
        let mut attrs = node.attrs.clone();
        attrs.push(("class", "footnotes".into()));
        fmt.cr();
        fmt.self_close("hr", &[("class", "footnotes-sep".into())]);
        fmt.cr();
        fmt.open("section", &attrs);
        fmt.cr();
        fmt.open("ol", &[("class", "footnotes-list".into())]);
        fmt.cr();
        fmt.contents(&node.children);
        fmt.cr();
        fmt.close("ol");
        fmt.cr();
        fmt.close("section");
        fmt.cr();
    }
}
// This is an extension for the markdown parser.
struct FootnoteCollectRule;

impl CoreRule for FootnoteCollectRule {
    /// Invoked once per document: removes unreferenced/unmatched footnote
    /// definitions and moves the remaining ones into a single
    /// `FootnotesContainerNode` appended to the root.
    fn run(root: &mut Node, _: &MarkdownIt) {
        // Temporarily take the root extensions so we can read the
        // FootnoteMap while mutably walking the tree (same approach as
        // markdown_it::InlineParserRule).
        #[expect(clippy::unwrap_used)]
        let data = root.cast_mut::<Root>().unwrap();
        let root_ext = std::mem::take(&mut data.ext);

        if let Some(map) = root_ext.get::<FootnoteMap>() {
            // Walk the AST and extract every referenced footnote definition,
            // leaving placeholders where they stood (swept out below).
            let mut defs = vec![];
            root.walk_mut(|node, _| {
                for child in node.children.iter_mut() {
                    if !child.is::<FootnoteDefinition>() {
                        continue;
                    }
                    let mut extracted = std::mem::replace(child, Node::new(PlaceholderNode));
                    match extracted.cast::<FootnoteDefinition>() {
                        Some(def_node) => {
                            // Skip definitions that are never referenced;
                            // the placeholder sweep deletes them from the tree.
                            match def_node.def_id {
                                Some(def_id) if !map.referenced_by(def_id).is_empty() => {}
                                _ => continue,
                            }
                            if def_node.inline {
                                // Inline footnotes carry bare inline children;
                                // wrap them in a paragraph for rendering.
                                let mut para = Node::new(Paragraph);
                                std::mem::swap(&mut para.children, &mut extracted.children);
                                extracted.children = vec![para];
                            }
                        }
                        None => continue,
                    }
                    defs.push(extracted);
                }
                node.children.retain(|child| !child.is::<PlaceholderNode>());
            });

            if !defs.is_empty() {
                // Wrap the definitions in a container and append it to the root.
                let mut wrapper = Node::new(FootnotesContainerNode);
                wrapper.children = defs;
                root.children.push(wrapper);
            }
        }

        // Always restore the root extensions. BUG FIX: the original leaked
        // them on two early-return paths (no FootnoteMap, and no referenced
        // definitions found), dropping unrelated extension state.
        #[expect(clippy::unwrap_used)]
        let data = root.cast_mut::<Root>().unwrap();
        data.ext = root_ext;
    }
}

View File

@@ -0,0 +1,179 @@
//! Plugin to parse footnote definitions
//!
//! ```rust
//! let parser = &mut markdown_it::MarkdownIt::new();
//! markdown_it::plugins::cmark::add(parser);
//! md_footnote::definitions::add(parser);
//! let root = parser.parse("[^label]: This is a footnote");
//! let mut names = vec![];
//! root.walk(|node,_| { names.push(node.name()); });
//! assert_eq!(names, vec![
//! "markdown_it::parser::core::root::Root",
//! "md_footnote::definitions::FootnoteDefinition",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! ]);
//! ```
use markdown_it::parser::block::{BlockRule, BlockState};
use markdown_it::plugins::cmark::block::reference::ReferenceScanner;
use markdown_it::{MarkdownIt, Node, NodeValue, Renderer};
use crate::FootnoteMap;
/// Add the footnote definition plugin to the parser
pub fn add(md: &mut MarkdownIt) {
    // insert this rule into block subparser; it is ordered before the cmark
    // ReferenceScanner (presumably because `[^label]: …` would otherwise be
    // consumed as a link reference definition — confirm upstream)
    md.block
        .add_rule::<FootnoteDefinitionScanner>()
        .before::<ReferenceScanner>();
}
#[derive(Debug)]
/// AST node for footnote definition
///
/// Renders as `<li class="footnote-item" id="fn{def_id}">…</li>`;
/// the `id` attribute is omitted when `def_id` is `None`.
pub struct FootnoteDefinition {
    /// Label as written in the source (e.g. `"1"` for `[^1]:`); set by the
    /// block scanner. Presumably `None` for inline footnotes — confirm in
    /// `inline.rs`.
    pub label: Option<String>,
    /// Id assigned via `FootnoteMap::add_def`, pairing this definition with
    /// its references.
    pub def_id: Option<usize>,
    /// True when the definition came from an inline footnote.
    pub inline: bool,
}
impl NodeValue for FootnoteDefinition {
    fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
        let mut attrs = node.attrs.clone();
        if let Some(def_id) = self.def_id {
            attrs.push(("id", format!("fn{}", def_id)));
        }
        attrs.push(("class", "footnote-item".into()));
        fmt.cr();
        fmt.open("li", &attrs);
        fmt.contents(&node.children);
        fmt.close("li");
        fmt.cr();
    }
}
/// An extension for the block subparser.
struct FootnoteDefinitionScanner;
impl FootnoteDefinitionScanner {
    /// Check whether the current line opens a footnote definition
    /// (`[^label]:`). Returns the label and the count of whitespace
    /// characters between the `:` and the first content character, or
    /// `None` if the line is not a definition.
    fn is_def(state: &mut BlockState<'_, '_>) -> Option<(String, usize)> {
        // lines indented at least md.max_indent cannot start a definition
        if state.line_indent(state.line) >= state.md.max_indent {
            return None;
        }
        let mut chars = state.get_line(state.line).chars();
        // check line starts with the correct syntax
        let Some('[') = chars.next() else {
            return None;
        };
        let Some('^') = chars.next() else {
            return None;
        };
        // gather the label
        let mut label = String::new();
        // The labels in footnote references may not contain spaces, tabs, or newlines.
        // Backslash escapes form part of the label and do not escape anything
        loop {
            match chars.next() {
                None => return None,
                Some(']') => {
                    // the label must be terminated by `]:` exactly
                    if let Some(':') = chars.next() {
                        break;
                    } else {
                        return None;
                    }
                }
                // NOTE(review): the comment above also disallows tabs, but only
                // ' ' is rejected here — a '\t' falls through into the label.
                // Confirm whether that is intended.
                Some(' ') => return None,
                Some(c) => label.push(c),
            }
        }
        if label.is_empty() {
            return None;
        }
        // get number of spaces to next non-space character
        let mut spaces = 0;
        loop {
            match chars.next() {
                None => break,
                Some(' ') => spaces += 1,
                Some('\t') => spaces += 1, // spaces += 4 - spaces % 4,
                Some(_) => break,
            }
        }
        Some((label, spaces))
    }
}
impl BlockRule for FootnoteDefinitionScanner {
    /// Decide whether a definition may interrupt the current block.
    fn check(state: &mut BlockState<'_, '_>) -> Option<()> {
        // can interrupt a block elements,
        // but only if its a child of another footnote definition
        // TODO I think strictly only paragraphs should be interrupted, but this is not yet possible in markdown-it.rs
        if state.node.is::<FootnoteDefinition>() && Self::is_def(state).is_some() {
            return Some(());
        }
        None
    }
    /// Parse a footnote definition and tokenize its (indented) body as the
    /// definition's children, restoring all parser state afterwards.
    fn run(state: &mut BlockState<'_, '_>) -> Option<(Node, usize)> {
        let (label, spaces) = Self::is_def(state)?;
        // record the footnote label, so we can match references to it later
        let foot_map = state.root_ext.get_or_insert_default::<FootnoteMap>();
        let def_id = foot_map.add_def(&label);
        // temporarily set the current node to the footnote definition
        // so child nodes are added to it
        let new_node = Node::new(FootnoteDefinition {
            label: Some(label.clone()),
            def_id,
            inline: false,
        });
        let old_node = std::mem::replace(&mut state.node, new_node);
        // store the current line and its offsets, so we can restore later
        let first_line = state.line;
        let first_line_offsets = state.line_offsets[first_line].clone();
        // temporarily change the first line offsets to account for the footnote label
        // TODO this is not quite the same as pandoc where spaces >= 8 is code block (here >= 4)
        state.line_offsets[first_line].first_nonspace += "[^]:".len() + label.len() + spaces;
        state.line_offsets[first_line].indent_nonspace += "[^]:".len() as i32 + spaces as i32;
        // tokenize with a +4 space indent
        state.blk_indent += 4;
        state.md.block.tokenize(state);
        state.blk_indent -= 4;
        // get the number of lines the footnote definition occupies
        let num_lines = state.line - first_line;
        // restore the first line and its offsets
        state.line_offsets[first_line] = first_line_offsets;
        state.line = first_line;
        // restore the original node and return the footnote and number of lines it occupies
        Some((std::mem::replace(&mut state.node, old_node), num_lines))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: a definition followed by a paragraph parses into a
    /// FootnoteDefinition as the first child of the root.
    #[test]
    fn it_works() {
        let parser = &mut markdown_it::MarkdownIt::new();
        markdown_it::plugins::cmark::add(parser);
        markdown_it::plugins::sourcepos::add(parser);
        add(parser);
        let node = parser.parse("[^note]: a\n\nhallo\nthere\n");
        // println!("{:#?}", node);
        assert!(node.children.first().unwrap().is::<FootnoteDefinition>());
        // let text = node.render();
        // assert_eq!(text, "hallo\n")
    }
}

View File

@@ -0,0 +1,147 @@
//! Plugin to parse inline footnotes
//!
//! ```rust
//! let parser = &mut markdown_it::MarkdownIt::new();
//! markdown_it::plugins::cmark::add(parser);
//! md_footnote::inline::add(parser);
//! let root = parser.parse("Example^[This is a footnote]");
//! let mut names = vec![];
//! root.walk(|node,_| { names.push(node.name()); });
//! assert_eq!(names, vec![
//! "markdown_it::parser::core::root::Root",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! "md_footnote::inline::InlineFootnote",
//! "md_footnote::definitions::FootnoteDefinition",
//! "markdown_it::parser::inline::builtin::skip_text::Text",
//! "md_footnote::references::FootnoteReference"
//! ]);
//! ```
use markdown_it::{
MarkdownIt, Node, NodeValue,
parser::inline::{InlineRule, InlineState},
};
use crate::{FootnoteMap, definitions::FootnoteDefinition};
/// Add the inline footnote plugin to the parser
pub fn add(md: &mut MarkdownIt) {
    // insert this rule into inline subparser
    md.inline.add_rule::<InlineFootnoteScanner>();
}

/// AST node for an inline footnote `^[...]`.
///
/// Its two children are the [FootnoteDefinition] built from the
/// bracketed content and the matching
/// [crate::references::FootnoteReference].
#[derive(Debug)]
pub struct InlineFootnote;

impl NodeValue for InlineFootnote {
    fn render(&self, node: &Node, fmt: &mut dyn markdown_it::Renderer) {
        // simply pass-through to children
        fmt.contents(&node.children);
    }
}
// This is an extension for the inline subparser.
struct InlineFootnoteScanner;

impl InlineRule for InlineFootnoteScanner {
    const MARKER: char = '^';

    // Cheap lookahead: verify the `^[` prefix and a matching closing
    // bracket, and report the construct's length without building nodes.
    fn check(state: &mut InlineState<'_, '_>) -> Option<usize> {
        let mut chars = state.src[state.pos..state.pos_max].chars();
        // check line starts with the correct syntax
        let Some('^') = chars.next() else {
            return None;
        };
        let Some('[') = chars.next() else {
            return None;
        };
        let content_start = state.pos + 2;
        match parse_footnote(state, content_start) {
            // +1 so the reported length includes the closing `]`
            Some(content_end) => Some(content_end + 1 - state.pos),
            None => None,
        }
    }

    fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
        let mut chars = state.src[state.pos..state.pos_max].chars();
        // check line starts with the correct syntax
        let Some('^') = chars.next() else {
            return None;
        };
        let Some('[') = chars.next() else {
            return None;
        };
        let content_start = state.pos + 2;
        match parse_footnote(state, content_start) {
            Some(content_end) => {
                let foot_map = state.root_ext.get_or_insert_default::<FootnoteMap>();
                // inline footnotes have no label, so fresh ids are allocated
                let (def_id, ref_id) = foot_map.add_inline_def();
                // create node and set it as current
                let current_node = std::mem::replace(
                    &mut state.node,
                    Node::new(FootnoteDefinition {
                        label: None,
                        def_id: Some(def_id),
                        inline: true,
                    }),
                );
                // perform nested parsing, confined to the footnote's
                // content span by temporarily narrowing pos/pos_max
                let start = state.pos;
                let max = state.pos_max;
                state.pos = content_start;
                state.pos_max = content_end;
                state.md.inline.tokenize(state);
                state.pos = start;
                state.pos_max = max;
                // restore current node
                let def_node = std::mem::replace(&mut state.node, current_node);
                let ref_node = Node::new(crate::references::FootnoteReference {
                    label: None,
                    ref_id,
                    def_id,
                });
                // wrap the footnote definition and reference in an outer node to return
                let mut outer_node = Node::new(InlineFootnote);
                outer_node.children = vec![def_node, ref_node];
                Some((outer_node, content_end + 1 - state.pos))
            }
            None => None,
        }
    }
}
// Locate the end of an inline footnote's content.
//
// Returns the position of the closing `]`, or `None` when no closing
// bracket appears before `pos_max`. The caller guarantees the opening
// `[` has already been matched.
// NOTE(review): scanning begins at `start + 1`, so the character at
// `start` itself is never inspected for `]` — confirm this offset is
// intentional.
fn parse_footnote(state: &mut InlineState<'_, '_>, start: usize) -> Option<usize> {
    let saved_pos = state.pos;
    state.pos = start + 1;
    let mut closing = None;
    while let Some(ch) = state.src[state.pos..state.pos_max].chars().next() {
        if ch == ']' {
            closing = Some(state.pos);
            break;
        }
        // step over nested inline constructs so their `]` doesn't end us
        state.md.inline.skip_token(state);
    }
    // restore the scanner position before returning
    state.pos = saved_pos;
    closing
}

View File

@@ -0,0 +1,89 @@
//! A [markdown_it] plugin for parsing footnotes
//!
//! ```
//! let parser = &mut markdown_it::MarkdownIt::new();
//! md_footnote::add(parser);
//! let node = parser.parse("[^note]\n\n[^note]: A footnote\n");
//! ```
use std::collections::HashMap;
use markdown_it::{MarkdownIt, parser::extset::RootExt};
pub mod back_refs;
pub mod collect;
pub mod definitions;
pub mod inline;
pub mod references;
// Silence lints
#[cfg(test)]
use md_dev as _;
#[cfg(test)]
use testing as _;
/// Add the full footnote plugin to the parser
///
/// Registers every sub-plugin of this crate; see the individual
/// modules for what each rule contributes.
pub fn add(md: &mut MarkdownIt) {
    definitions::add(md);
    references::add(md);
    inline::add(md);
    collect::add(md);
    back_refs::add(md);
}
/// The set of parsed footnote definition labels,
/// stored in the root node.
#[derive(Debug, Default)]
pub struct FootnoteMap {
    // monotonically increasing id source for definitions
    def_counter: usize,
    // monotonically increasing id source for references
    ref_counter: usize,
    // definition label -> definition id
    label_to_def: HashMap<String, usize>,
    // definition id -> ids of every reference pointing at it
    def_to_refs: HashMap<usize, Vec<usize>>,
}

impl RootExt for FootnoteMap {}
impl FootnoteMap {
    /// Create an ID for the definition,
    /// or return None if a definition already exists for the label
    pub fn add_def(&mut self, label: &str) -> Option<usize> {
        if self.label_to_def.contains_key(label) {
            return None;
        }
        self.def_counter += 1;
        self.label_to_def
            .insert(String::from(label), self.def_counter);
        Some(self.def_counter)
    }

    /// Create an ID for the reference and return (def_id, ref_id),
    /// or return None if no definition exists for the label
    pub fn add_ref(&mut self, label: &str) -> Option<(usize, usize)> {
        // `?` propagates the "unknown label" case
        let def_id = *self.label_to_def.get(label)?;
        self.ref_counter += 1;
        // entry API: one lookup instead of get_mut + insert fallback
        self.def_to_refs
            .entry(def_id)
            .or_default()
            .push(self.ref_counter);
        Some((def_id, self.ref_counter))
    }

    /// Add an inline definition and return (def_id, ref_id)
    pub fn add_inline_def(&mut self) -> (usize, usize) {
        self.def_counter += 1;
        self.ref_counter += 1;
        self.def_to_refs
            .insert(self.def_counter, vec![self.ref_counter]);
        (self.def_counter, self.ref_counter)
    }

    /// Return the IDs of all references to the given definition ID
    /// (empty when the definition has never been referenced)
    pub fn referenced_by(&self, def_id: usize) -> Vec<usize> {
        self.def_to_refs.get(&def_id).cloned().unwrap_or_default()
    }
}

View File

@@ -0,0 +1,108 @@
//! Plugin to parse footnote references
//!
//! ```rust
//! let parser = &mut markdown_it::MarkdownIt::new();
//! markdown_it::plugins::cmark::add(parser);
//! md_footnote::references::add(parser);
//! md_footnote::definitions::add(parser);
//! let root = parser.parse("[^label]\n\n[^label]: This is a footnote");
//! let mut names = vec![];
//! root.walk(|node,_| { names.push(node.name()); });
//! assert_eq!(names, vec![
//! "markdown_it::parser::core::root::Root",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "md_footnote::references::FootnoteReference",
//! "md_footnote::definitions::FootnoteDefinition",
//! "markdown_it::plugins::cmark::block::paragraph::Paragraph",
//! "markdown_it::parser::inline::builtin::skip_text::Text"
//! ]);
//! ```
use markdown_it::parser::inline::{InlineRule, InlineState};
use markdown_it::{MarkdownIt, Node, NodeValue, Renderer};
use crate::FootnoteMap;
/// Add the footnote reference parsing to the markdown parser
pub fn add(md: &mut MarkdownIt) {
    // insert this rule into inline subparser
    md.inline.add_rule::<FootnoteReferenceScanner>();
}

/// AST node for footnote reference
#[derive(Debug)]
pub struct FootnoteReference {
    /// The label as written in the source (`None` for inline footnotes)
    pub label: Option<String>,
    /// Unique id of this reference (rendered as the `fnref{n}` anchor)
    pub ref_id: usize,
    /// Id of the definition this points at (rendered as `#fn{n}` href)
    pub def_id: usize,
}
impl NodeValue for FootnoteReference {
    // Renders e.g.
    // <sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup>
    fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
        let mut attrs = node.attrs.clone();
        attrs.push(("class", "footnote-ref".into()));
        fmt.open("sup", &attrs);
        fmt.open(
            "a",
            &[
                ("href", format!("#fn{}", self.def_id)),
                ("id", format!("fnref{}", self.ref_id)),
            ],
        );
        // the visible marker is the definition number, so duplicate
        // references to one definition show the same number
        fmt.text(&format!("[{}]", self.def_id));
        fmt.close("a");
        fmt.close("sup");
    }
}
// This is an extension for the inline subparser.
struct FootnoteReferenceScanner;

impl InlineRule for FootnoteReferenceScanner {
    const MARKER: char = '[';

    // Parse a `[^label]` reference; emits a node only when a definition
    // for the label is already recorded in the FootnoteMap.
    fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
        let mut chars = state.src[state.pos..state.pos_max].chars();
        // check line starts with the correct syntax
        let Some('[') = chars.next() else {
            return None;
        };
        let Some('^') = chars.next() else {
            return None;
        };
        // gather the label
        let mut label = String::new();
        // The labels in footnote references may not contain spaces, tabs, or newlines.
        // Backslash escapes form part of the label and do not escape anything
        loop {
            match chars.next() {
                // input ended before a closing bracket: not a reference
                None => return None,
                Some(']') => {
                    break;
                }
                // fix: previously only ' ' was rejected, contradicting the
                // documented rule; tabs and newlines now also abort the match
                Some(' ' | '\t' | '\n') => return None,
                Some(c) => label.push(c),
            }
        }
        if label.is_empty() {
            return None;
        }
        let definitions = state.root_ext.get_or_insert_default::<FootnoteMap>();
        // bail out if the label has no known definition
        let (def_id, ref_id) = definitions.add_ref(&label)?;
        let length = label.len() + 3; // 3 for '[^' and ']'
        // return new node and length of this structure
        Some((
            Node::new(FootnoteReference {
                label: Some(label),
                ref_id,
                def_id,
            }),
            length,
        ))
    }
}

View File

@@ -0,0 +1,19 @@
#![expect(unused_imports)]
#![expect(unused_crate_dependencies)]
use std::path::PathBuf;
use testing::fixture;
/*
#[fixture("tests/fixtures/[!_]*.md")]
fn test_html(file: PathBuf) {
let f = md_dev::read_fixture_file(file);
let parser = &mut markdown_it::MarkdownIt::new();
markdown_it::plugins::sourcepos::add(parser);
markdown_it::plugins::cmark::add(parser);
md_footnote::add(parser);
let actual = parser.parse(&f.input).render();
md_dev::assert_no_diff(f, &actual);
}
*/

View File

@@ -0,0 +1,31 @@
Basic test
......
[^a]
[^a]
[^a]: Multi
line
Multi-paragraph
[^a]: duplicate
normal paragraph
......
<p data-sourcepos="1:1-2:4"><sup data-sourcepos="1:1-1:4" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup>
<sup data-sourcepos="2:1-2:4" class="footnote-ref"><a href="#fn1" id="fnref2">[1]</a></sup></p>
<p data-sourcepos="11:1-11:16">normal paragraph</p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="4:1-8:0" id="fn1" class="footnote-item">
<p data-sourcepos="4:7-5:4">Multi
line</p>
<p data-sourcepos="7:5-7:19">Multi-paragraph <a href="#fnref1" class="footnote-backref">↩︎</a> <a href="#fnref2" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,45 @@
Pandoc example
......
Here is a footnote reference,[^1] and another.[^longnote]
[^1]: Here is the footnote.
[^longnote]: Here's one with multiple blocks.
Subsequent paragraphs are indented to show that they
belong to the previous footnote.
{ some.code }
The whole paragraph can be indented, or just the first
line. In this way, multi-paragraph footnotes work like
multi-paragraph list items.
This paragraph won't be part of the note, because it
isn't indented.
......
<p data-sourcepos="1:1-1:57">Here is a footnote reference,<sup data-sourcepos="1:30-1:33" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup> and another.<sup data-sourcepos="1:47-1:57" class="footnote-ref"><a href="#fn2" id="fnref2">[2]</a></sup></p>
<p data-sourcepos="16:1-17:15">This paragraph won't be part of the note, because it
isn't indented.</p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-4:0" id="fn1" class="footnote-item">
<p data-sourcepos="3:7-3:27">Here is the footnote. <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
<li data-sourcepos="5:1-15:0" id="fn2" class="footnote-item">
<p data-sourcepos="5:14-5:45">Here's one with multiple blocks.</p>
<p data-sourcepos="7:5-8:32">Subsequent paragraphs are indented to show that they
belong to the previous footnote.</p>
<pre><code data-sourcepos="10:9-10:21">{ some.code }
</code></pre>
<p data-sourcepos="12:5-14:31">The whole paragraph can be indented, or just the first
line. In this way, multi-paragraph footnotes work like
multi-paragraph list items. <a href="#fnref2" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,19 @@
Newline after footnote identifier
......
[^a]
[^a]:
b
......
<p data-sourcepos="1:1-1:4"><sup data-sourcepos="1:1-1:4" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<p data-sourcepos="4:1-4:1">b</p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-3:5" id="fn1" class="footnote-item"> <a href="#fnref1" class="footnote-backref">↩︎</a></li>
</ol>
</section>

View File

@@ -0,0 +1,27 @@
They could terminate each other
......
[^1][^2][^3]
[^1]: foo
[^2]: bar
[^3]: baz
......
<p data-sourcepos="1:1-1:12"><sup data-sourcepos="1:1-1:4" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup><sup data-sourcepos="1:5-1:8" class="footnote-ref"><a href="#fn2" id="fnref2">[2]</a></sup><sup data-sourcepos="1:9-1:12" class="footnote-ref"><a href="#fn3" id="fnref3">[3]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-3:9" id="fn1" class="footnote-item">
<p data-sourcepos="3:7-3:9">foo <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
<li data-sourcepos="4:1-4:9" id="fn2" class="footnote-item">
<p data-sourcepos="4:7-4:9">bar <a href="#fnref2" class="footnote-backref">↩︎</a></p>
</li>
<li data-sourcepos="5:1-5:9" id="fn3" class="footnote-item">
<p data-sourcepos="5:7-5:9">baz <a href="#fnref3" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,23 @@
They could be inside blockquotes, and are lazy
......
[^foo]
> [^foo]: bar
baz
......
<p data-sourcepos="1:1-1:6"><sup data-sourcepos="1:1-1:6" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<blockquote data-sourcepos="3:1-4:3">
</blockquote>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:3-4:3" id="fn1" class="footnote-item">
<p data-sourcepos="3:11-4:3">bar
baz <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,14 @@
Their labels could not contain spaces or newlines
......
[^ foo]: bar baz
[^foo
]: bar baz
......
<p data-sourcepos="1:1-1:16">[^ foo]: bar baz</p>
<p data-sourcepos="3:1-4:10">[^foo
]: bar baz</p>

View File

@@ -0,0 +1,19 @@
Duplicate footnotes:
......
[^xxxxx] [^xxxxx]
[^xxxxx]: foo
......
<p data-sourcepos="1:1-1:17"><sup data-sourcepos="1:1-1:8" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup> <sup data-sourcepos="1:10-1:17" class="footnote-ref"><a href="#fn1" id="fnref2">[1]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-3:13" id="fn1" class="footnote-item">
<p data-sourcepos="3:11-3:13">foo <a href="#fnref1" class="footnote-backref">↩︎</a> <a href="#fnref2" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,28 @@
Indents
......
[^xxxxx] [^yyyyy]
[^xxxxx]: foo
---
[^yyyyy]: foo
---
......
<p data-sourcepos="1:1-1:17"><sup data-sourcepos="1:1-1:8" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup> <sup data-sourcepos="1:10-1:17" class="footnote-ref"><a href="#fn2" id="fnref2">[2]</a></sup></p>
<hr data-sourcepos="7:4-7:6">
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-5:0" id="fn1" class="footnote-item">
<h2 data-sourcepos="3:11-4:7">foo</h2>
<a href="#fnref1" class="footnote-backref">↩︎</a></li>
<li data-sourcepos="6:1-6:13" id="fn2" class="footnote-item">
<p data-sourcepos="6:11-6:13">foo <a href="#fnref2" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,19 @@
Indents for the first line (tabs)
......
[^xxxxx]
[^xxxxx]: foo
......
<p data-sourcepos="1:1-1:8"><sup data-sourcepos="1:1-1:8" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-3:14" id="fn1" class="footnote-item">
<p data-sourcepos="3:12-3:14">foo <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,37 @@
Nested blocks
......
[^a]
[^a]: abc
def
hij
- list
> block
terminates here
......
<p data-sourcepos="1:1-1:4"><sup data-sourcepos="1:1-1:4" class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<p data-sourcepos="12:1-12:15">terminates here</p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li data-sourcepos="3:1-11:0" id="fn1" class="footnote-item">
<p data-sourcepos="3:7-3:9">abc</p>
<p data-sourcepos="5:5-6:3">def
hij</p>
<ul data-sourcepos="8:5-9:0">
<li data-sourcepos="8:5-9:0">list</li>
</ul>
<blockquote data-sourcepos="10:5-10:11">
<p data-sourcepos="10:7-10:11">block</p>
</blockquote>
<a href="#fnref1" class="footnote-backref">↩︎</a></li>
</ol>
</section>

View File

@@ -0,0 +1,24 @@
Indents for the first line
.............
[^xxxxx] [^yyyyy]
[^xxxxx]: foo
[^yyyyy]: foo
.............
<p><sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup> <sup class="footnote-ref"><a href="#fn2" id="fnref2">[2]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li id="fn1" class="footnote-item">
<p>foo <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
<li id="fn2" class="footnote-item">
<pre><code>foo
</code></pre>
<a href="#fnref2" class="footnote-backref">↩︎</a></li>
</ol>
</section>

View File

@@ -0,0 +1,21 @@
We support inline notes too (pandoc example)
......
Here is an inline note.^[Inlines notes are easier to write, since
you don't have to pick an identifier and move down to type the
note.]
......
<p data-sourcepos="1:1-3:6">Here is an inline note.<sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li id="fn1" class="footnote-item">
<p>Inlines notes are easier to write, since
you don't have to pick an identifier and move down to type the
note. <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,17 @@
Inline footnotes can have arbitrary markup
......
foo^[ *bar* ]
......
<p data-sourcepos="1:1-1:13">foo<sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li id="fn1" class="footnote-item">
<p> <em data-sourcepos="1:7-1:11">bar</em> <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,19 @@
Should allow links in inline footnotes
......
Example^[this is another example [a]]
[a]: https://github.com
......
<p data-sourcepos="1:1-1:37">Example<sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li id="fn1" class="footnote-item">
<p>this is another example <a data-sourcepos="1:34-1:36" href="https://github.com">a</a> <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -0,0 +1,19 @@
nested inline footnotes
......
[Example^[this is another example [a]]][a]
[a]: https://github.com
......
<p data-sourcepos="1:1-1:42"><a data-sourcepos="1:1-1:42" href="https://github.com">Example<sup class="footnote-ref"><a href="#fn1" id="fnref1">[1]</a></sup></a></p>
<hr class="footnotes-sep">
<section class="footnotes">
<ol class="footnotes-list">
<li id="fn1" class="footnote-item">
<p>this is another example <a data-sourcepos="1:35-1:37" href="https://github.com">a</a> <a href="#fnref1" class="footnote-backref">↩︎</a></p>
</li>
</ol>
</section>

View File

@@ -8,13 +8,14 @@ edition = { workspace = true }
workspace = true
[dependencies]
libservice = { workspace = true }
toolbox = { workspace = true }
pixel-transform = { workspace = true }
axum = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
maud = { workspace = true }
chrono = { workspace = true }
parking_lot = { workspace = true }
serde = { workspace = true }
lru = { workspace = true }
tower-http = { workspace = true }
tower = { workspace = true }
serde_urlencoded = { workspace = true }

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,11 @@
// htmx "json-enc" extension: sends request parameters as a JSON body
// (with a matching Content-Type header) instead of the default
// form-urlencoded encoding.
htmx.defineExtension('json-enc', {
    onEvent: function (name, evt) {
        // rewrite the Content-Type header as the request is configured
        if (name === "htmx:configRequest") {
            evt.detail.headers['Content-Type'] = "application/json";
        }
    },

    encodeParameters: function (xhr, parameters, elt) {
        xhr.overrideMimeType('text/json');
        return (JSON.stringify(parameters));
    }
});

View File

@@ -1,8 +1,25 @@
mod page;
pub use page::*;
//! A web stack for embedded uis.
//!
//! Featuring:
//! - htmx
//! - axum
//! - rust
//! - and maud
mod requestcontext;
pub use requestcontext::*;
pub mod servable;
mod server;
pub use server::*;
mod types;
pub use types::*;
mod route;
pub use route::*;
/// The htmx core library, embedded at compile time and servable
/// as a static javascript asset.
pub const HTMX_2_0_8: servable::StaticAsset = servable::StaticAsset {
    bytes: include_str!("../htmx/htmx-2.0.8.min.js").as_bytes(),
    mime: toolbox::mime::MimeType::Javascript,
};

/// The htmx "json-enc" extension, embedded at compile time.
// NOTE(review): the constant name says 1_19_12 but the bundled file is
// json-enc-1.9.12.js — confirm which version string is intended.
pub const EXT_JSON_1_19_12: servable::StaticAsset = servable::StaticAsset {
    bytes: include_str!("../htmx/json-enc-1.9.12.js").as_bytes(),
    mime: toolbox::mime::MimeType::Javascript,
};

View File

@@ -1,105 +0,0 @@
use chrono::TimeDelta;
use maud::{Markup, Render, html};
use serde::Deserialize;
use std::pin::Pin;
use crate::RequestContext;
//
// MARK: metadata
//
/// Metadata rendered into a page's head: title, author, description,
/// and preview image/favicon (see the [Render] impl for the exact tags).
#[derive(Debug, Clone, Hash, PartialEq, Eq, Deserialize)]
pub struct PageMetadata {
    pub title: String,
    pub author: Option<String>,
    pub description: Option<String>,
    /// URL used for both the og:image preview and the favicon link
    pub image: Option<String>,
    pub slug: Option<String>,
}

impl Default for PageMetadata {
    fn default() -> Self {
        Self {
            // a visible placeholder rather than an empty title
            title: "Untitled page".into(),
            author: None,
            description: None,
            image: None,
            slug: None,
        }
    }
}
impl Render for PageMetadata {
    /// Render this metadata as a set of meta/link tags
    /// (plain HTML, OpenGraph, and Twitter variants).
    fn render(&self) -> Markup {
        // `as_deref` borrows the inner strings and defaults missing
        // fields to "" — replaces the previous temporary empty String
        // and `&Option::as_ref().unwrap_or(...)` double references.
        let title = &self.title;
        let author = self.author.as_deref().unwrap_or("");
        let description = self.description.as_deref().unwrap_or("");
        let image = self.image.as_deref().unwrap_or("");
        html!(
            meta property="og:site_name" content=(title) {}
            meta name="title" content=(title) {}
            meta property="og:title" content=(title) {}
            meta property="twitter:title" content=(title) {}
            meta name="author" content=(author) {}
            meta name="description" content=(description) {}
            meta property="og:description" content=(description) {}
            meta property="twitter:description" content=(description) {}
            meta content=(image) property="og:image" {}
            link rel="shortcut icon" href=(image) type="image/x-icon" {}
        )
    }
}
//
// MARK: page
//
/// A servable HTML page: metadata, a cache TTL, and a closure that
/// produces the page body.
pub struct Page {
    pub meta: PageMetadata,

    /// How long this page's html may be cached.
    /// This controls the maximum age of a page shown to the user.
    ///
    /// If `None`, this page is always rendered from scratch.
    pub html_ttl: Option<TimeDelta>,

    /// A function that generates this page's html.
    ///
    /// This should return the contents of this page's <body> tag,
    /// or the contents of a wrapper element (defined in the page server struct).
    ///
    /// This closure must never return `<html>` or `<head>`.
    pub generate_html: Box<
        dyn Send
            + Sync
            + for<'a> Fn(
                &'a Page,
                &'a RequestContext,
            ) -> Pin<Box<dyn Future<Output = Markup> + 'a + Send + Sync>>,
    >,
}
impl Default for Page {
    fn default() -> Self {
        Page {
            meta: Default::default(),
            // NOTE(review): 60 * 24 * 30 SECONDS is only 12 hours; if the
            // intent was "30 days" this should be minutes or days — confirm.
            html_ttl: Some(TimeDelta::seconds(60 * 24 * 30)),
            // an empty body by default
            generate_html: Box::new(|_, _| Box::pin(async { html!() })),
        }
    }
}
impl Page {
    /// Generate this page's html by invoking the stored closure.
    pub async fn generate_html(&self, req_info: &RequestContext) -> Markup {
        (self.generate_html)(self, req_info).await
    }
}

View File

@@ -1,60 +0,0 @@
use axum::http::HeaderMap;
/// Per-request information handed to page render functions.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RequestContext {
    pub client_info: ClientInfo,
}
//
//
//
/// Rough class of device a request came from,
/// estimated from request headers.
// `#[derive(Default)]` with `#[default]` replaces the manual impl.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum DeviceType {
    Mobile,
    /// The conservative fallback when detection is inconclusive.
    #[default]
    Desktop,
}
//
// MARK: clientinfo
//
/// Information about the client, derived from request headers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ClientInfo {
    /// This is an estimate, but it's probably good enough.
    pub device_type: DeviceType,
}
impl ClientInfo {
    /// Estimate client info from request headers.
    ///
    /// Mobile detection checks the `Sec-CH-UA-Mobile` client hint
    /// first, then falls back to sniffing "Mobile" in the user agent;
    /// anything else is treated as desktop.
    pub fn from_headers(headers: &HeaderMap) -> Self {
        let ua = headers
            .get("user-agent")
            .and_then(|x| x.to_str().ok())
            .unwrap_or("");
        let ch_mobile = headers
            .get("Sec-CH-UA-Mobile")
            .and_then(|x| x.to_str().ok())
            .unwrap_or("");
        // one boolean expression instead of mutating an Option:
        // the client hint ("?1") wins, user-agent sniffing is the fallback
        let device_type = if ch_mobile.contains('1') || ua.contains("Mobile") {
            DeviceType::Mobile
        } else {
            DeviceType::Desktop
        };
        Self { device_type }
    }
}

View File

@@ -0,0 +1,275 @@
use axum::{
Router,
body::Body,
http::{HeaderMap, HeaderValue, Method, Request, StatusCode, header},
response::{IntoResponse, Response},
};
use chrono::TimeDelta;
use std::{
collections::{BTreeMap, HashMap},
convert::Infallible,
net::SocketAddr,
pin::Pin,
sync::Arc,
task::{Context, Poll},
time::Instant,
};
use toolbox::mime::MimeType;
use tower::Service;
use tracing::trace;
use crate::{ClientInfo, RenderContext, Rendered, RenderedBody, servable::Servable};
/// Fallback servable used when no registered route matches.
struct Default404 {}

impl Servable for Default404 {
    fn head<'a>(
        &'a self,
        _ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<()>> + 'a + Send + Sync>> {
        Box::pin(async {
            // a plain 404 with no body, cacheable for one day
            Rendered {
                code: StatusCode::NOT_FOUND,
                body: (),
                ttl: Some(TimeDelta::days(1)),
                immutable: true,
                headers: HeaderMap::new(),
                mime: Some(MimeType::Html),
            }
        })
    }

    fn render<'a>(
        &'a self,
        ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<RenderedBody>> + 'a + Send + Sync>> {
        // identical to `head`, just with an explicit empty body attached
        Box::pin(async { self.head(ctx).await.with_body(RenderedBody::Empty) })
    }
}
/// A set of related [Servable]s under one route.
///
/// Use as follows:
/// ```ignore
///
/// // Add compression, for example.
/// // Also consider CORS and timeout.
/// let compression: CompressionLayer = CompressionLayer::new()
/// .br(true)
/// .deflate(true)
/// .gzip(true)
/// .zstd(true)
/// .compress_when(DefaultPredicate::new());
///
/// let route = ServableRoute::new()
/// .add_page(
/// "/page",
/// StaticAsset {
/// bytes: "I am a page".as_bytes(),
/// mime: MimeType::Text,
/// },
/// );
///
/// Router::new()
/// .nest_service("/", route)
/// .layer(compression.clone());
/// ```
#[derive(Clone)]
pub struct ServableRoute {
    // route -> servable; Arc'd so clones of this service share one map
    pages: Arc<HashMap<String, Arc<dyn Servable>>>,
    // served when no route matches (Default404 unless overridden)
    notfound: Arc<dyn Servable>,
}
impl ServableRoute {
    /// Create an empty route set with the default 404 handler.
    pub fn new() -> Self {
        Self {
            pages: Arc::new(HashMap::new()),
            notfound: Arc::new(Default404 {}),
        }
    }

    /// Set this server's "not found" page
    pub fn with_404<S: Servable + 'static>(mut self, page: S) -> Self {
        self.notfound = Arc::new(page);
        self
    }

    /// Add a page to this server at the given route.
    /// - panics if route does not start with a `/`, ends with a `/`, or contains `//`.
    ///   - urls are normalized, routes that violate this condition will never be served.
    ///   - `/` is an exception, it is valid.
    /// - panics if called after this service is started
    /// - overwrites existing pages
    pub fn add_page<S: Servable + 'static>(mut self, route: impl Into<String>, page: S) -> Self {
        let route = route.into();
        // `assert!` states each precondition with the same panic message
        assert!(route.starts_with('/'), "route must start with /");
        assert!(!route.ends_with('/') || route == "/", "route must not end with /");
        assert!(!route.contains("//"), "route must not contain //");
        #[expect(clippy::expect_used)]
        Arc::get_mut(&mut self.pages)
            .expect("add_pages called after service was started")
            .insert(route, Arc::new(page));
        self
    }

    /// Convenience method.
    /// Turns this service into a router.
    ///
    /// Equivalent to:
    /// ```ignore
    /// Router::new().fallback_service(self)
    /// ```
    pub fn into_router<T: Clone + Send + Sync + 'static>(self) -> Router<T> {
        Router::new().fallback_service(self)
    }
}

// clippy::new_without_default — `new` is a natural `Default`
impl Default for ServableRoute {
    fn default() -> Self {
        Self::new()
    }
}
//
// MARK: impl Service
//
impl Service<Request<Body>> for ServableRoute {
    type Response = Response;
    type Error = Infallible;
    type Future =
        Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send + 'static>>;

    // Always ready; all the work happens in `call`.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }

    /// Serve one request: reject non-GET/HEAD methods, normalize the
    /// url (redirecting if it changes), look up the matching servable
    /// (falling back to the 404 page), then fill in default
    /// Cache-Control / Accept-CH / Content-Type headers before replying.
    fn call(&mut self, req: Request<Body>) -> Self::Future {
        // only GET and HEAD are supported
        if req.method() != Method::GET && req.method() != Method::HEAD {
            let mut headers = HeaderMap::with_capacity(1);
            headers.insert(header::ACCEPT, HeaderValue::from_static("GET,HEAD"));
            return Box::pin(async {
                Ok((StatusCode::METHOD_NOT_ALLOWED, headers).into_response())
            });
        }
        // clone the Arcs so the returned future can be 'static
        let pages = self.pages.clone();
        let notfound = self.notfound.clone();
        Box::pin(async move {
            let addr = req.extensions().get::<SocketAddr>().copied();
            let route = req.uri().path().to_owned();
            let headers = req.headers().clone();
            // a malformed query string is treated as empty
            let query: BTreeMap<String, String> =
                serde_urlencoded::from_str(req.uri().query().unwrap_or("")).unwrap_or_default();
            let start = Instant::now();
            let client_info = ClientInfo::from_headers(&headers);
            let ua = headers
                .get("user-agent")
                .and_then(|x| x.to_str().ok())
                .unwrap_or("");
            trace!(
                message = "Serving route",
                route,
                addr = ?addr,
                user_agent = ua,
                device_type = ?client_info.device_type
            );
            // Normalize url with redirect
            if (route.ends_with('/') && route != "/") || route.contains("//") {
                // collapse runs of slashes, then strip leading/trailing ones
                let mut new_route = route.clone();
                while new_route.contains("//") {
                    new_route = new_route.replace("//", "/");
                }
                let new_route = new_route.trim_matches('/');
                trace!(
                    message = "Redirecting",
                    route,
                    new_route,
                    addr = ?addr,
                    user_agent = ua,
                    device_type = ?client_info.device_type
                );
                let mut headers = HeaderMap::with_capacity(1);
                match HeaderValue::from_str(&format!("/{new_route}")) {
                    Ok(x) => headers.append(header::LOCATION, x),
                    // the normalized path is not a valid header value
                    Err(_) => return Ok(StatusCode::BAD_REQUEST.into_response()),
                };
                return Ok((StatusCode::PERMANENT_REDIRECT, headers).into_response());
            }
            let ctx = RenderContext {
                client_info,
                route,
                query,
            };
            // unknown routes fall through to the configured 404 page
            let page = pages.get(&ctx.route).unwrap_or(&notfound);
            // HEAD requests get headers only, with an empty body
            let mut rend = match req.method() == Method::HEAD {
                true => page.head(&ctx).await.with_body(RenderedBody::Empty),
                false => page.render(&ctx).await,
            };
            // Tweak headers
            {
                // default Cache-Control from the page's ttl/immutable flags
                if !rend.headers.contains_key(header::CACHE_CONTROL) {
                    let max_age = rend.ttl.map(|x| x.num_seconds()).unwrap_or(1).max(1);
                    let mut value = String::new();
                    if rend.immutable {
                        value.push_str("immutable, ");
                    }
                    value.push_str("public, ");
                    value.push_str(&format!("max-age={}, ", max_age));
                    #[expect(clippy::unwrap_used)]
                    rend.headers.insert(
                        header::CACHE_CONTROL,
                        HeaderValue::from_str(value.trim().trim_end_matches(',')).unwrap(),
                    );
                }
                // ask clients to send the mobile client hint on later requests
                if !rend.headers.contains_key("Accept-CH") {
                    rend.headers
                        .insert("Accept-CH", HeaderValue::from_static("Sec-CH-UA-Mobile"));
                }
                // default Content-Type from the rendered mime, if known
                if !rend.headers.contains_key(header::CONTENT_TYPE)
                    && let Some(mime) = &rend.mime
                {
                    #[expect(clippy::unwrap_used)]
                    rend.headers.insert(
                        header::CONTENT_TYPE,
                        HeaderValue::from_str(&mime.to_string()).unwrap(),
                    );
                }
            }
            trace!(
                message = "Served route",
                route = ctx.route,
                addr = ?addr,
                user_agent = ua,
                device_type = ?client_info.device_type,
                time_ns = start.elapsed().as_nanos()
            );
            Ok(match rend.body {
                RenderedBody::Markup(m) => (rend.code, rend.headers, m.0).into_response(),
                RenderedBody::Static(d) => (rend.code, rend.headers, d).into_response(),
                RenderedBody::Bytes(d) => (rend.code, rend.headers, d).into_response(),
                RenderedBody::String(s) => (rend.code, rend.headers, s).into_response(),
                RenderedBody::Empty => (rend.code, rend.headers).into_response(),
            })
        })
    }
}

View File

@@ -0,0 +1,175 @@
use axum::http::{HeaderMap, StatusCode};
use chrono::TimeDelta;
use pixel_transform::TransformerChain;
use std::{pin::Pin, str::FromStr};
use toolbox::mime::MimeType;
use tracing::{error, trace};
use crate::{RenderContext, Rendered, RenderedBody, servable::Servable};
/// A compile-time embedded asset (bytes plus mime type)
/// that can be served directly over http.
pub struct StaticAsset {
    pub bytes: &'static [u8],
    pub mime: MimeType,
}
impl Servable for StaticAsset {
fn head<'a>(
&'a self,
ctx: &'a RenderContext,
) -> Pin<Box<dyn Future<Output = Rendered<()>> + 'a + Send + Sync>> {
Box::pin(async {
let ttl = Some(TimeDelta::days(30));
let is_image = TransformerChain::mime_is_image(&self.mime);
let transform = match (is_image, ctx.query.get("t")) {
(false, _) | (_, None) => None,
(true, Some(x)) => match TransformerChain::from_str(x) {
Ok(x) => Some(x),
Err(_err) => {
return Rendered {
code: StatusCode::BAD_REQUEST,
body: (),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: None,
};
}
},
};
match transform {
Some(transform) => {
return Rendered {
code: StatusCode::OK,
body: (),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: Some(
transform
.output_mime(&self.mime)
.unwrap_or(self.mime.clone()),
),
};
}
None => {
return Rendered {
code: StatusCode::OK,
body: (),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: Some(self.mime.clone()),
};
}
}
})
}
fn render<'a>(
&'a self,
ctx: &'a RenderContext,
) -> Pin<Box<dyn Future<Output = Rendered<RenderedBody>> + 'a + Send + Sync>> {
Box::pin(async {
let ttl = Some(TimeDelta::days(30));
// Automatically provide transformation if this is an image
let is_image = TransformerChain::mime_is_image(&self.mime);
let transform = match (is_image, ctx.query.get("t")) {
(false, _) | (_, None) => None,
(true, Some(x)) => match TransformerChain::from_str(x) {
Ok(x) => Some(x),
Err(err) => {
return Rendered {
code: StatusCode::BAD_REQUEST,
body: RenderedBody::String(err),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: None,
};
}
},
};
match transform {
Some(transform) => {
trace!(message = "Transforming image", ?transform);
let task = {
let mime = Some(self.mime.clone());
let bytes = self.bytes;
tokio::task::spawn_blocking(move || {
transform.transform_bytes(bytes, mime.as_ref())
})
};
let res = match task.await {
Ok(x) => x,
Err(error) => {
error!(message = "Error while transforming image", ?error);
return Rendered {
code: StatusCode::INTERNAL_SERVER_ERROR,
body: RenderedBody::String(format!(
"Error while transforming image: {error:?}"
)),
ttl: None,
immutable: true,
headers: HeaderMap::new(),
mime: None,
};
}
};
match res {
Ok((mime, bytes)) => {
return Rendered {
code: StatusCode::OK,
body: RenderedBody::Bytes(bytes),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: Some(mime),
};
}
Err(err) => {
return Rendered {
code: StatusCode::INTERNAL_SERVER_ERROR,
body: RenderedBody::String(format!("{err}")),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: None,
};
}
}
}
None => {
return Rendered {
code: StatusCode::OK,
body: RenderedBody::Static(self.bytes),
ttl,
immutable: true,
headers: HeaderMap::new(),
mime: Some(self.mime.clone()),
};
}
}
})
}
}

View File

@@ -0,0 +1,26 @@
mod asset;
pub use asset::*;
mod page;
pub use page::*;
mod redirect;
pub use redirect::*;
/// Something that may be served over http.
pub trait Servable: Send + Sync {
    /// Return the same response as [Servable::render], but with an empty body.
    /// Used to respond to `HEAD` requests.
    ///
    /// Implementations should produce the same status code, headers, mime
    /// type, and cache parameters that [Servable::render] would.
    fn head<'a>(
        &'a self,
        ctx: &'a crate::RenderContext,
    ) -> std::pin::Pin<Box<dyn Future<Output = crate::Rendered<()>> + 'a + Send + Sync>>;
    /// Render this page
    ///
    /// Returns the full response: status code, headers, cache policy, and body.
    fn render<'a>(
        &'a self,
        ctx: &'a crate::RenderContext,
    ) -> std::pin::Pin<
        Box<dyn Future<Output = crate::Rendered<crate::RenderedBody>> + 'a + Send + Sync>,
    >;
}

View File

@@ -0,0 +1,309 @@
use axum::http::{HeaderMap, StatusCode};
use chrono::TimeDelta;
use maud::{DOCTYPE, Markup, PreEscaped, html};
use serde::Deserialize;
use std::{pin::Pin, sync::Arc};
use toolbox::mime::MimeType;
use crate::{RenderContext, Rendered, RenderedBody, servable::Servable};
//
// MARK: metadata
//
/// Page metadata rendered into `<head>`: title, author, description,
/// and preview image (standard, OpenGraph, and Twitter meta tags).
#[derive(Debug, Clone, Hash, PartialEq, Eq, Deserialize)]
pub struct PageMetadata {
    /// The page title, also used for `og:title`, `twitter:title`,
    /// and `og:site_name`.
    pub title: String,
    /// Value for the `author` meta tag, if any.
    pub author: Option<String>,
    /// Page description (`description`, `og:description`, `twitter:description`).
    pub description: Option<String>,
    /// URL of the preview image (`og:image`); also used as the favicon.
    pub image: Option<String>,
}
impl Default for PageMetadata {
fn default() -> Self {
Self {
title: "Untitled page".into(),
author: None,
description: None,
image: None,
}
}
}
//
// MARK: page
//
/// A dynamically-rendered html page.
#[derive(Clone)]
pub struct Page {
    /// Metadata rendered into this page's `<head>`.
    pub meta: PageMetadata,
    /// Whether responses for this page may be treated as immutable by caches.
    pub immutable: bool,
    /// How long this page's html may be cached.
    /// This controls the maximum age of a page shown to the user.
    ///
    /// If `None`, this page is always rendered from scratch.
    pub html_ttl: Option<TimeDelta>,
    /// A function that generates this page's html.
    ///
    /// This should return the contents of this page's <body> tag,
    /// or the contents of a wrapper element (defined in the page server struct).
    ///
    /// This closure must never return `<html>` or `<head>`.
    pub generate_html: Arc<
        dyn Send
            + Sync
            + 'static
            + for<'a> Fn(
                &'a Page,
                &'a RenderContext,
            ) -> Pin<Box<dyn Future<Output = Markup> + Send + Sync + 'a>>,
    >,
    /// The http status code this page responds with.
    pub response_code: StatusCode,
    /// Bodies of inline `<script>` tags, emitted unescaped.
    pub scripts_inline: Vec<String>,
    /// URLs for `<script src=...>` tags.
    pub scripts_linked: Vec<String>,
    /// URLs for `<link rel="stylesheet">` tags.
    pub styles_linked: Vec<String>,
    /// Bodies of inline `<style>` tags, emitted unescaped.
    pub styles_inline: Vec<String>,
    /// `name`, `content` for extra `<meta>` tags
    pub extra_meta: Vec<(String, String)>,
}
impl Default for Page {
fn default() -> Self {
Page {
// No cache by default
html_ttl: None,
immutable: false,
meta: Default::default(),
generate_html: Arc::new(|_, _| Box::pin(async { html!() })),
response_code: StatusCode::OK,
scripts_inline: Vec::new(),
scripts_linked: Vec::new(),
styles_inline: Vec::new(),
styles_linked: Vec::new(),
extra_meta: Vec::new(),
}
}
}
impl Page {
    /// Render this page's inner html by invoking its `generate_html` closure.
    pub async fn generate_html(&self, ctx: &RenderContext) -> Markup {
        (self.generate_html)(self, ctx).await
    }
    /// Set whether responses may be treated as immutable by caches.
    pub fn immutable(mut self, immutable: bool) -> Self {
        self.immutable = immutable;
        self
    }
    /// Set how long rendered html may be cached (`None` = never cache).
    pub fn html_ttl(mut self, html_ttl: Option<TimeDelta>) -> Self {
        self.html_ttl = html_ttl;
        self
    }
    /// Set the http status code this page responds with.
    pub fn response_code(mut self, response_code: StatusCode) -> Self {
        self.response_code = response_code;
        self
    }
    /// Append an inline `<script>` body (emitted unescaped).
    pub fn with_script_inline(mut self, script: impl Into<String>) -> Self {
        self.scripts_inline.push(script.into());
        self
    }
    /// Append a `<script src=...>` url.
    pub fn with_script_linked(mut self, url: impl Into<String>) -> Self {
        self.scripts_linked.push(url.into());
        self
    }
    /// Append an inline `<style>` body (emitted unescaped).
    pub fn with_style_inline(mut self, style: impl Into<String>) -> Self {
        self.styles_inline.push(style.into());
        self
    }
    /// Append a stylesheet `<link>` url.
    pub fn with_style_linked(mut self, url: impl Into<String>) -> Self {
        self.styles_linked.push(url.into());
        self
    }
    /// Append an extra `<meta name=... content=...>` pair.
    pub fn with_extra_meta(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        self.extra_meta.push((key.into(), value.into()));
        self
    }
}
impl Servable for Page {
    /// Headers-only response: the page's status code, html mime type,
    /// and configured cache policy. The body is never generated.
    fn head<'a>(
        &'a self,
        _ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<()>> + 'a + Send + Sync>> {
        Box::pin(async {
            return Rendered {
                code: self.response_code,
                body: (),
                ttl: self.html_ttl,
                immutable: self.immutable,
                headers: HeaderMap::new(),
                mime: Some(MimeType::Html),
            };
        })
    }
    /// Render the full html document: `<head>` (metadata, scripts, styles)
    /// plus the page's generated body wrapped in `<body><main>`.
    fn render<'a>(
        &'a self,
        ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<RenderedBody>> + 'a + Send + Sync>> {
        Box::pin(async {
            let inner_html = self.generate_html(ctx).await;
            let html = html! {
                (DOCTYPE)
                html {
                    head {
                        meta charset="UTF-8";
                        meta name="viewport" content="width=device-width, initial-scale=1,user-scalable=no";
                        meta content="text/html; charset=UTF-8" http-equiv="content-type";
                        meta property="og:type" content="website";
                        @for (name, content) in &self.extra_meta {
                            meta name=(name) content=(content);
                        }
                        //
                        // Metadata
                        //
                        // Title is PreEscaped: it may legitimately contain
                        // entities; it must be trusted server-side content.
                        title { (PreEscaped(self.meta.title.clone())) }
                        meta property="og:site_name" content=(self.meta.title);
                        meta name="title" content=(self.meta.title);
                        meta property="og:title" content=(self.meta.title);
                        meta property="twitter:title" content=(self.meta.title);
                        @if let Some(author) = &self.meta.author {
                            meta name="author" content=(author);
                        }
                        @if let Some(desc) = &self.meta.description {
                            meta name="description" content=(desc);
                            meta property="og:description" content=(desc);
                            meta property="twitter:description" content=(desc);
                        }
                        @if let Some(image) = &self.meta.image {
                            meta content=(image) property="og:image";
                            link rel="shortcut icon" href=(image) type="image/x-icon";
                        }
                        //
                        // Scripts & styles
                        //
                        @for script in &self.scripts_linked {
                            script src=(script) {}
                        }
                        @for style in &self.styles_linked {
                            link rel="stylesheet" type="text/css" href=(style);
                        }
                        @for script in &self.scripts_inline {
                            script { (PreEscaped(script)) }
                        }
                        @for style in &self.styles_inline {
                            style { (PreEscaped(style)) }
                        }
                    }
                    body { main { (inner_html) } }
                }
            };
            // Reuse head() so status/headers/cache policy can't drift from
            // HEAD responses, then attach the rendered body.
            return self.head(ctx).await.with_body(RenderedBody::Markup(html));
        })
    }
}
//
// MARK: template
//
/// A reusable set of [Page] defaults. All fields are `'static`, so a
/// template can live in a `const`; use [PageTemplate::derive] to stamp
/// out pages from it.
pub struct PageTemplate {
    /// Whether derived pages are cache-immutable.
    pub immutable: bool,
    /// Html cache TTL for derived pages (`None` = never cache).
    pub html_ttl: Option<TimeDelta>,
    /// Status code for derived pages.
    pub response_code: StatusCode,
    /// Inline `<script>` bodies for derived pages.
    pub scripts_inline: &'static [&'static str],
    /// `<script src=...>` urls for derived pages.
    pub scripts_linked: &'static [&'static str],
    /// Inline `<style>` bodies for derived pages.
    pub styles_inline: &'static [&'static str],
    /// Stylesheet `<link>` urls for derived pages.
    pub styles_linked: &'static [&'static str],
    /// `(name, content)` pairs for extra `<meta>` tags on derived pages.
    pub extra_meta: &'static [(&'static str, &'static str)],
}
impl Default for PageTemplate {
    // Delegate to the `const` constructor so both paths share a single
    // definition of the defaults.
    fn default() -> Self {
        Self::const_default()
    }
}
impl PageTemplate {
    /// The default template, usable in `const` contexts:
    /// `200 OK`, cacheable for one day, immutable, nothing attached.
    pub const fn const_default() -> Self {
        Self {
            html_ttl: Some(TimeDelta::days(1)),
            immutable: true,
            response_code: StatusCode::OK,
            scripts_inline: &[],
            scripts_linked: &[],
            styles_inline: &[],
            styles_linked: &[],
            extra_meta: &[],
        }
    }
    /// Create a new page using this template,
    /// with the given metadata and renderer.
    pub fn derive<
        R: Send
            + Sync
            + 'static
            + for<'a> Fn(
                &'a Page,
                &'a RenderContext,
            ) -> Pin<Box<dyn Future<Output = Markup> + Send + Sync + 'a>>,
    >(
        &self,
        meta: PageMetadata,
        generate_html: R,
    ) -> Page {
        // Copy a borrowed string list into an owned one.
        fn own(strs: &[&str]) -> Vec<String> {
            strs.iter().map(|s| (*s).to_owned()).collect()
        }
        Page {
            meta,
            immutable: self.immutable,
            html_ttl: self.html_ttl,
            response_code: self.response_code,
            scripts_inline: own(self.scripts_inline),
            scripts_linked: own(self.scripts_linked),
            styles_inline: own(self.styles_inline),
            styles_linked: own(self.styles_linked),
            extra_meta: self
                .extra_meta
                .iter()
                .map(|&(k, v)| (k.to_owned(), v.to_owned()))
                .collect(),
            generate_html: Arc::new(generate_html),
        }
    }
}

View File

@@ -0,0 +1,48 @@
use std::pin::Pin;
use axum::http::{
HeaderMap, HeaderValue, StatusCode,
header::{self, InvalidHeaderValue},
};
use crate::{RenderContext, Rendered, RenderedBody, servable::Servable};
/// A [Servable] that permanently redirects (`308`) to a fixed target.
pub struct Redirect {
    /// The value of the `Location` header sent to clients.
    to: HeaderValue,
}
impl Redirect {
    /// Create a redirect to `to`.
    ///
    /// # Errors
    /// Fails if `to` is not a valid http header value.
    pub fn new(to: impl Into<String>) -> Result<Self, InvalidHeaderValue> {
        Ok(Self {
            to: HeaderValue::from_str(&to.into())?,
        })
    }
}
impl Servable for Redirect {
    /// Build the redirect response: a `308 Permanent Redirect` with the
    /// configured `Location` header. Never cached by this layer.
    fn head<'a>(
        &'a self,
        _ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<()>> + 'a + Send + Sync>> {
        Box::pin(async {
            let headers = HeaderMap::from_iter([(header::LOCATION, self.to.clone())]);
            Rendered {
                code: StatusCode::PERMANENT_REDIRECT,
                headers,
                body: (),
                ttl: None,
                immutable: true,
                mime: None,
            }
        })
    }
    /// Redirects carry no body: reuse [Servable::head] and attach
    /// an empty body.
    fn render<'a>(
        &'a self,
        ctx: &'a RenderContext,
    ) -> Pin<Box<dyn Future<Output = Rendered<RenderedBody>> + 'a + Send + Sync>> {
        Box::pin(async {
            let rendered = self.head(ctx).await;
            rendered.with_body(RenderedBody::Empty)
        })
    }
}

View File

@@ -1,206 +0,0 @@
use axum::{
Router,
extract::{ConnectInfo, Path, State},
http::{HeaderMap, HeaderValue, StatusCode, header},
response::{IntoResponse, Response},
routing::get,
};
use chrono::{DateTime, Utc};
use libservice::ServiceConnectInfo;
use lru::LruCache;
use maud::Markup;
use parking_lot::Mutex;
use std::{collections::HashMap, num::NonZero, pin::Pin, sync::Arc, time::Instant};
use tower_http::compression::{CompressionLayer, DefaultPredicate};
use tracing::{trace, warn};
use crate::{ClientInfo, RequestContext, page::Page};
/// Serves dynamically-rendered [Page]s over http, with an in-memory
/// html cache keyed by route and request context.
pub struct PageServer {
    /// If true, expired pages will be rerendered before being sent to the user.
    /// If false, requests never trigger rerenders. We rely on the rerender task.
    ///
    /// If true, we deliver fresher pages but delay responses.
    /// TODO: replace this with a smarter rendering strategy?
    never_rerender_on_request: bool,
    /// Map of `{ route: page }`
    pages: Arc<Mutex<HashMap<String, Arc<Page>>>>,
    /// Map of `{ route: (page data, expire time) }`
    ///
    /// We use an LruCache for bounded memory usage.
    html_cache: Mutex<LruCache<(String, RequestContext), (String, DateTime<Utc>)>>,
    /// Called whenever we need to render a page.
    /// - this method should call `page.generate_html()`,
    /// - wrap the result in `<html><body>`,
    /// - and add `<head>`
    render_page: Box<
        dyn Send
            + Sync
            + for<'a> Fn(
                &'a Page,
                &'a RequestContext,
            ) -> Pin<Box<dyn Future<Output = Markup> + 'a + Send + Sync>>,
    >,
}
impl PageServer {
    /// Create a new page server with an empty page table.
    ///
    /// `render_page` wraps a page's generated body in the site chrome
    /// (`<html>`, `<head>`, ...) — see the field docs on [PageServer].
    pub fn new(
        render_page: Box<
            dyn Send
                + Sync
                + for<'a> Fn(
                    &'a Page,
                    &'a RequestContext,
                ) -> Pin<Box<dyn Future<Output = Markup> + 'a + Send + Sync>>,
        >,
    ) -> Arc<Self> {
        // 128 is nonzero, so this unwrap cannot fail.
        #[expect(clippy::unwrap_used)]
        let cache_size = NonZero::new(128).unwrap();
        Arc::new(Self {
            pages: Arc::new(Mutex::new(HashMap::new())),
            html_cache: Mutex::new(LruCache::new(cache_size)),
            render_page,
            never_rerender_on_request: true,
        })
    }
    /// Register `page` at `route`. The route is stored without its
    /// leading slash.
    ///
    /// # Panics
    /// Panics if `route` does not start with `/`.
    pub fn add_page(&self, route: impl Into<String>, page: Page) -> &Self {
        #[expect(clippy::expect_used)]
        let route = route
            .into()
            .strip_prefix("/")
            .expect("page route must start with /")
            .to_owned();
        self.pages.lock().insert(route, Arc::new(page));
        self
    }
    /// Re-render the page at `route`, regardless of cache state.
    /// Does nothing if there is no page at `route`.
    ///
    /// Returns the rendered page's content.
    async fn render_page(
        &self,
        reason: &'static str,
        route: &str,
        req_ctx: RequestContext,
    ) -> Option<(String, Option<DateTime<Utc>>)> {
        let now = Utc::now();
        let start = Instant::now();
        trace!(message = "Rendering page", route, reason);
        // Clone the Arc out of the lock so it isn't held across `.await`.
        let page = match self.pages.lock().get(route) {
            Some(x) => x.clone(),
            None => {
                warn!(message = "Not rerendering, no such route", route, reason);
                return None;
            }
        };
        let html = (self.render_page)(&page, &req_ctx).await.0;
        let mut expires = None;
        // Only pages that declare a TTL are cached.
        if let Some(ttl) = page.html_ttl {
            expires = Some(now + ttl);
            self.html_cache
                .lock()
                .put((route.to_owned(), req_ctx), (html.clone(), now + ttl));
        }
        let elapsed = start.elapsed().as_millis();
        trace!(message = "Rendered page", route, reason, time_ms = elapsed);
        return Some((html, expires));
    }
    /// Axum handler: serve the page at `route`, preferring the html cache.
    async fn handler(
        Path(route): Path<String>,
        State(state): State<Arc<Self>>,
        ConnectInfo(addr): ConnectInfo<ServiceConnectInfo>,
        headers: HeaderMap,
    ) -> Response {
        let client_info = ClientInfo::from_headers(&headers);
        let ua = headers
            .get("user-agent")
            .and_then(|x| x.to_str().ok())
            .unwrap_or("");
        trace!(
            message = "Serving route",
            route,
            addr = ?addr.addr,
            user_agent = ua,
            device_type = ?client_info.device_type
        );
        let req_ctx = RequestContext { client_info };
        let cache_key = (route.clone(), req_ctx.clone());
        let now = Utc::now();
        let mut html_expires = None;
        // Get from cache, if available
        // (a stale entry is still served when `never_rerender_on_request`
        // is set; a background rerender task is responsible for freshness).
        if let Some((html, expires)) = state.html_cache.lock().get(&cache_key)
            && (*expires > now || state.never_rerender_on_request)
        {
            html_expires = Some((html.clone(), Some(*expires)));
        };
        if html_expires.is_none() {
            html_expires = match state.render_page("request", &route, req_ctx).await {
                Some(x) => Some(x.clone()),
                None => return (StatusCode::NOT_FOUND, "page doesn't exist").into_response(),
            };
        }
        // One of the two branches above always fills `html_expires`.
        #[expect(clippy::unwrap_used)]
        let (html, expires) = html_expires.unwrap();
        let mut headers = HeaderMap::with_capacity(3);
        headers.append(
            header::CONTENT_TYPE,
            HeaderValue::from_static("text/html; charset=utf-8"),
        );
        // Clamp to at least one second so clients revalidate promptly
        // for uncacheable or already-expired pages.
        let max_age = match expires {
            Some(expires) => (expires - now).num_seconds().max(1),
            None => 1,
        };
        #[expect(clippy::unwrap_used)]
        headers.append(
            header::CACHE_CONTROL,
            // immutable; public/private
            HeaderValue::from_str(&format!("immutable, public, max-age={}", max_age)).unwrap(),
        );
        // Ask the client to send the mobile client hint on later requests.
        headers.append("Accept-CH", HeaderValue::from_static("Sec-CH-UA-Mobile"));
        return (headers, html).into_response();
    }
    /// Build an axum router serving every registered page,
    /// with response compression enabled.
    pub fn into_router(self: Arc<Self>) -> Router<()> {
        let compression: CompressionLayer = CompressionLayer::new()
            .br(true)
            .deflate(true)
            .gzip(true)
            .zstd(true)
            .compress_when(DefaultPredicate::new());
        Router::new()
            .route(
                "/",
                // The root path carries no `Path` parameter; fake an empty one.
                get(|state, conn, headers| async {
                    Self::handler(Path(String::new()), state, conn, headers).await
                }),
            )
            .route("/{*path}", get(Self::handler))
            .layer(compression)
            .with_state(self)
    }
}

View File

@@ -0,0 +1,110 @@
use axum::http::{HeaderMap, StatusCode};
use chrono::TimeDelta;
use maud::Markup;
use std::collections::BTreeMap;
use toolbox::mime::MimeType;
//
// MARK: rendered
//
/// The body of a rendered response, in whichever form the
/// [Servable](crate::servable::Servable) produced it.
#[derive(Clone)]
pub enum RenderedBody {
    /// Rendered html markup.
    Markup(Markup),
    /// Borrowed bytes (e.g. an embedded static asset).
    Static(&'static [u8]),
    /// Owned bytes (e.g. a transformed image).
    Bytes(Vec<u8>),
    /// An owned string (e.g. an error message).
    String(String),
    /// No body at all (HEAD responses, redirects).
    Empty,
}
/// Marker trait for types that may appear as the body of a [Rendered].
pub trait RenderedBodyType {}
// `()` marks a body-less response (used for HEAD requests).
impl RenderedBodyType for () {}
impl RenderedBodyType for RenderedBody {}
/// A fully-rendered http response, generic over its body type
/// (`()` for HEAD responses, [RenderedBody] for full responses).
#[derive(Clone)]
pub struct Rendered<T: RenderedBodyType> {
    /// The http status code to respond with.
    pub code: StatusCode,
    /// Extra response headers.
    pub headers: HeaderMap,
    /// The response body.
    pub body: T,
    /// The response's content type, if known.
    pub mime: Option<MimeType>,
    /// How long to cache this response.
    /// If none, don't cache.
    pub ttl: Option<TimeDelta>,
    /// Whether caches may treat this response as immutable.
    pub immutable: bool,
}
impl Rendered<()> {
    /// Turn this [Rendered] into a [Rendered] with a body.
    /// All other fields are carried over unchanged.
    pub fn with_body(self, body: RenderedBody) -> Rendered<RenderedBody> {
        let Rendered {
            code,
            headers,
            mime,
            ttl,
            immutable,
            ..
        } = self;
        Rendered {
            code,
            headers,
            body,
            mime,
            ttl,
            immutable,
        }
    }
}
//
// MARK: context
//
/// Per-request data passed to every [Servable](crate::servable::Servable).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RenderContext {
    /// What we know about the requesting client.
    pub client_info: ClientInfo,
    /// The requested route.
    pub route: String,
    /// Parsed query-string parameters.
    pub query: BTreeMap<String, String>,
}
//
// MARK: clientinfo
//
/// A coarse classification of the requesting device.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DeviceType {
    Mobile,
    Desktop,
}
impl Default for DeviceType {
    // Desktop is the safest assumption when detection is inconclusive.
    fn default() -> Self {
        Self::Desktop
    }
}
/// What we can infer about a client from its request headers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ClientInfo {
    /// This is an estimate, but it's probably good enough.
    pub device_type: DeviceType,
}
impl ClientInfo {
    /// Derive client info from request headers.
    ///
    /// Device detection is heuristic: prefer the `Sec-CH-UA-Mobile`
    /// client hint (a value containing `1` means mobile), then fall back
    /// to sniffing "Mobile" in the user-agent. Otherwise assume desktop.
    pub fn from_headers(headers: &HeaderMap) -> Self {
        // Fetch a header as a string, treating absent/non-utf8 as empty.
        let header_str = |name: &str| {
            headers
                .get(name)
                .and_then(|v| v.to_str().ok())
                .unwrap_or("")
        };
        let is_mobile = header_str("Sec-CH-UA-Mobile").contains("1")
            || header_str("user-agent").contains("Mobile");
        Self {
            device_type: if is_mobile {
                DeviceType::Mobile
            } else {
                DeviceType::Desktop
            },
        }
    }
}

View File

@@ -0,0 +1,16 @@
[package]
name = "pixel-transform"
version = { workspace = true }
rust-version = { workspace = true }
edition = { workspace = true }
[lints]
workspace = true
[dependencies]
toolbox = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
image = { workspace = true }
strum = { workspace = true }

View File

@@ -0,0 +1,159 @@
use image::{DynamicImage, ImageFormat};
use serde::{Deserialize, Deserializer, de};
use std::{fmt::Display, hash::Hash, io::Cursor, str::FromStr};
use thiserror::Error;
use toolbox::mime::MimeType;
use crate::transformers::{ImageTransformer, TransformerEnum};
/// An error produced by [TransformerChain::transform_bytes].
#[derive(Debug, Error)]
pub enum TransformBytesError {
    /// The given mime type is not a decodable image format.
    #[error("{0} is not a valid image type")]
    NotAnImage(String),
    /// Decoding or encoding the image failed.
    #[error("error while processing image")]
    ImageError(#[from] image::ImageError),
}
/// An ordered list of image transformations, applied front to back.
///
/// Parsed from strings like `maxdim(100,100);format(png)`;
/// see the [FromStr] impl for the grammar.
#[derive(Debug, Clone)]
pub struct TransformerChain {
    /// The transformation steps, in application order.
    pub steps: Vec<TransformerEnum>,
}
impl TransformerChain {
    /// Returns true if `mime` is an image format the `image` crate
    /// recognizes.
    #[inline]
    pub fn mime_is_image(mime: &MimeType) -> bool {
        ImageFormat::from_mime_type(mime.to_string()).is_some()
    }
    /// Apply every geometric step of this chain to `image`, in order.
    ///
    /// `format(...)` steps are skipped here — they only affect encoding,
    /// which is handled by [TransformerChain::transform_bytes].
    pub fn transform_image(&self, mut image: DynamicImage) -> DynamicImage {
        for step in &self.steps {
            match step {
                TransformerEnum::Format { .. } => {}
                TransformerEnum::MaxDim(t) => t.transform(&mut image),
                TransformerEnum::Crop(t) => t.transform(&mut image),
            }
        }
        return image;
    }
    /// Predict this chain's output mime type for an input of `input_mime`:
    /// the trailing `format(...)` step's type if present, otherwise
    /// `input_mime` itself. Returns `None` if the result is not a
    /// recognized image type.
    pub fn output_mime(&self, input_mime: &MimeType) -> Option<MimeType> {
        let mime = self
            .steps
            .last()
            .and_then(|x| match x {
                TransformerEnum::Format { format } => Some(MimeType::from(format.to_mime_type())),
                _ => None,
            })
            .unwrap_or(input_mime.clone());
        let fmt = ImageFormat::from_mime_type(mime.to_string());
        fmt.map(|_| mime)
    }
    /// Decode `image_bytes`, apply this chain, and re-encode.
    ///
    /// If `image_format` is `None`, the input format is guessed from the
    /// bytes' magic numbers.
    ///
    /// # Errors
    /// Fails if the mime type is not a recognized image format, or if
    /// decoding/encoding fails.
    pub fn transform_bytes(
        &self,
        image_bytes: &[u8],
        image_format: Option<&MimeType>,
    ) -> Result<(MimeType, Vec<u8>), TransformBytesError> {
        let format: ImageFormat = match image_format {
            Some(x) => ImageFormat::from_mime_type(x.to_string())
                .ok_or(TransformBytesError::NotAnImage(x.to_string()))?,
            None => image::guess_format(image_bytes)?,
        };
        // Output format: the trailing `format(...)` step, else the input format.
        let out_format = self
            .steps
            .last()
            .and_then(|x| match x {
                TransformerEnum::Format { format } => Some(format),
                _ => None,
            })
            .unwrap_or(&format);
        let img = image::load_from_memory_with_format(image_bytes, format)?;
        let img = self.transform_image(img);
        let out_mime = MimeType::from(out_format.to_mime_type());
        let mut out_bytes = Cursor::new(Vec::new());
        img.write_to(&mut out_bytes, *out_format)?;
        return Ok((out_mime, out_bytes.into_inner()));
    }
}
impl FromStr for TransformerChain {
    type Err = String;
    /// Parse a `;`-separated list of steps, e.g. `maxdim(100,100);format(png)`.
    /// Empty segments (trailing or doubled `;`) are ignored.
    ///
    /// # Errors
    /// Returns a human-readable message if a step fails to parse, if more
    /// than one `format()` step is given, or if `format()` is not last.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let steps_str = s.split(";");
        let mut steps = Vec::new();
        for s in steps_str {
            let s = s.trim();
            if s.is_empty() {
                continue;
            }
            let step = s.parse();
            match step {
                Ok(x) => steps.push(x),
                Err(msg) => return Err(format!("invalid step `{s}`: {msg}")),
            }
        }
        let n_format = steps
            .iter()
            .filter(|x| matches!(x, TransformerEnum::Format { .. }))
            .count();
        // Fix: this previously checked `n_format > 2`, which silently
        // accepted chains with exactly two `format()` steps even though
        // only one is allowed.
        if n_format > 1 {
            return Err("provide at most one format()".to_owned());
        }
        if n_format == 1 && !matches!(steps.last(), Some(TransformerEnum::Format { .. })) {
            return Err("format() must be last".to_owned());
        }
        return Ok(Self { steps });
    }
}
impl<'de> Deserialize<'de> for TransformerChain {
    /// Deserialize from the same string grammar accepted by [FromStr].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Self::from_str(&s).map_err(de::Error::custom)
    }
}
impl Display for TransformerChain {
    /// Serialize this chain back into the `step1;step2;...` form
    /// accepted by the [FromStr] parser.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for (i, step) in self.steps.iter().enumerate() {
            // `;` separates steps; none before the first.
            if i > 0 {
                write!(f, ";")?;
            }
            write!(f, "{step}")?;
        }
        Ok(())
    }
}
// Equality and hashing delegate to the canonical string form, so two
// chains that serialize identically compare equal and hash alike
// (e.g. when used as a cache key).
impl PartialEq for TransformerChain {
    fn eq(&self, other: &Self) -> bool {
        self.to_string() == other.to_string()
    }
}
impl Eq for TransformerChain {}
impl Hash for TransformerChain {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.to_string().hash(state);
    }
}

View File

@@ -0,0 +1,6 @@
mod pixeldim;
pub mod transformers;
mod chain;
pub use chain::*;

View File

@@ -0,0 +1,68 @@
use serde::{Deserialize, Deserializer};
use std::fmt;
use std::str::FromStr;
// TODO: parse -, + (100vw - 10px)
// TODO: parse 100vw [min] 10
// TODO: parse 100vw [max] 10
/// A dimension, either absolute or relative to an image's size.
#[derive(Debug, Clone, PartialEq)]
pub enum PixelDim {
    /// An absolute number of pixels (`px`, the default unit).
    Pixels(u32),
    /// A percentage of the image's width (`vw`).
    WidthPercent(f32),
    /// A percentage of the image's height (`vh`).
    HeightPercent(f32),
}
impl FromStr for PixelDim {
    type Err = String;
    /// Parse a dimension like `100`, `100px`, `50vw`, or `25vh`.
    /// A bare number defaults to pixels.
    ///
    /// # Errors
    /// Returns a message naming the bad quantity or unit.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parse the numeric part, mapping failure to a friendly message.
        fn parse_num<T: FromStr>(q: &str) -> Result<T, String> {
            q.parse().map_err(|_err| format!("invalid quantity {q}"))
        }
        // Fix: trim the whole input first, so padded inputs like ` 100`
        // (e.g. from a comma-separated arg list) parse instead of
        // misidentifying the leading space as the start of the unit.
        let s = s.trim();
        // The quantity is the leading run of digits and dots;
        // everything after it is the unit (default `px`).
        let numeric_end = s.find(|c: char| !c.is_ascii_digit() && c != '.');
        let (quantity, unit) = numeric_end.map(|x| s.split_at(x)).unwrap_or((s, "px"));
        let quantity = quantity.trim();
        let unit = unit.trim();
        match unit {
            "vw" => Ok(PixelDim::WidthPercent(parse_num(quantity)?)),
            "vh" => Ok(PixelDim::HeightPercent(parse_num(quantity)?)),
            "px" => Ok(PixelDim::Pixels(parse_num(quantity)?)),
            _ => Err(format!("invalid unit {unit}")),
        }
    }
}
impl<'de> Deserialize<'de> for PixelDim {
    /// Deserialize from the same string grammar accepted by [FromStr].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        FromStr::from_str(&s).map_err(serde::de::Error::custom)
    }
}
impl fmt::Display for PixelDim {
    /// Format as parseable text: bare pixels, or a two-decimal
    /// percentage with its `vw`/`vh` unit.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            PixelDim::Pixels(px) => write!(f, "{px}"),
            PixelDim::WidthPercent(p) => write!(f, "{p:.2}vw"),
            PixelDim::HeightPercent(p) => write!(f, "{p:.2}vh"),
        }
    }
}

View File

@@ -0,0 +1,184 @@
use std::{fmt::Display, str::FromStr};
use image::DynamicImage;
use serde::{Deserialize, Serialize};
use strum::{Display, EnumString};
use crate::{pixeldim::PixelDim, transformers::ImageTransformer};
/// A compass direction used to anchor a crop area within an image.
///
/// Every variant parses from both its short (`n`) and long (`north`)
/// spelling. Fix: all variants now use `to_string` for the short form
/// — previously only `North`/`West` did, while the other variants used
/// two `serialize` attributes, making `Display` output inconsistent
/// across variants.
#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, Serialize, Deserialize, Display)]
pub enum Direction {
    #[serde(rename = "n")]
    #[strum(to_string = "n", serialize = "north")]
    North,
    #[serde(rename = "e")]
    #[strum(to_string = "e", serialize = "east")]
    East,
    #[serde(rename = "s")]
    #[strum(to_string = "s", serialize = "south")]
    South,
    #[serde(rename = "w")]
    #[strum(to_string = "w", serialize = "west")]
    West,
    #[serde(rename = "c")]
    #[strum(to_string = "c", serialize = "center")]
    Center,
    #[serde(rename = "ne")]
    #[strum(to_string = "ne", serialize = "northeast")]
    NorthEast,
    #[serde(rename = "se")]
    #[strum(to_string = "se", serialize = "southeast")]
    SouthEast,
    #[serde(rename = "nw")]
    #[strum(to_string = "nw", serialize = "northwest")]
    NorthWest,
    #[serde(rename = "sw")]
    #[strum(to_string = "sw", serialize = "southwest")]
    SouthWest,
}
/// Crop an image to the given size.
/// - does not crop width if `w` is greater than image width
/// - does not crop height if `h` is greater than image height
/// - does nothing if `w` or `h` are less than or equal to zero.
#[derive(Debug, Clone, PartialEq)]
pub struct CropTransformer {
    /// Target crop width.
    w: PixelDim,
    /// Target crop height.
    h: PixelDim,
    /// Where the crop window is anchored within the image.
    float: Direction,
}
impl CropTransformer {
    /// Create a new crop transformer.
    pub fn new(w: PixelDim, h: PixelDim, float: Direction) -> Self {
        Self { w, h, float }
    }
    /// Resolve the configured `w`/`h` into concrete pixel values for an
    /// image of the given size. `vw`/`vh` units are percentages of the
    /// image's width/height respectively.
    ///
    /// NOTE(review): the result is NOT clamped to the image size here;
    /// `crop_pos` below subtracts crop from image dimensions, so callers
    /// must clamp first — confirm all call sites do.
    fn crop_dim(&self, img_width: u32, img_height: u32) -> (u32, u32) {
        let crop_width = match self.w {
            PixelDim::Pixels(w) => w,
            PixelDim::WidthPercent(pct) => ((img_width as f32) * pct / 100.0) as u32,
            PixelDim::HeightPercent(pct) => ((img_height as f32) * pct / 100.0) as u32,
        };
        let crop_height = match self.h {
            PixelDim::Pixels(h) => h,
            PixelDim::WidthPercent(pct) => ((img_width as f32) * pct / 100.0) as u32,
            PixelDim::HeightPercent(pct) => ((img_height as f32) * pct / 100.0) as u32,
        };
        (crop_width, crop_height)
    }
    /// Compute the top-left corner of the crop window, anchoring it in
    /// the `self.float` direction. Requires `crop_width <= img_width`
    /// and `crop_height <= img_height` (subtractions below).
    #[expect(clippy::integer_division)]
    fn crop_pos(
        &self,
        img_width: u32,
        img_height: u32,
        crop_width: u32,
        crop_height: u32,
    ) -> (u32, u32) {
        match self.float {
            Direction::North => {
                let x = (img_width - crop_width) / 2;
                let y = 0;
                (x, y)
            }
            Direction::East => {
                let x = img_width - crop_width;
                let y = (img_height - crop_height) / 2;
                (x, y)
            }
            Direction::South => {
                let x = (img_width - crop_width) / 2;
                let y = img_height - crop_height;
                (x, y)
            }
            Direction::West => {
                let x = 0;
                let y = (img_height - crop_height) / 2;
                (x, y)
            }
            Direction::Center => {
                let x = (img_width - crop_width) / 2;
                let y = (img_height - crop_height) / 2;
                (x, y)
            }
            Direction::NorthEast => {
                let x = img_width - crop_width;
                let y = 0;
                (x, y)
            }
            Direction::SouthEast => {
                let x = img_width - crop_width;
                let y = img_height - crop_height;
                (x, y)
            }
            Direction::NorthWest => {
                let x = 0;
                let y = 0;
                (x, y)
            }
            Direction::SouthWest => {
                let x = 0;
                let y = img_height - crop_height;
                (x, y)
            }
        }
    }
}
impl Display for CropTransformer {
    /// Serialize back to the `crop(w,h,float)` form the chain parser accepts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "crop({},{},{})", self.w, self.h, self.float)
    }
}
impl ImageTransformer for CropTransformer {
    /// Parse `w,h,direction` arguments, e.g. `100vw,50,n`.
    fn parse_args(args: &str) -> Result<Self, String> {
        let args: Vec<&str> = args.split(",").collect();
        if args.len() != 3 {
            return Err(format!("expected 3 args, got {}", args.len()));
        }
        let w = args[0].trim().parse::<PixelDim>()?;
        let h = args[1].trim().parse::<PixelDim>()?;
        let direction = args[2].trim();
        let direction = Direction::from_str(direction)
            .map_err(|_err| format!("invalid direction {direction}"))?;
        Ok(Self {
            w,
            h,
            float: direction,
        })
    }
    /// Crop `input` in place. Dimensions larger than the image leave
    /// that axis uncropped; zero dimensions make this a no-op.
    fn transform(&self, input: &mut DynamicImage) {
        let (img_width, img_height) = (input.width(), input.height());
        let (crop_width, crop_height) = self.crop_dim(img_width, img_height);
        // Fix: clamp each axis to the image size, per the documented
        // contract. Previously, if ONE axis was within bounds but the
        // other exceeded the image, crop_pos would compute
        // `img - crop` on the oversized axis and underflow (panicking
        // in debug builds).
        let crop_width = crop_width.min(img_width);
        let crop_height = crop_height.min(img_height);
        if (crop_width < img_width || crop_height < img_height) && crop_width > 0 && crop_height > 0
        {
            let (x, y) = self.crop_pos(img_width, img_height, crop_width, crop_height);
            *input = input.crop(x, y, crop_width, crop_height);
        }
    }
}

View File

@@ -0,0 +1,82 @@
use std::fmt::Display;
use image::{DynamicImage, imageops::FilterType};
use crate::{pixeldim::PixelDim, transformers::ImageTransformer};
/// Scale an image down (preserving aspect ratio) so it fits within
/// maximum width `w` and maximum height `h`.
#[derive(Debug, Clone, PartialEq)]
pub struct MaxDimTransformer {
    /// Maximum width; a `vh` value disables the width limit.
    w: PixelDim,
    /// Maximum height; a `vw` value disables the height limit.
    h: PixelDim,
}
impl MaxDimTransformer {
    /// Create a new max-dimension transformer.
    pub fn new(w: PixelDim, h: PixelDim) -> Self {
        Self { w, h }
    }
    /// Compute the target size for an image of the given dimensions.
    ///
    /// A `vh` width limit (or `vw` height limit) means "no limit" on
    /// that axis. If the image already fits, its size is returned
    /// unchanged; otherwise both axes are scaled by the same (smaller)
    /// ratio so the aspect ratio is preserved.
    ///
    /// NOTE(review): a limit of `0` produces a 0×0 target; confirm the
    /// `image` crate's `resize` tolerates that before relying on it.
    fn target_dim(&self, img_width: u32, img_height: u32) -> (u32, u32) {
        let max_width = match self.w {
            PixelDim::Pixels(w) => Some(w),
            PixelDim::WidthPercent(pct) => Some(((img_width as f32) * pct / 100.0) as u32),
            PixelDim::HeightPercent(_) => None,
        };
        let max_height = match self.h {
            PixelDim::Pixels(h) => Some(h),
            PixelDim::HeightPercent(pct) => Some(((img_height as f32) * pct / 100.0) as u32),
            PixelDim::WidthPercent(_) => None,
        };
        // Already within both limits: no resize needed.
        if max_width.map(|x| img_width <= x).unwrap_or(true)
            && max_height.map(|x| img_height <= x).unwrap_or(true)
        {
            return (img_width, img_height);
        }
        // Scale by the most restrictive axis.
        let width_ratio = max_width
            .map(|x| x as f32 / img_width as f32)
            .unwrap_or(1.0);
        let height_ratio = max_height
            .map(|x| x as f32 / img_height as f32)
            .unwrap_or(1.0);
        let ratio = width_ratio.min(height_ratio);
        (
            (img_width as f32 * ratio) as u32,
            (img_height as f32 * ratio) as u32,
        )
    }
}
impl Display for MaxDimTransformer {
    /// Serialize back to the `maxdim(w,h)` form the chain parser accepts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "maxdim({},{})", self.w, self.h)
    }
}
impl ImageTransformer for MaxDimTransformer {
    /// Parse `w,h` arguments, e.g. `50,100vh`.
    fn parse_args(args: &str) -> Result<Self, String> {
        let args: Vec<&str> = args.split(",").collect();
        if args.len() != 2 {
            return Err(format!("expected 2 args, got {}", args.len()));
        }
        // Fix: trim each argument so `maxdim(50, 100)` parses just like
        // `maxdim(50,100)`, consistent with CropTransformer::parse_args.
        let w = args[0].trim().parse::<PixelDim>()?;
        let h = args[1].trim().parse::<PixelDim>()?;
        Ok(Self { w, h })
    }
    /// Resize `input` in place (preserving aspect ratio) so it fits
    /// within the configured limits. No-op when already small enough.
    fn transform(&self, input: &mut DynamicImage) {
        let (img_width, img_height) = (input.width(), input.height());
        let (target_width, target_height) = self.target_dim(img_width, img_height);
        // Only resize if needed
        if target_width != img_width || target_height != img_height {
            *input = input.resize(target_width, target_height, FilterType::Lanczos3);
        }
    }
}

View File

@@ -0,0 +1,165 @@
use image::{DynamicImage, ImageFormat};
use std::fmt;
use std::fmt::{Debug, Display};
use std::str::FromStr;
mod crop;
pub use crop::*;
mod maxdim;
pub use maxdim::*;
/// A single, named image transformation that can be parsed from and
/// serialized to `name(args)` text.
pub trait ImageTransformer
where
    Self: PartialEq,
    Self: Sized + Clone,
    Self: Display + Debug,
{
    /// Transform the given image in place
    fn transform(&self, input: &mut DynamicImage);
    /// Parse an arg string.
    ///
    /// `name({arg_string})`
    fn parse_args(args: &str) -> Result<Self, String>;
}
use serde::{Deserialize, Deserializer};
/// An enum of all [`ImageTransformer`]s
#[derive(Debug, Clone, PartialEq)]
pub enum TransformerEnum {
    /// Usage: `maxdim(w, h)`
    ///
    /// Scale the image so its width is smaller than `w`
    /// and its height is smaller than `h`. Aspect ratio is preserved.
    ///
    /// To only limit the size of one dimension, use `vw` or `vh`.
    /// For example, `maxdim(50,100vh)` will not limit width.
    MaxDim(MaxDimTransformer),
    /// Usage: `crop(w, h, float)`
    ///
    /// Crop the image to at most `w` by `h` pixels,
    /// floating the crop area in the specified direction.
    ///
    /// Directions are one of:
    /// - Cardinal: n,e,s,w
    /// - Diagonal: ne,nw,se,sw,
    /// - Centered: c
    ///
    /// Examples:
    /// - `crop(100vw,50,n)` gets the top 50 pixels of the image \
    ///   (or fewer, if the image's height is smaller than 50)
    ///
    /// To only limit the size of one dimension, use `vw` or `vh`,
    /// as in the example above (`100vw` leaves the width uncropped).
    Crop(CropTransformer),
    /// Usage: `format(format)`
    ///
    /// Transcode the image to the given format.
    /// This step must be last, and cannot be provided
    /// more than once.
    ///
    /// Valid formats:
    /// - bmp
    /// - gif
    /// - ico
    /// - jpeg or jpg
    /// - png
    /// - qoi
    /// - webp
    ///
    /// Example:
    /// - `format(png)`
    ///
    /// When transcoding an animated gif, the first frame is taken
    /// and all others are thrown away. This happens even if we
    /// transcode from a gif to a gif.
    Format { format: ImageFormat },
}
impl FromStr for TransformerEnum {
    type Err = String;
    /// Parse a single step of the form `name(args)`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        let (name, args) = {
            // Byte index just past the opening parenthesis.
            let name_len = match s.find('(') {
                Some(x) => x + 1,
                None => {
                    return Err(format!(
                        "invalid transformation {s}. Must look like name(args)."
                    ));
                }
            };
            // Scan forward for the matching `)`, tracking nesting depth;
            // `end` lands on the byte index of the matching `)`.
            let mut balance = 1;
            let mut end = name_len;
            for i in s[name_len..].bytes() {
                match i {
                    b')' => balance -= 1,
                    b'(' => balance += 1,
                    _ => {}
                }
                if balance == 0 {
                    break;
                }
                end += 1;
            }
            if balance != 0 {
                return Err(format!("mismatched parenthesis in {s}"));
            }
            let name = s[0..name_len - 1].trim();
            let args = s[name_len..end].trim();
            // Anything after the closing `)` makes the step invalid.
            let trail = s[end + 1..].trim();
            if !trail.is_empty() {
                return Err(format!(
                    "invalid transformation {s}. Must look like name(args)."
                ));
            }
            (name, args)
        };
        // Dispatch on the step name; each transformer parses its own args.
        match name {
            "maxdim" => Ok(Self::MaxDim(MaxDimTransformer::parse_args(args)?)),
            "crop" => Ok(Self::Crop(CropTransformer::parse_args(args)?)),
            "format" => Ok(TransformerEnum::Format {
                format: ImageFormat::from_extension(args)
                    .ok_or(format!("invalid image format {args}"))?,
            }),
            _ => Err(format!("unknown transformation {name}")),
        }
    }
}
impl<'de> Deserialize<'de> for TransformerEnum {
    /// Deserialize from the same `name(args)` grammar accepted by [FromStr].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        s.parse().map_err(serde::de::Error::custom)
    }
}
impl Display for TransformerEnum {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TransformerEnum::MaxDim(x) => Display::fmt(x, f),
TransformerEnum::Crop(x) => Display::fmt(x, f),
TransformerEnum::Format { format } => {
write!(f, "format({})", format.extensions_str()[0])
}
}
}
}

View File

@@ -14,12 +14,16 @@ tokio = { workspace = true }
serde = { workspace = true }
tracing = { workspace = true }
num = { workspace = true }
thiserror = { workspace = true }
envy = { workspace = true }
dotenvy = { workspace = true }
clap = { workspace = true, optional = true }
anstyle = { workspace = true, optional = true }
tracing-subscriber = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
tracing-loki = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
url = { workspace = true, optional = true }

View File

@@ -0,0 +1,106 @@
use serde::de::DeserializeOwned;
use std::{
collections::HashMap,
env::VarError,
io::ErrorKind,
path::{Path, PathBuf},
};
use thiserror::Error;
/// An error we might encounter when loading an env
#[derive(Debug, Error)]
pub enum EnvLoadError {
    /// Filesystem failure while reading `.env`.
    #[error("i/o error")]
    IOError(#[from] std::io::Error),
    /// Error reading an environment variable (see [`std::env::VarError`]).
    #[error("varerror")]
    VarError(#[from] VarError),
    /// A line in the `.env` file could not be parsed.
    #[error("line parse error: `{on_line}` at char {at_char}")]
    LineParse { on_line: String, at_char: usize },
    /// Any other error reported by `dotenvy`.
    #[error("other dotenvy error")]
    Other(#[from] dotenvy::Error),
    /// A required configuration value was not set.
    #[error("missing value {0}")]
    MissingValue(String),
    /// The config deserializer rejected a value.
    #[error("parse error: {0}")]
    OtherParseError(String),
}
/// The result of loading configuration, recording whether a `.env` file was found.
pub enum LoadedEnv<T> {
    /// We loaded config from `.env` and env vars
    FoundFile { config: T, path: PathBuf },
    /// We could not find `.env` and only loaded env vars
    OnlyVars(T),
}
impl<T> LoadedEnv<T> {
    /// Borrow the loaded configuration, regardless of where it came from.
    pub fn get_config(&self) -> &T {
        // Both variants carry the config; an or-pattern extracts it uniformly.
        let (Self::FoundFile { config, .. } | Self::OnlyVars(config)) = self;
        config
    }
}
/// Load the configuration type `T` from the current environment,
/// including the `.env` if it exists.
#[expect(clippy::wildcard_enum_match_arm)]
pub fn load_env<T: DeserializeOwned>() -> Result<LoadedEnv<T>, EnvLoadError> {
let env_path = match dotenvy::dotenv() {
Ok(path) => Some(path),
Err(dotenvy::Error::Io(err)) => match err.kind() {
ErrorKind::NotFound => None,
_ => return Err(EnvLoadError::IOError(err)),
},
Err(dotenvy::Error::EnvVar(err)) => {
return Err(EnvLoadError::VarError(err));
}
Err(dotenvy::Error::LineParse(on_line, at_char)) => {
return Err(EnvLoadError::LineParse { on_line, at_char });
}
Err(err) => {
return Err(EnvLoadError::Other(err));
}
};
match envy::from_env::<T>() {
Ok(config) => {
if let Some(path) = env_path {
return Ok(LoadedEnv::FoundFile { path, config });
} else {
return Ok(LoadedEnv::OnlyVars(config));
}
}
Err(envy::Error::MissingValue(value)) => {
return Err(EnvLoadError::MissingValue(value.into()));
}
Err(envy::Error::Custom(message)) => {
return Err(EnvLoadError::OtherParseError(message));
}
};
}
/// Load an .env file to a hashmap.
///
/// This function does not read the current env,
/// only parsing vars explicitly declared in the given file.
pub fn load_env_dict(p: impl AsRef<Path>) -> Result<HashMap<String, String>, EnvLoadError> {
let mut out = HashMap::new();
for item in dotenvy::from_filename_iter(p)? {
let (key, val) = item?;
out.insert(key, val);
}
return Ok(out);
}

View File

@@ -1,5 +1,6 @@
//! This crate contains various bits of useful code that don't fit anywhere else.
pub mod env;
pub mod mime;
pub mod misc;
pub mod strings;

View File

@@ -120,8 +120,10 @@ impl LogCliVQ {
preset = LogFilterPreset::Info
} else if level_i == 1 {
preset = LogFilterPreset::Debug
} else if level_i >= 2 {
} else if level_i == 2 {
preset = LogFilterPreset::Trace
} else if level_i >= 3 {
preset = LogFilterPreset::HyperTrace
} else {
unreachable!()
}
@@ -153,6 +155,9 @@ pub enum LogFilterPreset {
/// Standard "trace" log level
Trace,
/// Trace EVERYTHING
HyperTrace,
/// Filter for loki subscriber.
///
/// This is similar to `Trace`,
@@ -240,6 +245,19 @@ impl LogFilterPreset {
service: LogLevel::Trace,
},
Self::HyperTrace => LoggingConfig {
other: LogLevel::Trace,
silence: LogLevel::Trace,
// Libs
libservice: LogLevel::Trace,
toolbox: LogLevel::Trace,
// Bins
webpage: LogLevel::Trace,
service: LogLevel::Trace,
},
Self::Loki => LoggingConfig {
other: LogLevel::Trace,
silence: LogLevel::Warn,

View File

@@ -64,6 +64,8 @@ pub enum MimeType {
Jpg,
/// Portable Network Graphics (image/png)
Png,
/// Quite ok Image Format
Qoi,
/// Scalable Vector Graphics (image/svg+xml)
Svg,
/// Tagged Image File Format (image/tiff)
@@ -217,7 +219,9 @@ impl<'de> Deserialize<'de> for MimeType {
}
}
//
// MARK: misc
//
impl Default for MimeType {
fn default() -> Self {
@@ -243,6 +247,27 @@ impl From<&MimeType> for String {
}
}
//
// MARK: fromstr
//
impl MimeType {
/// Parse a mimetype from a string that may contain
/// whitespace or ";" parameters.
///
/// Parameters are discarded, write your own parser if you need them.
pub fn from_header(s: &str) -> Result<Self, <Self as FromStr>::Err> {
let s = s.trim();
let semi = s.find(';').unwrap_or(s.len());
let space = s.find(' ').unwrap_or(s.len());
let limit = semi.min(space);
let s = &s[0..limit];
let s = s.trim();
return Self::from_str(s);
}
}
impl FromStr for MimeType {
type Err = std::convert::Infallible;
@@ -251,7 +276,7 @@ impl FromStr for MimeType {
Ok(match s {
"application/octet-stream" => Self::Blob,
// MARK: Audio
// Audio
"audio/aac" => Self::Aac,
"audio/flac" => Self::Flac,
"audio/midi" | "audio/x-midi" => Self::Midi,
@@ -260,7 +285,7 @@ impl FromStr for MimeType {
"audio/wav" => Self::Wav,
"audio/webm" => Self::Weba,
// MARK: Video
// Video
"video/x-msvideo" => Self::Avi,
"video/mp4" => Self::Mp4,
"video/mpeg" => Self::Mpeg,
@@ -270,7 +295,7 @@ impl FromStr for MimeType {
"video/3gpp" => Self::ThreeGp,
"video/3gpp2" => Self::ThreeG2,
// MARK: Images
// Images
"image/apng" => Self::Apng,
"image/avif" => Self::Avif,
"image/bmp" => Self::Bmp,
@@ -281,8 +306,9 @@ impl FromStr for MimeType {
"image/svg+xml" => Self::Svg,
"image/tiff" => Self::Tiff,
"image/webp" => Self::Webp,
"image/qoi" => Self::Qoi,
// MARK: Text
// Text
"text/plain" => Self::Text,
"text/css" => Self::Css,
"text/csv" => Self::Csv,
@@ -292,11 +318,11 @@ impl FromStr for MimeType {
"application/ld+json" => Self::JsonLd,
"application/xml" | "text/xml" => Self::Xml,
// MARK: Documents
// Documents
"application/pdf" => Self::Pdf,
"application/rtf" => Self::Rtf,
// MARK: Archives
// Archives
"application/x-freearc" => Self::Arc,
"application/x-bzip" => Self::Bz,
"application/x-bzip2" => Self::Bz2,
@@ -308,14 +334,14 @@ impl FromStr for MimeType {
"application/x-tar" => Self::Tar,
"application/zip" | "application/x-zip-compressed" => Self::Zip,
// MARK: Fonts
// Fonts
"application/vnd.ms-fontobject" => Self::Eot,
"font/otf" => Self::Otf,
"font/ttf" => Self::Ttf,
"font/woff" => Self::Woff,
"font/woff2" => Self::Woff2,
// MARK: Applications
// Applications
"application/x-abiword" => Self::Abiword,
"application/vnd.amazon.ebook" => Self::Azw,
"application/x-cdf" => Self::Cda,
@@ -348,6 +374,10 @@ impl FromStr for MimeType {
}
}
//
// MARK: display
//
impl Display for MimeType {
/// Get a string representation of this mimetype.
///
@@ -368,7 +398,7 @@ impl Display for MimeType {
match self {
Self::Blob => write!(f, "application/octet-stream"),
// MARK: Audio
// Audio
Self::Aac => write!(f, "audio/aac"),
Self::Flac => write!(f, "audio/flac"),
Self::Midi => write!(f, "audio/midi"),
@@ -378,7 +408,7 @@ impl Display for MimeType {
Self::Wav => write!(f, "audio/wav"),
Self::Weba => write!(f, "audio/webm"),
// MARK: Video
// Video
Self::Avi => write!(f, "video/x-msvideo"),
Self::Mp4 => write!(f, "video/mp4"),
Self::Mpeg => write!(f, "video/mpeg"),
@@ -388,7 +418,7 @@ impl Display for MimeType {
Self::ThreeGp => write!(f, "video/3gpp"),
Self::ThreeG2 => write!(f, "video/3gpp2"),
// MARK: Images
// Images
Self::Apng => write!(f, "image/apng"),
Self::Avif => write!(f, "image/avif"),
Self::Bmp => write!(f, "image/bmp"),
@@ -399,8 +429,9 @@ impl Display for MimeType {
Self::Svg => write!(f, "image/svg+xml"),
Self::Tiff => write!(f, "image/tiff"),
Self::Webp => write!(f, "image/webp"),
Self::Qoi => write!(f, "image/qoi"),
// MARK: Text
// Text
Self::Text => write!(f, "text/plain"),
Self::Css => write!(f, "text/css"),
Self::Csv => write!(f, "text/csv"),
@@ -410,11 +441,11 @@ impl Display for MimeType {
Self::JsonLd => write!(f, "application/ld+json"),
Self::Xml => write!(f, "application/xml"),
// MARK: Documents
// Documents
Self::Pdf => write!(f, "application/pdf"),
Self::Rtf => write!(f, "application/rtf"),
// MARK: Archives
// Archives
Self::Arc => write!(f, "application/x-freearc"),
Self::Bz => write!(f, "application/x-bzip"),
Self::Bz2 => write!(f, "application/x-bzip2"),
@@ -426,14 +457,14 @@ impl Display for MimeType {
Self::Tar => write!(f, "application/x-tar"),
Self::Zip => write!(f, "application/zip"),
// MARK: Fonts
// Fonts
Self::Eot => write!(f, "application/vnd.ms-fontobject"),
Self::Otf => write!(f, "font/otf"),
Self::Ttf => write!(f, "font/ttf"),
Self::Woff => write!(f, "font/woff"),
Self::Woff2 => write!(f, "font/woff2"),
// MARK: Applications
// Applications
Self::Abiword => write!(f, "application/x-abiword"),
Self::Azw => write!(f, "application/vnd.amazon.ebook"),
Self::Cda => write!(f, "application/x-cdf"),
@@ -471,13 +502,15 @@ impl Display for MimeType {
}
impl MimeType {
// Must match `From<String>` above
//
// MARK: from extension
//
/// Try to guess a file's mime type from its extension.
/// `ext` should NOT start with a dot.
pub fn from_extension(ext: &str) -> Option<Self> {
Some(match ext {
// MARK: Audio
// Audio
"aac" => Self::Aac,
"flac" => Self::Flac,
"mid" | "midi" => Self::Midi,
@@ -487,7 +520,7 @@ impl MimeType {
"wav" => Self::Wav,
"weba" => Self::Weba,
// MARK: Video
// Video
"avi" => Self::Avi,
"mp4" => Self::Mp4,
"mpeg" => Self::Mpeg,
@@ -497,7 +530,7 @@ impl MimeType {
"3gp" => Self::ThreeGp,
"3g2" => Self::ThreeG2,
// MARK: Images
// Images
"apng" => Self::Apng,
"avif" => Self::Avif,
"bmp" => Self::Bmp,
@@ -508,8 +541,9 @@ impl MimeType {
"svg" => Self::Svg,
"tif" | "tiff" => Self::Tiff,
"webp" => Self::Webp,
"qoi" => Self::Qoi,
// MARK: Text
// Text
"txt" => Self::Text,
"css" => Self::Css,
"csv" => Self::Csv,
@@ -519,11 +553,11 @@ impl MimeType {
"jsonld" => Self::JsonLd,
"xml" => Self::Xml,
// MARK: Documents
// Documents
"pdf" => Self::Pdf,
"rtf" => Self::Rtf,
// MARK: Archives
// Archives
"arc" => Self::Arc,
"bz" => Self::Bz,
"bz2" => Self::Bz2,
@@ -535,14 +569,14 @@ impl MimeType {
"tar" => Self::Tar,
"zip" => Self::Zip,
// MARK: Fonts
// Fonts
"eot" => Self::Eot,
"otf" => Self::Otf,
"ttf" => Self::Ttf,
"woff" => Self::Woff,
"woff2" => Self::Woff2,
// MARK: Applications
// Applications
"abw" => Self::Abiword,
"azw" => Self::Azw,
"cda" => Self::Cda,
@@ -569,100 +603,209 @@ impl MimeType {
})
}
//
// MARK: to extension
//
/// Get the extension we use for files with this type.
/// Includes a dot. Might be the empty string.
pub fn extension(&self) -> &str {
/// Never includes a dot.
pub fn extension(&self) -> Option<&'static str> {
match self {
Self::Blob => "",
Self::Other(_) => "",
Self::Blob => None,
Self::Other(_) => None,
// MARK: Audio
Self::Aac => ".aac",
Self::Flac => ".flac",
Self::Midi => ".midi",
Self::Mp3 => ".mp3",
Self::Oga => ".oga",
Self::Opus => ".opus",
Self::Wav => ".wav",
Self::Weba => ".weba",
// Audio
Self::Aac => Some("aac"),
Self::Flac => Some("flac"),
Self::Midi => Some("midi"),
Self::Mp3 => Some("mp3"),
Self::Oga => Some("oga"),
Self::Opus => Some("opus"),
Self::Wav => Some("wav"),
Self::Weba => Some("weba"),
// MARK: Video
Self::Avi => ".avi",
Self::Mp4 => ".mp4",
Self::Mpeg => ".mpeg",
Self::Ogv => ".ogv",
Self::Ts => ".ts",
Self::WebmVideo => ".webm",
Self::ThreeGp => ".3gp",
Self::ThreeG2 => ".3g2",
// Video
Self::Avi => Some("avi"),
Self::Mp4 => Some("mp4"),
Self::Mpeg => Some("mpeg"),
Self::Ogv => Some("ogv"),
Self::Ts => Some("ts"),
Self::WebmVideo => Some("webm"),
Self::ThreeGp => Some("3gp"),
Self::ThreeG2 => Some("3g2"),
// MARK: Images
Self::Apng => ".apng",
Self::Avif => ".avif",
Self::Bmp => ".bmp",
Self::Gif => ".gif",
Self::Ico => ".ico",
Self::Jpg => ".jpg",
Self::Png => ".png",
Self::Svg => ".svg",
Self::Tiff => ".tiff",
Self::Webp => ".webp",
// Images
Self::Apng => Some("apng"),
Self::Avif => Some("avif"),
Self::Bmp => Some("bmp"),
Self::Gif => Some("gif"),
Self::Ico => Some("ico"),
Self::Jpg => Some("jpg"),
Self::Png => Some("png"),
Self::Svg => Some("svg"),
Self::Tiff => Some("tiff"),
Self::Webp => Some("webp"),
Self::Qoi => Some("qoi"),
// MARK: Text
Self::Text => ".txt",
Self::Css => ".css",
Self::Csv => ".csv",
Self::Html => ".html",
Self::Javascript => ".js",
Self::Json => ".json",
Self::JsonLd => ".jsonld",
Self::Xml => ".xml",
// Text
Self::Text => Some("txt"),
Self::Css => Some("css"),
Self::Csv => Some("csv"),
Self::Html => Some("html"),
Self::Javascript => Some("js"),
Self::Json => Some("json"),
Self::JsonLd => Some("jsonld"),
Self::Xml => Some("xml"),
// MARK: Documents
Self::Pdf => ".pdf",
Self::Rtf => ".rtf",
// Documents
Self::Pdf => Some("pdf"),
Self::Rtf => Some("rtf"),
// MARK: Archives
Self::Arc => ".arc",
Self::Bz => ".bz",
Self::Bz2 => ".bz2",
Self::Gz => ".gz",
Self::Jar => ".jar",
Self::Ogg => ".ogx",
Self::Rar => ".rar",
Self::SevenZ => ".7z",
Self::Tar => ".tar",
Self::Zip => ".zip",
// Archives
Self::Arc => Some("arc"),
Self::Bz => Some("bz"),
Self::Bz2 => Some("bz2"),
Self::Gz => Some("gz"),
Self::Jar => Some("jar"),
Self::Ogg => Some("ogx"),
Self::Rar => Some("rar"),
Self::SevenZ => Some("7z"),
Self::Tar => Some("tar"),
Self::Zip => Some("zip"),
// MARK: Fonts
Self::Eot => ".eot",
Self::Otf => ".otf",
Self::Ttf => ".ttf",
Self::Woff => ".woff",
Self::Woff2 => ".woff2",
// Fonts
Self::Eot => Some("eot"),
Self::Otf => Some("otf"),
Self::Ttf => Some("ttf"),
Self::Woff => Some("woff"),
Self::Woff2 => Some("woff2"),
// MARK: Applications
Self::Abiword => ".abw",
Self::Azw => ".azw",
Self::Cda => ".cda",
Self::Csh => ".csh",
Self::Doc => ".doc",
Self::Docx => ".docx",
Self::Epub => ".epub",
Self::Ics => ".ics",
Self::Mpkg => ".mpkg",
Self::Odp => ".odp",
Self::Ods => ".ods",
Self::Odt => ".odt",
Self::Php => ".php",
Self::Ppt => ".ppt",
Self::Pptx => ".pptx",
Self::Sh => ".sh",
Self::Vsd => ".vsd",
Self::Xhtml => ".xhtml",
Self::Xls => ".xls",
Self::Xlsx => ".xlsx",
Self::Xul => ".xul",
// Applications
Self::Abiword => Some("abw"),
Self::Azw => Some("azw"),
Self::Cda => Some("cda"),
Self::Csh => Some("csh"),
Self::Doc => Some("doc"),
Self::Docx => Some("docx"),
Self::Epub => Some("epub"),
Self::Ics => Some("ics"),
Self::Mpkg => Some("mpkg"),
Self::Odp => Some("odp"),
Self::Ods => Some("ods"),
Self::Odt => Some("odt"),
Self::Php => Some("php"),
Self::Ppt => Some("ppt"),
Self::Pptx => Some("pptx"),
Self::Sh => Some("sh"),
Self::Vsd => Some("vsd"),
Self::Xhtml => Some("xhtml"),
Self::Xls => Some("xls"),
Self::Xlsx => Some("xlsx"),
Self::Xul => Some("xul"),
}
}
//
// MARK: is_text
//
/// Returns true if this MIME type is always plain text.
pub fn is_text(&self) -> bool {
    // Exhaustive on purpose: adding a MimeType variant must force
    // an explicit decision here rather than silently defaulting.
    match self {
        // Text formats
        Self::Text
        | Self::Css
        | Self::Csv
        | Self::Html
        | Self::Javascript
        | Self::Json
        | Self::JsonLd
        | Self::Xml
        | Self::Svg
        | Self::Ics
        | Self::Xhtml
        // Script formats
        | Self::Csh
        | Self::Php
        | Self::Sh => true,
        // Everything else is binary (or unknown)
        Self::Other(_)
        | Self::Blob
        // Audio
        | Self::Aac
        | Self::Flac
        | Self::Midi
        | Self::Mp3
        | Self::Oga
        | Self::Opus
        | Self::Wav
        | Self::Weba
        // Video
        | Self::Avi
        | Self::Mp4
        | Self::Mpeg
        | Self::Ogv
        | Self::Ts
        | Self::WebmVideo
        | Self::ThreeGp
        | Self::ThreeG2
        // Images
        | Self::Apng
        | Self::Avif
        | Self::Bmp
        | Self::Gif
        | Self::Ico
        | Self::Jpg
        | Self::Png
        | Self::Qoi
        | Self::Tiff
        | Self::Webp
        // Documents
        | Self::Pdf
        | Self::Rtf
        // Archives
        | Self::Arc
        | Self::Bz
        | Self::Bz2
        | Self::Gz
        | Self::Jar
        | Self::Ogg
        | Self::Rar
        | Self::SevenZ
        | Self::Tar
        | Self::Zip
        // Fonts
        | Self::Eot
        | Self::Otf
        | Self::Ttf
        | Self::Woff
        | Self::Woff2
        // Applications
        | Self::Abiword
        | Self::Azw
        | Self::Cda
        | Self::Doc
        | Self::Docx
        | Self::Epub
        | Self::Mpkg
        | Self::Odp
        | Self::Ods
        | Self::Odt
        | Self::Ppt
        | Self::Pptx
        | Self::Vsd
        | Self::Xls
        | Self::Xlsx
        | Self::Xul => false,
    }
}
}

View File

@@ -1,15 +0,0 @@
[package]
name = "macro-assets"
version = { workspace = true }
rust-version = { workspace = true }
edition = { workspace = true }
[lints]
workspace = true
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }

View File

@@ -1,309 +0,0 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{
Expr, Ident, LitStr, Result, Token, braced,
parse::{Parse, ParseStream},
parse_macro_input,
};
/// A macro for generating static asset handlers with compile-time embedding.
///
/// This macro generates:
/// - Individual structs for each asset that implement the `assetserver::Asset` trait
/// - Compile-time embedding of asset files using `include_bytes!`
/// - Optionally, an Axum router function that serves all assets
///
/// # Syntax
///
/// ```notrust
/// assets! {
/// prefix: "/assets"
/// router: router_function_name()
///
/// AssetName {
/// source: "path/to/file.ext",
/// target: "/public-url.ext"
/// }
///
/// AnotherAsset {
/// source: "path/to/another.ext",
/// target: "/another-url.ext"
/// }
/// }
/// ```
///
/// # Arguments
///
/// - `prefix`: The URL prefix for all assets (e.g., "/assets")
/// - `router`: (Optional) The name of a function to generate that returns `(&'static str, Router<()>)`
/// with routes for all assets
/// - Asset blocks: Each block defines an asset with:
/// - A name (identifier) for the generated struct
/// - `source`: The file system path to the asset (relative to the current file)
/// - `target`: The URL path where the asset will be served
///
/// # Generated Code
///
/// For each asset, the macro generates:
/// - A struct with the specified name
/// - An `assetserver::Asset` trait implementation containing:
/// - `URL_PREFIX`: The common prefix for all assets
/// - `URL`: The specific URL path for this asset
/// - `BYTES`: The embedded file contents as a byte slice
/// - Documentation showing the original asset definition
///
/// If `router` is specified, also generates a function that returns an Axum router
/// with all assets mounted at their target URLs.
///
/// # Example
///
/// ```notrust
/// assets! {
/// prefix: "/static"
/// router: static_router()
///
/// Logo {
/// source: "../images/logo.png",
/// target: "/logo.png"
/// }
/// }
/// ```
///
/// This generates structs implementing `assetserver::Asset` and optionally a router function:
///
/// ```notrust
/// pub fn static_router() -> (&'static str, ::axum::Router<()>) {
/// let router = ::axum::Router::new()
/// .route(Logo::URL, ::axum::routing::get(|| async {
/// (::axum::http::StatusCode::OK, Logo::BYTES)
/// }));
/// ("/static", router)
/// }
/// ```
#[proc_macro]
pub fn assets(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as AssetsInput);
    let prefix = &input.prefix;
    // One unit struct + `assetserver::Asset` impl per declared asset.
    let asset_impls = input.assets.iter().map(|asset| {
        let name = &asset.name;
        let source = &asset.source;
        let target = &asset.target;
        // Generate documentation showing the original asset definition
        let doc = format!(
            "This is an `asset!`\n```notrust\n{} {{\n\tsource: \"{:?}\",\n\ttarget: \"{}\"\n}}\n```",
            name, source, target
        );
        quote! {
            #[expect(clippy::allow_attributes)]
            #[allow(non_camel_case_types)]
            #[doc = #doc]
            pub struct #name {}
            impl ::assetserver::Asset for #name {
                const URL_PREFIX: &'static str = #prefix;
                const URL_POSTFIX: &'static str = #target;
                const URL: &'static str = concat!(#prefix, #target);
                const BYTES: &'static [u8] = #source;
            }
        }
    });
    // Generate the router function if specified
    let router_fn = if let Some(router_name) = &input.router {
        // One `.route(...)` call per asset, chained onto the Router builder.
        let route_definitions = input.assets.iter().map(|asset| {
            let name = &asset.name;
            // Optional extra response headers; defaults to an empty header list.
            let headers = asset
                .headers
                .as_ref()
                .map(|x| quote! { #x })
                .unwrap_or(quote! { [] });
            quote! {
                .route(#name::URL_POSTFIX, ::axum::routing::get(|| async {
                    (
                        ::axum::http::StatusCode::OK,
                        #headers,
                        #name::BYTES
                    )
                }))
            }
        });
        let router_doc = format!(
            "Generated router function that serves {} asset(s) with prefix \"{}\"",
            input.assets.len(),
            prefix
        );
        // The generated router compresses responses with every encoding
        // tower-http supports, using its default compression predicate.
        quote! {
            #[doc = #router_doc]
            pub fn #router_name() -> (&'static str, ::axum::Router<()>) {
                use ::tower_http::compression::{CompressionLayer, DefaultPredicate};
                let compression: CompressionLayer = CompressionLayer::new()
                    .br(true)
                    .deflate(true)
                    .gzip(true)
                    .zstd(true)
                    .compress_when(DefaultPredicate::new());
                let router = ::axum::Router::new()
                    #(#route_definitions)*
                    .layer(compression);
                (#prefix, router)
            }
        }
    } else {
        quote! {}
    };
    let expanded = quote! {
        #(#asset_impls)*
        #router_fn
    };
    TokenStream::from(expanded)
}
/// Represents the complete input to the `assets!` macro
struct AssetsInput {
    // Common URL prefix applied to every asset.
    prefix: String,
    // Name of the optional generated router function.
    router: Option<Ident>,
    // The declared assets, in source order.
    assets: Vec<AssetDefinition>,
}
/// Represents a single asset definition within the macro
struct AssetDefinition {
    // Name of the struct generated for this asset.
    name: Ident,
    // Expression producing the embedded bytes.
    source: Expr,
    // Public URL path, appended to the common prefix.
    target: String,
    // Optional expression of extra response headers.
    headers: Option<Expr>,
}
impl Parse for AssetsInput {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Parse "prefix:"
        let _prefix_ident: Ident = input.parse()?;
        let _colon: Token![:] = input.parse()?;
        let prefix_lit: LitStr = input.parse()?;
        let prefix = prefix_lit.value();
        // Try to parse optional "router:" parameter
        let router = if input.peek(Ident) {
            // Peek on a fork so a non-`router` ident (the first asset name)
            // is left unconsumed for the asset-definition loop below.
            let peek_ident: Ident = input.fork().parse()?;
            if peek_ident == "router" {
                let _router_ident: Ident = input.parse()?;
                let _colon: Token![:] = input.parse()?;
                let router_name: Ident = input.parse()?;
                // Parse the parentheses after the function name
                let _paren_content;
                syn::parenthesized!(_paren_content in input);
                Some(router_name)
            } else {
                None
            }
        } else {
            None
        };
        let mut assets = Vec::new();
        // Parse asset definitions until we reach the end
        while !input.is_empty() {
            let asset = input.parse::<AssetDefinition>()?;
            assets.push(asset);
        }
        Ok(AssetsInput {
            prefix,
            router,
            assets,
        })
    }
}
impl Parse for AssetDefinition {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Parse the asset name
        let name: Ident = input.parse()?;
        // Parse the braced content
        let content;
        braced!(content in input);
        // Parse fields in any order
        let mut source: Option<Expr> = None;
        let mut target: Option<String> = None;
        let mut headers: Option<Expr> = None;
        while !content.is_empty() {
            // Parse field name
            let field_name: Ident = content.parse()?;
            let _colon: Token![:] = content.parse()?;
            // Parse field value based on name.
            // Each field may appear at most once; duplicates are a hard error.
            match field_name.to_string().as_str() {
                "source" => {
                    if source.is_some() {
                        return Err(syn::Error::new(
                            field_name.span(),
                            "duplicate 'source' field",
                        ));
                    }
                    source = Some(content.parse()?);
                }
                "target" => {
                    if target.is_some() {
                        return Err(syn::Error::new(
                            field_name.span(),
                            "duplicate 'target' field",
                        ));
                    }
                    let target_lit: LitStr = content.parse()?;
                    target = Some(target_lit.value());
                }
                "headers" => {
                    if headers.is_some() {
                        return Err(syn::Error::new(
                            field_name.span(),
                            "duplicate 'headers' field",
                        ));
                    }
                    headers = Some(content.parse()?);
                }
                _ => {
                    return Err(syn::Error::new(
                        field_name.span(),
                        format!(
                            "unknown field '{}', expected 'source', 'target', or 'headers'",
                            field_name
                        ),
                    ));
                }
            }
            // Parse comma if not at end
            if !content.is_empty() {
                content.parse::<Token![,]>()?;
            }
        }
        // Validate required fields
        let source = source
            .ok_or_else(|| syn::Error::new(name.span(), "missing required field 'source'"))?;
        let target = target
            .ok_or_else(|| syn::Error::new(name.span(), "missing required field 'target'"))?;
        Ok(AssetDefinition {
            name,
            source,
            target,
            headers,
        })
    }
}

View File

@@ -9,16 +9,16 @@ workspace = true
[dependencies]
libservice = { workspace = true }
macro-assets = { workspace = true }
macro-sass = { workspace = true }
assetserver = { workspace = true }
toolbox = { workspace = true }
page = { workspace = true }
md-footnote = { workspace = true }
markdown-it = { workspace = true }
axum = { workspace = true }
tracing = { workspace = true }
maud = { workspace = true }
markdown-it = { workspace = true }
emojis = { workspace = true }
strum = { workspace = true }
chrono = { workspace = true }
@@ -27,5 +27,5 @@ lazy_static = { workspace = true }
toml = { workspace = true }
serde = { workspace = true }
reqwest = { workspace = true }
tower-http = { workspace = true }
tokio = { workspace = true }
tower-http = { workspace = true }

View File

@@ -6,6 +6,7 @@ img {
border-radius: 15px;
border: solid .2rem transparent;
transition: 150ms;
image-rendering: pixelated;
}
img:hover {

View File

@@ -81,9 +81,11 @@ body {
color: var(--fgColor);
}
main {
margin-top: 2ex;
overflow-wrap: break-word;
div.wrapper {
min-height: 100vh;
display: flex;
flex-direction: column;
justify-content: space-between;
}
hr.footline {
@@ -92,18 +94,14 @@ hr.footline {
hr {
border: 1pt dashed;
width: 100%;
}
iframe {
max-width: 90%;
}
.wrapper {
min-height: 100vh;
display: flex;
flex-direction: column;
justify-content: space-between;
}
.footContainer {
padding-top: 0;

View File

@@ -43,33 +43,42 @@ a:hover {
transition: 150ms;
}
footer {
font-size: 1.4rem;
clear: both;
opacity: 0.5;
}
footer {
text-align: left
}
.footnote-definition {
margin: 0 0 0 2rem;
}
.footnote-definition-label {
color: var(--metaColor);
}
.footnote-definition p {
.footnote-item p {
display: inline;
padding: 0 0 0 1rem;
}
hr.footnotes-sep {
margin: 5rem 0 0 0;
}
.footnote-ref > a {
padding: 0 2pt 0.8rem 2pt !important;
}
a.footnote-backref, .footnote-ref > a
{
color: var(--metaColor);
padding: 0 2pt 0 2pt;
}
a.footnote-backref:hover,
.footnote-ref > a:hover
{
color: var(--bgColor);
background-color: var(--metaColor);
}
.footContainer {
display: flex;
flex-wrap: wrap;

View File

@@ -1,511 +0,0 @@
use lazy_static::lazy_static;
use markdown_it::generics::inline::full_link;
use markdown_it::parser::block::{BlockRule, BlockState};
use markdown_it::parser::core::Root;
use markdown_it::parser::inline::{InlineRule, InlineState};
use markdown_it::{MarkdownIt, Node, NodeValue, Renderer};
use maud::{Markup, PreEscaped, Render, html};
use page::{Page, PageMetadata};
use std::str::FromStr;
use crate::components::fa::FAIcon;
use crate::components::mangle::{MangledBetaEmail, MangledGoogleEmail};
use crate::components::misc::Backlinks;
lazy_static! {
    // The shared markdown parser, configured once at first use.
    // This is the cmark plugin set minus the stock link rule (replaced by
    // SmartLink), plus frontmatter, emote, and mdx extensions.
    static ref MdParser: MarkdownIt = {
        let mut md = markdown_it::MarkdownIt::new();
        {
            use markdown_it::plugins::cmark::*;
            inline::newline::add(&mut md);
            inline::escape::add(&mut md);
            inline::backticks::add(&mut md);
            inline::emphasis::add(&mut md);
            // Replaced with smart links
            //inline::link::add(&mut md);
            full_link::add::<false>(&mut md, |href, title| {
                Node::new(SmartLink {
                    url: href.unwrap_or_default(),
                    title,
                })
            });
            inline::image::add(&mut md);
            inline::autolink::add(&mut md);
            inline::entity::add(&mut md);
            block::code::add(&mut md);
            block::fence::add(&mut md);
            block::blockquote::add(&mut md);
            block::hr::add(&mut md);
            block::list::add(&mut md);
            block::reference::add(&mut md);
            block::heading::add(&mut md);
            block::lheading::add(&mut md);
            block::paragraph::add(&mut md);
        }
        markdown_it::plugins::html::add(&mut md);
        // Frontmatter must be recognized before any other block rule.
        md.block.add_rule::<YamlFrontMatter>().before_all();
        md.block.add_rule::<TomlFrontMatter>().before_all();
        // Fix: InlineEmote was previously registered twice (copy-paste
        // duplication); each inline rule needs exactly one registration.
        md.inline.add_rule::<InlineEmote>();
        md.inline.add_rule::<InlineMdx>();
        md
    };
}
/// A borrowed markdown source string; renders to HTML via maud's [`Render`].
pub struct Markdown<'a>(pub &'a str);
impl Render for Markdown<'_> {
    /// Parse the wrapped source and return the rendered HTML, pre-escaped.
    fn render(&self) -> Markup {
        PreEscaped(Self::parse(self.0).render())
    }
}
impl Markdown<'_> {
    /// Parse markdown source into a `markdown_it` AST using the shared parser.
    pub fn parse(md_str: &str) -> Node {
        MdParser.parse(md_str)
    }
}
//
// MARK: helpers
//
/// Try to read page metadata from a markdown file's frontmatter.
/// - returns `none` if there is no frontmatter
/// - returns an error if we fail to parse frontmatter
pub fn meta_from_markdown(root_node: &Node) -> Result<Option<PageMetadata>, toml::de::Error> {
    // Frontmatter, when present, is the first child of the document root.
    let frontmatter = root_node
        .children
        .first()
        .and_then(|node| node.cast::<TomlFrontMatter>());
    match frontmatter {
        Some(fm) => toml::from_str::<PageMetadata>(&fm.content).map(Some),
        None => Ok(None),
    }
}
pub fn page_from_markdown(md: impl Into<String>, default_image: Option<String>) -> Page {
let md: String = md.into();
let md = Markdown::parse(&md);
let mut meta = meta_from_markdown(&md)
.unwrap_or(Some(PageMetadata {
title: "Invalid frontmatter!".into(),
..Default::default()
}))
.unwrap_or_default();
if meta.image.is_none() {
meta.image = default_image
}
let html = PreEscaped(md.render());
Page {
meta,
generate_html: Box::new(move |page, _| {
let html = html.clone();
Box::pin(async move {
html! {
@if let Some(slug) = &page.meta.slug {
(Backlinks(&[("/", "home")], slug))
}
(html)
}
})
}),
..Default::default()
}
}
//
// MARK: extensions
//
//
// MARK: smart link
//
/// Replacement for the stock markdown link node.
///
/// Per the `NodeValue` impl below, URLs not starting with `.` or `/`
/// are treated as external and open in a new tab.
#[derive(Debug)]
pub struct SmartLink {
    pub url: String,
    pub title: Option<String>,
}
impl NodeValue for SmartLink {
    fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
        let mut attrs = node.attrs.clone();
        attrs.push(("href", self.url.clone()));
        if let Some(title) = &self.title {
            attrs.push(("title", title.clone()));
        }
        // Relative (`.`) and site-absolute (`/`) urls are internal;
        // everything else opens in a new tab with safe rel attributes.
        let internal = self.url.starts_with('.') || self.url.starts_with('/');
        if !internal {
            attrs.push(("target", "_blank".into()));
            attrs.push(("rel", "noopener noreferrer".into()));
        }
        fmt.open("a", &attrs);
        fmt.contents(&node.children);
        fmt.close("a");
    }
}
//
// MARK: emote
//
/// Pre-rendered HTML produced from an `:emote:` shortcode.
#[derive(Debug)]
pub struct InlineEmote(String);
impl NodeValue for InlineEmote {
    fn render(&self, _node: &Node, fmt: &mut dyn Renderer) {
        // The HTML was produced at parse time; emit it verbatim.
        fmt.text_raw(self.0.as_str());
    }
}
impl InlineRule for InlineEmote {
    const MARKER: char = ':';
    /// Parse `:code:` at the cursor. `fa-*` codes are tried as
    /// font-awesome icons first; anything that fails falls back to an
    /// emoji shortcode lookup on the full code.
    fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
        let input = &state.src[state.pos..state.pos_max];
        let rest = input.strip_prefix(':')?;
        let close = rest.find(':')?;
        let code = &rest[..close];
        let rendered = code
            .strip_prefix("fa-")
            .and_then(|fa| FAIcon::from_str(fa).ok())
            .map(|icon| icon.render().0)
            .or_else(|| emojis::get_by_shortcode(code).map(|e| e.to_string()))?;
        // +2 accounts for both `:` delimiters.
        Some((Node::new(InlineEmote(rendered)), close + 2))
    }
}
//
// MARK: mdx
//
/// The raw interior of a balanced `{...}` mdx group, interpreted at render time.
#[derive(Debug)]
pub struct InlineMdx(String);
impl NodeValue for InlineMdx {
    fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
        // Try each mdx handler in turn; if none claims the content,
        // fall back to rendering it as inline code.
        if !mdx_style(&self.0, node, fmt) && !mdx_include(&self.0, node, fmt) {
            fmt.open("code", &[]);
            fmt.text(&self.0);
            fmt.close("code");
        }
    }
}
impl InlineRule for InlineMdx {
    const MARKER: char = '{';
    // Consume a balanced `{...}` group at the cursor and store its
    // interior verbatim; rendering decides what it means later.
    fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
        let input = &state.src[state.pos..state.pos_max];
        if !input.starts_with('{') {
            return None;
        }
        // Scan forward (byte offsets) to the matching close brace,
        // tracking nesting depth.
        let mut balance = 1;
        let mut end = 1;
        for i in input[1..].bytes() {
            match i {
                b'}' => balance -= 1,
                b'{' => balance += 1,
                _ => {}
            }
            if balance == 0 {
                break;
            }
            end += 1;
        }
        // Unbalanced braces: not an mdx group; let other rules try.
        if balance != 0 {
            return None;
        }
        let content = &input[1..end];
        // Consumed length includes both braces.
        Some((Node::new(InlineMdx(content.to_owned())), content.len() + 2))
    }
}
/// Handle `{color(value, "text")}` — render `text` in the given color.
/// Returns false (emitting nothing) if `mdx` is not a `color()` call.
///
/// Fixes a panic: the old quote-stripping slice `&text[1..text.len() - 1]`
/// panicked when `text` was a single quote character (it both starts and
/// ends with the quote, but has length 1).
fn mdx_style(mdx: &str, _node: &Node, fmt: &mut dyn Renderer) -> bool {
    // Parse inside of mdx: `color(value, "text")`
    let mdx = mdx
        .trim()
        .trim_start_matches('{')
        .trim_end_matches('}')
        .trim();
    // The callee name is everything before the first open paren.
    let paren_pos = match mdx.find('(') {
        Some(x) => x,
        None => return false,
    };
    if mdx[..paren_pos].trim() != "color" {
        return false;
    };
    // Find matching closing parenthesis (arguments may nest parens).
    let skip = paren_pos + 1;
    let mut balance = 1;
    let mut end = skip;
    for i in mdx[skip..].bytes() {
        match i {
            b')' => balance -= 1,
            b'(' => balance += 1,
            _ => {}
        }
        if balance == 0 {
            break;
        }
        end += 1;
    }
    if balance != 0 {
        return false;
    }
    let args = mdx[skip..end].trim();
    // Parse arguments: should be "value, text" or "value, \"text\""
    let comma_pos = match args.find(',') {
        Some(x) => x,
        None => return false,
    };
    let value = args[..comma_pos].trim();
    let mut text = args[comma_pos + 1..].trim();
    // Strip quotes from text if present. Require len >= 2 so a lone quote
    // character cannot produce an out-of-range slice.
    let quoted = text.len() >= 2
        && ((text.starts_with('"') && text.ends_with('"'))
            || (text.starts_with('\'') && text.ends_with('\'')));
    if quoted {
        text = &text[1..text.len() - 1];
    }
    // `--var` values become CSS variable references; everything else
    // (hex or named colors) is used directly.
    let style_str = if value.starts_with("--") {
        format!("color:var({value});")
    } else {
        format!("color:{value};")
    };
    fmt.open("span", &[("style", style_str)]);
    fmt.text(text);
    fmt.close("span");
    return true;
}
/// Render an `include(<name>)` mdx expression by substituting a
/// pre-rendered component for the named include.
///
/// Returns `false` when `mdx` is not a well-formed `include(...)` call
/// (unbalanced parens, trailing junk) or the name is unknown, so the
/// caller can fall back to other handlers.
fn mdx_include(mdx: &str, _node: &Node, fmt: &mut dyn Renderer) -> bool {
    // Parse inside of mdx: `include(<args>)`
    let args = {
        let inner = mdx
            .trim()
            .trim_start_matches('{')
            .trim_end_matches('}')
            .trim();
        if !inner.starts_with("include(") {
            return false;
        }
        let skip = "include(".len();

        // Scan for the matching closing parenthesis.
        let mut depth = 1;
        let mut close = skip;
        for byte in inner[skip..].bytes() {
            match byte {
                b'(' => depth += 1,
                b')' => depth -= 1,
                _ => {}
            }
            if depth == 0 {
                break;
            }
            close += 1;
        }
        if depth != 0 {
            return false;
        }

        // Reject anything after the closing parenthesis.
        if !inner[close + 1..].trim().is_empty() {
            return false;
        }
        inner[skip..close].trim()
    };

    let rendered = match args {
        "email_beta" => MangledBetaEmail {}.render().0,
        "email_goog" => MangledGoogleEmail {}.render().0,
        _ => return false,
    };
    fmt.text_raw(&rendered);
    true
}
//
// MARK: yaml frontmatter
//
#[derive(Debug)]
/// A `---` fenced YAML frontmatter block captured at the top of a
/// markdown document. It is parsed so it doesn't leak into the page
/// body, but the YAML content itself is not read anywhere yet.
pub struct YamlFrontMatter {
    // Raw text between the fences (hence the dead_code expectation).
    #[expect(dead_code)]
    pub content: String,
}
impl NodeValue for YamlFrontMatter {
    // Frontmatter is metadata only; it produces no HTML output.
    fn render(&self, _node: &Node, _fmt: &mut dyn Renderer) {}
}
impl BlockRule for YamlFrontMatter {
    /// Capture a `---` fenced frontmatter block.
    ///
    /// Only matches when this is the very first line of the document
    /// root; the closing fence must start with the same run of dashes
    /// as the opening fence. Returns the captured content plus the
    /// number of lines consumed (both fences included).
    fn run(state: &mut BlockState<'_, '_>) -> Option<(Node, usize)> {
        // Frontmatter is only valid at the top of the document root.
        if !state.node.is::<Root>() || state.line != 0 {
            return None;
        }

        // Grab the leading run of dashes; require at least `---`.
        let opening: String = state
            .get_line(state.line)
            .chars()
            .take_while(|&c| c == '-')
            .collect();
        if !opening.starts_with("---") {
            return None;
        }

        // Locate the closing fence; `?` bails if the document ends first.
        let close = (state.line + 1..state.line_max)
            .find(|&l| state.get_line(l).starts_with(&opening))?;

        let (content, _) = state.get_lines(state.line + 1, close, 0, true);
        Some((Node::new(YamlFrontMatter { content }), close + 1))
    }
}
//
// MARK: toml frontmatter
//
#[derive(Debug)]
/// A `+++` fenced TOML frontmatter block captured at the top of a
/// markdown document. Its content is later deserialized into page
/// metadata (see `meta_from_markdown` elsewhere in this crate).
pub struct TomlFrontMatter {
    // Raw text between the fences, kept for later TOML parsing.
    pub content: String,
}
impl NodeValue for TomlFrontMatter {
    // Frontmatter is metadata only; it produces no HTML output.
    fn render(&self, _node: &Node, _fmt: &mut dyn Renderer) {}
}
impl BlockRule for TomlFrontMatter {
    /// Capture a `+++` fenced frontmatter block.
    ///
    /// Mirrors the YAML rule: only matches on the first line of the
    /// document root, and the closing fence must start with the same
    /// run of plus signs as the opening fence. Returns the captured
    /// content plus the number of lines consumed.
    fn run(state: &mut BlockState<'_, '_>) -> Option<(Node, usize)> {
        // Frontmatter is only valid at the top of the document root.
        if !state.node.is::<Root>() || state.line != 0 {
            return None;
        }

        // Grab the leading run of plus signs; require at least `+++`.
        let opening: String = state
            .get_line(state.line)
            .chars()
            .take_while(|&c| c == '+')
            .collect();
        if !opening.starts_with("+++") {
            return None;
        }

        // Locate the closing fence; `?` bails if the document ends first.
        let close = (state.line + 1..state.line_max)
            .find(|&l| state.get_line(l).starts_with(&opening))?;

        let (content, _) = state.get_lines(state.line + 1, close, 0, true);
        Some((Node::new(TomlFrontMatter { content }), close + 1))
    }
}

View File

@@ -0,0 +1,45 @@
use std::str::FromStr;
use markdown_it::parser::inline::{InlineRule, InlineState};
use markdown_it::{Node, NodeValue, Renderer};
use maud::Render;
use crate::components::fa::FAIcon;
#[derive(Debug)]
pub struct InlineEmote(String);
impl NodeValue for InlineEmote {
fn render(&self, _node: &Node, fmt: &mut dyn Renderer) {
fmt.text_raw(self.0.as_str());
}
}
impl InlineRule for InlineEmote {
const MARKER: char = ':';
fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
let input = &state.src[state.pos..state.pos_max];
if !input.starts_with(':') {
return None;
}
let end_idx = input[1..].find(':')? + 1;
let code = &input[1..end_idx];
let mut emote = None;
if emote.is_none()
&& let Some(code) = code.strip_prefix("fa-")
{
emote = FAIcon::from_str(code).ok().map(|x| x.render().0)
}
if emote.is_none() {
emote = emojis::get_by_shortcode(code).map(|x| x.to_string());
}
Some((Node::new(InlineEmote(emote?)), end_idx + 1))
}
}

View File

@@ -0,0 +1,108 @@
use markdown_it::parser::block::{BlockRule, BlockState};
use markdown_it::parser::core::Root;
use markdown_it::{Node, NodeValue, Renderer};
//
// MARK: yaml
//
#[derive(Debug)]
pub struct YamlFrontMatter {
#[expect(dead_code)]
pub content: String,
}
impl NodeValue for YamlFrontMatter {
fn render(&self, _node: &Node, _fmt: &mut dyn Renderer) {}
}
impl BlockRule for YamlFrontMatter {
fn run(state: &mut BlockState<'_, '_>) -> Option<(Node, usize)> {
// check the parent is the document Root
if !state.node.is::<Root>() {
return None;
}
// check we are on the first line of the document
if state.line != 0 {
return None;
}
// check line starts with opening dashes
let opening = state
.get_line(state.line)
.chars()
.take_while(|c| *c == '-')
.collect::<String>();
if !opening.starts_with("---") {
return None;
}
// Search for the end of the block
let mut next_line = state.line;
loop {
next_line += 1;
if next_line >= state.line_max {
return None;
}
let line = state.get_line(next_line);
if line.starts_with(&opening) {
break;
}
}
let (content, _) = state.get_lines(state.line + 1, next_line, 0, true);
Some((Node::new(YamlFrontMatter { content }), next_line + 1))
}
}
//
// MARK: toml
//
#[derive(Debug)]
pub struct TomlFrontMatter {
pub content: String,
}
impl NodeValue for TomlFrontMatter {
fn render(&self, _node: &Node, _fmt: &mut dyn Renderer) {}
}
impl BlockRule for TomlFrontMatter {
fn run(state: &mut BlockState<'_, '_>) -> Option<(Node, usize)> {
if !state.node.is::<Root>() {
return None;
}
if state.line != 0 {
return None;
}
let opening = state
.get_line(state.line)
.chars()
.take_while(|c| *c == '+')
.collect::<String>();
if !opening.starts_with("+++") {
return None;
}
let mut next_line = state.line;
loop {
next_line += 1;
if next_line >= state.line_max {
return None;
}
let line = state.get_line(next_line);
if line.starts_with(&opening) {
break;
}
}
let (content, _) = state.get_lines(state.line + 1, next_line, 0, true);
Some((Node::new(TomlFrontMatter { content }), next_line + 1))
}
}

View File

@@ -0,0 +1,30 @@
use markdown_it::{Node, NodeValue, Renderer};
#[derive(Debug)]
pub struct SmartLink {
pub url: String,
pub title: Option<String>,
}
impl NodeValue for SmartLink {
fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
let mut attrs = node.attrs.clone();
attrs.push(("href", self.url.clone()));
if let Some(title) = &self.title {
attrs.push(("title", title.clone()));
}
let external = !(self.url.starts_with(".") || self.url.starts_with("/"));
// Open external links in a new tab
if external {
attrs.push(("target", "_blank".into()));
attrs.push(("rel", "noopener noreferrer".into()));
}
fmt.open("a", &attrs);
fmt.contents(&node.children);
fmt.close("a");
}
}

View File

@@ -0,0 +1,195 @@
use markdown_it::parser::inline::{InlineRule, InlineState};
use markdown_it::{Node, NodeValue, Renderer};
use maud::Render;
use crate::components::mangle::{MangledBetaEmail, MangledGoogleEmail};
#[derive(Debug)]
pub struct InlineMdx(String);
impl NodeValue for InlineMdx {
fn render(&self, node: &Node, fmt: &mut dyn Renderer) {
if mdx_style(&self.0, node, fmt) {
return;
}
if mdx_include(&self.0, node, fmt) {
return;
}
fmt.open("code", &[]);
fmt.text(&self.0);
fmt.close("code");
}
}
impl InlineRule for InlineMdx {
const MARKER: char = '{';
fn run(state: &mut InlineState<'_, '_>) -> Option<(Node, usize)> {
let input = &state.src[state.pos..state.pos_max];
if !input.starts_with('{') {
return None;
}
let mut balance = 1;
let mut end = 1;
for i in input[1..].bytes() {
match i {
b'}' => balance -= 1,
b'{' => balance += 1,
_ => {}
}
if balance == 0 {
break;
}
end += 1;
}
if balance != 0 {
return None;
}
let content = &input[1..end];
Some((Node::new(InlineMdx(content.to_owned())), content.len() + 2))
}
}
fn mdx_style(mdx: &str, _node: &Node, fmt: &mut dyn Renderer) -> bool {
// Parse inside of mdx: `color(value, "text")`
let mdx = mdx
.trim()
.trim_start_matches('{')
.trim_end_matches('}')
.trim();
// Find the function name (everything before the opening parenthesis)
let paren_pos = match mdx.find('(') {
Some(x) => x,
None => return false,
};
if mdx[..paren_pos].trim() != "color" {
return false;
};
// Find matching closing parenthesis
let skip = paren_pos + 1;
let mut balance = 1;
let mut end = skip;
for i in mdx[skip..].bytes() {
match i {
b')' => balance -= 1,
b'(' => balance += 1,
_ => {}
}
if balance == 0 {
break;
}
end += 1;
}
if balance != 0 {
return false;
}
let args = mdx[skip..end].trim();
// Parse arguments: should be "value, text" or "value, \"text\""
let comma_pos = match args.find(',') {
Some(x) => x,
None => return false,
};
let value = args[..comma_pos].trim();
let text = args[comma_pos + 1..].trim();
// Strip quotes from text if present
let text = if (text.starts_with('"') && text.ends_with('"'))
|| (text.starts_with('\'') && text.ends_with('\''))
{
&text[1..text.len() - 1]
} else {
text
};
let mut style_str = String::new();
if value.starts_with("#") {
style_str.push_str("color:");
style_str.push_str(value);
style_str.push(';');
} else if value.starts_with("--") {
style_str.push_str("color:var(");
style_str.push_str(value);
style_str.push_str(");");
} else {
style_str.push_str("color:");
style_str.push_str(value);
style_str.push(';');
}
fmt.open("span", &[("style", style_str)]);
fmt.text(text);
fmt.close("span");
return true;
}
fn mdx_include(mdx: &str, _node: &Node, fmt: &mut dyn Renderer) -> bool {
// Parse inside of mdx: `include(<args>)`
let args = {
let mdx = mdx
.trim()
.trim_start_matches('{')
.trim_end_matches('}')
.trim();
if !mdx.starts_with("include(") {
return false;
}
let skip = 8;
let mut balance = 1;
let mut end = skip;
for i in mdx[skip..].bytes() {
match i {
b')' => balance -= 1,
b'(' => balance += 1,
_ => {}
}
if balance == 0 {
break;
}
end += 1;
}
if balance != 0 {
return false;
}
let args = mdx[skip..end].trim();
let trail = mdx[end + 1..].trim();
if !trail.is_empty() {
return false;
}
args
};
let str = match args {
"email_beta" => MangledBetaEmail {}.render().0,
"email_goog" => MangledGoogleEmail {}.render().0,
_ => return false,
};
fmt.text_raw(&str);
return true;
}

View File

@@ -0,0 +1,98 @@
use lazy_static::lazy_static;
use markdown_it::generics::inline::full_link;
use markdown_it::{MarkdownIt, Node};
use maud::{Markup, PreEscaped, Render};
use page::servable::PageMetadata;
use crate::components::md::emote::InlineEmote;
use crate::components::md::frontmatter::{TomlFrontMatter, YamlFrontMatter};
use crate::components::md::link::SmartLink;
use crate::components::md::mdx::InlineMdx;
mod emote;
mod frontmatter;
mod link;
mod mdx;
lazy_static! {
static ref MdParser: MarkdownIt = {
let mut md = markdown_it::MarkdownIt::new();
{
use markdown_it::plugins::cmark::*;
inline::newline::add(&mut md);
inline::escape::add(&mut md);
inline::backticks::add(&mut md);
inline::emphasis::add(&mut md);
// Replaced with smart links
//inline::link::add(&mut md);
full_link::add::<false>(&mut md, |href, title| {
Node::new(SmartLink {
url: href.unwrap_or_default(),
title,
})
});
inline::image::add(&mut md);
inline::autolink::add(&mut md);
inline::entity::add(&mut md);
block::code::add(&mut md);
block::fence::add(&mut md);
block::blockquote::add(&mut md);
block::hr::add(&mut md);
block::list::add(&mut md);
block::reference::add(&mut md);
block::heading::add(&mut md);
block::lheading::add(&mut md);
block::paragraph::add(&mut md);
}
{
markdown_it::plugins::html::add(&mut md);
md_footnote::add(&mut md);
}
md.block.add_rule::<YamlFrontMatter>().before_all();
md.block.add_rule::<TomlFrontMatter>().before_all();
md.inline.add_rule::<InlineEmote>();
md.inline.add_rule::<InlineMdx>();
md
};
}
pub struct Markdown<'a>(pub &'a str);
impl Render for Markdown<'_> {
fn render(&self) -> Markup {
let md = Self::parse(self.0);
let html = md.render();
return PreEscaped(html);
}
}
impl Markdown<'_> {
pub fn parse(md_str: &str) -> Node {
MdParser.parse(md_str)
}
}
/// Try to read page metadata from a markdown file's frontmatter.
/// - returns `none` if there is no frontmatter
/// - returns an error if we fail to parse frontmatter
pub fn meta_from_markdown(root_node: &Node) -> Result<Option<PageMetadata>, toml::de::Error> {
root_node
.children
.first()
.and_then(|x| x.cast::<TomlFrontMatter>())
.map(|x| toml::from_str::<PageMetadata>(&x.content))
.map_or(Ok(None), |v| v.map(Some))
}

View File

@@ -15,20 +15,3 @@ impl<T: Render> Render for FarLink<'_, T> {
)
}
}
pub struct Backlinks<'a>(pub &'a [(&'a str, &'a str)], pub &'a str);
impl Render for Backlinks<'_> {
fn render(&self) -> Markup {
html! {
div {
@for (url, text) in self.0 {
a href=(url) style="padding-left:5pt;padding-right:5pt;" { (text) }
"/"
}
span style="color:var(--metaColor);padding-left:5pt;padding-right:5pt;" { (self.1) }
}
}
}
}

View File

@@ -1,7 +1,7 @@
+++
title = "What's a \"betalupi?\""
author = "Mark"
slug = "whats-a-betalupi"
backlinks = true
+++
[es]: https://github.com/endless-sky/endless-sky
@@ -23,12 +23,12 @@ A snippet of the [_Endless Sky_][es] map is below.
<br/>
**In other words:** Try finding a `.com` domain that...
**In other words:** try finding a `.com` domain that...
- Isn't already taken
- Doesn't sound awful
- Isn't owned by a scalper that's selling it for $300"
- Isn't owned by a scalper that's selling it for $300
<br/>
<img alt="betalupi map" src="/assets/img/betalupi.png"></img>
<img class="img-placeholder" src="/assets/img/betalupi.png?t=maxdim(50,50)" data-large="/assets/img/betalupi.png" style="width:100%;height=10rem;"></img>

View File

@@ -1,7 +1,7 @@
+++
title = "Mark's Handouts"
author = "Mark"
slug = "handouts"
backlinks = true
+++
# Mark's Handouts
@@ -13,8 +13,6 @@ arguably the best math circle in the western world. We teach students mathematic
far beyond the regular school curriculum, much like [AOPS](https://artofproblemsolving.com)
and the [BMC](https://mathcircle.berkeley.edu).
<br></br>
{{color(--pink, "For my students:")}} \
Don't look at solutions we haven't discussed,
and don't start any handouts before class. That spoils all the fun!
@@ -36,7 +34,6 @@ If the class finishes early, the lesson is either too short or too easy.
The sources for all these handouts are available [here](https://git.betalupi.com/mark/handouts).\
Some are written in LaTeX, some are in [Typst](https://typst.app). \
The latter is vastly superior.
<br></br>
<hr></hr>
<hr style="margin:5rem 0 5rem 0;"></hr>
<br></br>

View File

@@ -4,10 +4,9 @@ use std::{
time::{Duration, Instant},
};
use assetserver::Asset;
use chrono::{DateTime, TimeDelta, Utc};
use maud::{Markup, PreEscaped, html};
use page::{DeviceType, Page, RequestContext};
use page::{DeviceType, RenderContext, servable::Page};
use parking_lot::Mutex;
use serde::Deserialize;
use tracing::{debug, warn};
@@ -15,9 +14,9 @@ use tracing::{debug, warn};
use crate::{
components::{
md::{Markdown, meta_from_markdown},
misc::{Backlinks, FarLink},
misc::FarLink,
},
routes::assets::Image_Icon,
pages::{MAIN_TEMPLATE, backlinks, footer},
};
#[derive(Debug, Deserialize)]
@@ -114,11 +113,7 @@ async fn get_index() -> Result<Vec<HandoutEntry>, reqwest::Error> {
return Ok(res);
}
fn build_list_for_group(
handouts: &[HandoutEntry],
group: &str,
req_ctx: &RequestContext,
) -> Markup {
fn build_list_for_group(handouts: &[HandoutEntry], group: &str, req_ctx: &RenderContext) -> Markup {
let mobile = req_ctx.client_info.device_type == DeviceType::Mobile;
if mobile {
@@ -193,75 +188,84 @@ pub fn handouts() -> Page {
let mut meta = meta_from_markdown(&md).unwrap().unwrap();
if meta.image.is_none() {
meta.image = Some(Image_Icon::URL.to_owned());
meta.image = Some("/assets/img/icon.png".to_owned());
}
let html = PreEscaped(md.render());
Page {
meta,
html_ttl: Some(TimeDelta::seconds(300)),
generate_html: Box::new(move |page, req_ctx| {
let html = html.clone(); // TODO: find a way to not clone here
MAIN_TEMPLATE
.derive(meta, move |page, ctx| {
let html = html.clone();
let index = index.clone();
Box::pin(async move {
let handouts = index.get().await;
let fallback = html! {
span style="color:var(--yellow)" {
"Could not load handouts, something broke."
}
" "
(
FarLink(
"https://git.betalupi.com/Mark/-/packages/generic/ormc-handouts/latest",
"Try this direct link."
)
)
};
let warmups = match &*handouts {
Ok(handouts) => build_list_for_group(handouts, "Warm-Ups", req_ctx),
Err(error) => {
warn!("Could not load handout index: {error:?}");
fallback.clone()
}
};
let advanced = match &*handouts {
Ok(handouts) => build_list_for_group(handouts, "Advanced", req_ctx),
Err(_) => fallback,
};
html! {
@if let Some(slug) = &page.meta.slug {
(Backlinks(&[("/", "home")], slug))
}
(html)
(Markdown(concat!(
"## Warm-Ups",
"\n\n",
"Students never show up on time. Some come early, some come late. Warm-ups ",
"are my solution to this problem: we hand these out as students walk in, ",
"giving them something to do until we can start the lesson.",
)))
(warmups)
br {}
(Markdown(concat!(
"## Advanced",
"\n\n",
"The highest level of the ORMC, and the group I spend most of my time with. ",
"Students in ORMC Advanced are in high school, which means ",
"they're ~14-18 years old.",
)))
(advanced)
br {}
}
})
}),
}
render(html, index, page, ctx)
})
.html_ttl(Some(TimeDelta::seconds(300)))
}
fn render<'a>(
html: Markup,
index: Arc<CachedRequest<Result<Vec<HandoutEntry>, reqwest::Error>>>,
_page: &'a Page,
ctx: &'a RenderContext,
) -> Pin<Box<dyn Future<Output = Markup> + Send + Sync + 'a>> {
Box::pin(async move {
let handouts = index.get().await;
let fallback = html! {
span style="color:var(--yellow)" {
"Could not load handouts, something broke."
}
" "
(
FarLink(
"https://git.betalupi.com/Mark/-/packages/generic/ormc-handouts/latest",
"Try this direct link."
)
)
};
let warmups = match &*handouts {
Ok(handouts) => build_list_for_group(handouts, "Warm-Ups", ctx),
Err(error) => {
warn!("Could not load handout index: {error:?}");
fallback.clone()
}
};
let advanced = match &*handouts {
Ok(handouts) => build_list_for_group(handouts, "Advanced", ctx),
Err(_) => fallback,
};
html! {
div class="wrapper" style="margin-top:3ex;" {
@if let Some(backlinks) = backlinks(ctx) {
(backlinks)
}
(html)
(Markdown(concat!(
"## Warm-Ups",
"\n\n",
"Students never show up on time. Some come early, some come late. Warm-ups ",
"are my solution to this problem: we hand these out as students walk in, ",
"giving them something to do until we can start the lesson.",
)))
(warmups)
br {}
(Markdown(concat!(
"## Advanced",
"\n\n",
"The highest level of the ORMC, and the group I spend most of my time with. ",
"Students in ORMC Advanced are in high school, which means ",
"they're ~14-18 years old.",
)))
(advanced)
br {}
(footer())
}
}
})
}

View File

@@ -1,9 +1,7 @@
+++
title = "HtWaH: Typesetting"
author = "Mark"
# TODO: many slugs, htwah/typesetting
slug = "handouts"
backlinks = true
+++
## Table of Contents
@@ -167,7 +165,7 @@ The document itself should also be numbered. In most cases, a `\today` on the fr
This helps synchronize the handout you _think_ the class has with the handout that the class _really_ has.
Future instructors {{color(--grey, "(and future you")}} will be thankful.
Future instructors {{color(--grey, "(and future you)")}} will be thankful.
### Items

View File

@@ -5,11 +5,11 @@ Also see [what's a "betalupi?"](/whats-a-betalupi)
- [Handouts](/handouts): Math circle lessons I've written
- [Links](/links): Interesting parts of the internet
<hr style="margin-top: 8rem; margin-bottom: 8rem"/>
<hr style="margin-top: 5rem; margin-bottom: 5rem"/>
## Projects
- **RedoxOS**, a general-purpose, microkernel-based operating system written in Rust. _{{color(--grey, "[enthusiast]")}}
- **RedoxOS**, a general-purpose, microkernel-based operating system written in Rust. _{{color(--grey, "[enthusiast]")}}_
- {{color(--grey, "Status: ")}} {{color(--yellow, "Passive.")}}
- {{color(--grey, "Website: ")}} [:fa-link: redox-os.org](https://www.redox-os.org/)

View File

@@ -1,6 +1,9 @@
use assetserver::Asset;
use maud::html;
use page::{Page, PageMetadata};
use maud::{Markup, html};
use page::{
RenderContext,
servable::{Page, PageMetadata},
};
use std::pin::Pin;
use crate::{
components::{
@@ -9,70 +12,78 @@ use crate::{
md::Markdown,
misc::FarLink,
},
routes::assets::{Image_Cover, Image_Icon},
pages::{MAIN_TEMPLATE, footer},
};
pub fn index() -> Page {
Page {
meta: PageMetadata {
MAIN_TEMPLATE.derive(
PageMetadata {
title: "Betalupi: About".into(),
author: Some("Mark".into()),
description: Some("Description".into()),
image: Some(Image_Icon::URL.into()),
slug: None,
description: None,
image: Some("/assets/img/icon.png".to_owned()),
},
render,
)
}
generate_html: Box::new(move |_page, _| {
Box::pin(async {
html! {
h2 id="about" { "About" }
fn render<'a>(
_page: &'a Page,
_ctx: &'a RenderContext,
) -> Pin<Box<dyn Future<Output = Markup> + Send + Sync + 'a>> {
Box::pin(async {
html! {
div class="wrapper" style="margin-top:3ex;" {
h2 id="about" { "About" }
div {
div {
img
src=(Image_Cover::URL)
style="float:left;margin:10px 10px 10px 10px;display:block;width:25%;"
{}
img
class="img-placeholder"
src="/assets/img/cover-small.jpg?t=maxdim(20,20)"
data-large="/assets/img/cover-small.jpg"
style="image-rendering:pixelated;float:left;margin:10px;display:block;width:25%;"
{}
div style="margin:2ex 1ex 2ex 1ex;display:inline-block;overflow:hidden;width:60%;" {
"Welcome, you've reached Mark's main page. Here you'll find"
" links to various projects I've worked on."
div style="margin:2ex 1ex 2ex 1ex;display:inline-block;overflow:hidden;width:60%;" {
"Welcome, you've reached Mark's main page. Here you'll find"
" links to various projects I've worked on."
ul {
li { (MangledBetaEmail {}) }
li { (MangledGoogleEmail {}) }
ul {
li { (MangledBetaEmail {}) }
li { (MangledGoogleEmail {}) }
li {
(
FarLink(
"https://github.com/rm-dr",
html!(
(FAIcon::Github)
"rm-dr"
)
li {
(
FarLink(
"https://github.com/rm-dr",
html!(
(FAIcon::Github)
"rm-dr"
)
)
}
)
}
li {
(
FarLink(
"https://git.betalupi.com",
html!(
(FAIcon::Git)
"git.betalupi.com"
)
li {
(
FarLink(
"https://git.betalupi.com",
html!(
(FAIcon::Git)
"git.betalupi.com"
)
)
}
)
}
}
br style="clear:both;" {}
}
(Markdown(include_str!("index.md")))
br style="clear:both;" {}
}
})
}),
..Default::default()
}
(Markdown(include_str!("index.md")))
(footer())
}
}
})
}

View File

@@ -1,7 +1,7 @@
+++
title = "Links"
author = "Mark"
slug = "links"
backlinks = true
+++

View File

@@ -1,13 +1,23 @@
use assetserver::Asset;
use page::Page;
use chrono::TimeDelta;
use maud::{Markup, PreEscaped, html};
use page::{
RenderContext,
servable::{Page, PageMetadata, PageTemplate},
};
use crate::{components::md::page_from_markdown, routes::assets::Image_Icon};
use crate::components::{
fa::FAIcon,
md::{Markdown, meta_from_markdown},
misc::FarLink,
};
mod handouts;
mod index;
mod notfound;
pub use handouts::handouts;
pub use index::index;
pub use notfound::notfound;
pub fn links() -> Page {
/*
@@ -17,19 +27,160 @@ pub fn links() -> Page {
http://www.3dprintmath.com/
*/
page_from_markdown(include_str!("links.md"), Some(Image_Icon::URL.to_owned()))
page_from_markdown(
include_str!("links.md"),
Some("/assets/img/icon.png".to_owned()),
)
}
pub fn betalupi() -> Page {
page_from_markdown(
include_str!("betalupi.md"),
Some(Image_Icon::URL.to_owned()),
Some("/assets/img/icon.png".to_owned()),
)
}
pub fn htwah_typesetting() -> Page {
page_from_markdown(
include_str!("htwah-typesetting.md"),
Some(Image_Icon::URL.to_owned()),
Some("/assets/img/icon.png".to_owned()),
)
}
//
// MARK: md
//
fn page_from_markdown(md: impl Into<String>, default_image: Option<String>) -> Page {
let md: String = md.into();
let md = Markdown::parse(&md);
let mut meta = meta_from_markdown(&md)
.unwrap_or(Some(PageMetadata {
title: "Invalid frontmatter!".into(),
..Default::default()
}))
.unwrap_or_default();
if meta.image.is_none() {
meta.image = default_image
}
let html = PreEscaped(md.render());
MAIN_TEMPLATE
.derive(meta, move |_page, ctx| {
let html = html.clone();
Box::pin(async move {
html! {
div class="wrapper" style="margin-top:3ex;" {
@if let Some(backlinks) = backlinks(ctx) {
(backlinks)
}
(html)
(footer())
}
}
})
})
.html_ttl(Some(TimeDelta::days(1)))
.immutable(true)
}
//
// MARK: components
//
const MAIN_TEMPLATE: PageTemplate = PageTemplate {
// Order matters, base htmx goes first
scripts_linked: &["/assets/htmx.js", "/assets/htmx-json.js"],
// TODO: use htmx for this
scripts_inline: &["
window.onload = function() {
var imgs = document.querySelectorAll('.img-placeholder');
imgs.forEach(img => {
img.style.border = 'none';
img.style.filter = 'blur(10px)';
img.style.transition = 'filter 0.3s';
var lg = new Image();
lg.src = img.dataset.large;
lg.onload = function () {
img.src = img.dataset.large;
img.style.filter = 'blur(0px)';
};
})
}
"],
styles_inline: &[],
styles_linked: &["/assets/css/main.css"],
extra_meta: &[(
"viewport",
"width=device-width,initial-scale=1,user-scalable=no",
)],
..PageTemplate::const_default()
};
pub fn backlinks(ctx: &RenderContext) -> Option<Markup> {
let mut backlinks = vec![("/", "home")];
let mut segments = ctx.route.split("/").skip(1).collect::<Vec<_>>();
let last = segments.pop();
let mut end = 0;
for s in segments {
end += s.len();
backlinks.push((&ctx.route[0..=end], s));
end += 1; // trailing slash
}
last.map(|last| {
html! {
div {
@for (url, text) in backlinks {
a href=(url) style="padding-left:5pt;padding-right:5pt;" { (text) }
"/"
}
span style="color:var(--metaColor);padding-left:5pt;padding-right:5pt;" { (last) }
}
}
})
}
pub fn footer() -> Markup {
html!(
footer style="margin-top:10rem;" {
hr class = "footline";
div class = "footContainer" {
p {
"This site was built by hand with "
(FarLink("https://rust-lang.org", "Rust"))
", "
(FarLink("https://maud.lambda.xyz", "Maud"))
", and "
(FarLink("https://docs.rs/axum/latest/axum", "Axum"))
". "
(
FarLink(
"https://git.betalupi.com/Mark/webpage",
html!(
(FAIcon::Git)
"Source here!"
)
)
)
}
}
}
)
}

View File

@@ -0,0 +1,29 @@
use maud::html;
use page::servable::{Page, PageMetadata};
use reqwest::StatusCode;
use crate::pages::MAIN_TEMPLATE;
pub fn notfound() -> Page {
MAIN_TEMPLATE.derive(
PageMetadata {
title: "Page not found".into(),
author:None,
description: None,
image: Some("/assets/img/icon.png".to_owned()),
},
move |_page, _ctx| {
Box::pin(async {
html! {
div class="wrapper" {
div style="display:flex;flex-direction:column;align-items:center;justify-content:center;min-height:100vh" {
p style="font-weight:bold;font-size:50pt;margin:0;" { "404" }
p style="font-size:13pt;margin:0;color:var(--grey);" { "(page not found)" }
a style="font-size:12pt;margin:10pt;padding:5px;" href="/" {"<- Back to site"}
}
}
}
})
},
).response_code(StatusCode::NOT_FOUND)
}

View File

@@ -1,206 +0,0 @@
use assetserver::Asset;
use axum::http::header;
use macro_assets::assets;
use macro_sass::sass;
use toolbox::mime::MimeType;
assets! {
prefix: "/assets"
router: asset_router()
//
// MARK: styles
//
Styles_Main {
source: sass!("css/main.scss").as_bytes(),
target: "/css/main.css",
headers: [
(header::CONTENT_TYPE, "text/css")
]
}
//
// MARK: images
//
Image_Cover {
source: include_bytes!("../../assets/images/cover-small.jpg"),
target: "/img/face.jpg",
headers: [
(header::CONTENT_TYPE, "image/jpg")
]
}
Image_Betalupi {
source: include_bytes!("../../assets/images/betalupi-map.png"),
target: "/img/betalupi.png",
headers: [
(header::CONTENT_TYPE, "image/png")
]
}
Image_Icon {
source: include_bytes!("../../assets/images/icon.png"),
target: "/img/icon.png",
headers: [
(header::CONTENT_TYPE, "image/png")
]
}
//
// MARK: fonts
//
FiraCode_Bold_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-Bold.woff2"),
target: "/fonts/FiraCode-Bold.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
FiraCode_Light_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-Light.woff2"),
target: "/fonts/FiraCode-Light.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
FiraCode_Medium_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-Medium.woff2"),
target: "/fonts/FiraCode-Medium.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
FiraCode_Regular_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-Regular.woff2"),
target: "/fonts/FiraCode-Regular.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
FiraCode_SemiBold_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-SemiBold.woff2"),
target: "/fonts/FiraCode-SemiBold.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
FiraCode_VF_woff2 {
source: include_bytes!("../../assets/fonts/fira/FiraCode-VF.woff2"),
target: "/fonts/FiraCode-VF.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
//
// MARK: icons
//
Fa_Brands_woff2 {
source: include_bytes!("../../assets/fonts/fa/fa-brands-400.woff2"),
target: "/fonts/fa/fa-brands-400.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
Fa_Regular_woff2 {
source: include_bytes!("../../assets/fonts/fa/fa-regular-400.woff2"),
target: "/fonts/fa/fa-regular-400.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
Fa_Solid_woff2 {
source: include_bytes!("../../assets/fonts/fa/fa-solid-900.woff2"),
target: "/fonts/fa/fa-solid-900.woff2",
headers: [
(header::CONTENT_TYPE, "application/font-woff2")
]
}
Fa_Brands_ttf {
source: include_bytes!("../../assets/fonts/fa/fa-brands-400.ttf"),
target: "/fonts/fa/fa-brands-400.ttf",
headers: [
(header::CONTENT_TYPE, "application/font-ttf")
]
}
Fa_Regular_ttf {
source: include_bytes!("../../assets/fonts/fa/fa-regular-400.ttf"),
target: "/fonts/fa/fa-regular-400.ttf",
headers: [
(header::CONTENT_TYPE, "application/font-ttf")
]
}
Fa_Solid_ttf {
source: include_bytes!("../../assets/fonts/fa/fa-solid-900.ttf"),
target: "/fonts/fa/fa-solid-900.ttf",
headers: [
(header::CONTENT_TYPE, "application/font-ttf")
]
}
//
// MARK: htwah
//
Htwah_Definitions {
source: include_bytes!("../../assets/htwah/definitions.pdf"),
target: "/htwah/definitions.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
Htwah_Numbering {
source: include_bytes!("../../assets/htwah/numbering.pdf"),
target: "/htwah/numbering.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
Htwah_SolsA {
source: include_bytes!("../../assets/htwah/sols-a.pdf"),
target: "/htwah/sols-a.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
Htwah_SolsB {
source: include_bytes!("../../assets/htwah/sols-b.pdf"),
target: "/htwah/sols-b.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
Htwah_SpacingA {
source: include_bytes!("../../assets/htwah/spacing-a.pdf"),
target: "/htwah/spacing-a.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
Htwah_SpacingB {
source: include_bytes!("../../assets/htwah/spacing-b.pdf"),
target: "/htwah/spacing-b.pdf",
headers: [
(header::CONTENT_TYPE, MimeType::Pdf.to_string())
]
}
}

View File

@@ -1,80 +1,203 @@
use std::{pin::Pin, sync::Arc};
use assetserver::Asset;
use axum::Router;
use maud::{DOCTYPE, Markup, PreEscaped, html};
use page::{Page, PageServer, RequestContext};
use tracing::info;
use macro_sass::sass;
use page::{
ServableRoute,
servable::{Redirect, StaticAsset},
};
use toolbox::mime::MimeType;
use tower_http::compression::{CompressionLayer, DefaultPredicate};
use crate::{components::misc::FarLink, pages, routes::assets::Styles_Main};
pub mod assets;
use crate::pages;
pub(super) fn router() -> Router<()> {
let (asset_prefix, asset_router) = assets::asset_router();
info!("Serving assets at {asset_prefix}");
let compression: CompressionLayer = CompressionLayer::new()
.br(true)
.deflate(true)
.gzip(true)
.zstd(true)
.compress_when(DefaultPredicate::new());
let router = build_server().into_router();
Router::new().merge(router).nest(asset_prefix, asset_router)
build_server().into_router().layer(compression)
}
fn build_server() -> Arc<PageServer> {
let server = PageServer::new(Box::new(page_wrapper));
server
fn build_server() -> ServableRoute {
ServableRoute::new()
.with_404(pages::notfound())
.add_page("/", pages::index())
.add_page("/links", pages::links())
.add_page("/whats-a-betalupi", pages::betalupi())
.add_page("/handouts", pages::handouts())
.add_page("/htwah", pages::htwah_typesetting());
server
}
fn page_wrapper<'a>(
page: &'a Page,
req_ctx: &'a RequestContext,
) -> Pin<Box<dyn Future<Output = Markup> + 'a + Send + Sync>> {
Box::pin(async move {
html! {
(DOCTYPE)
html {
head {
meta charset="UTF" {}
meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no" {}
meta content="text/html; charset=UTF-8" http-equiv="content-type" {}
meta property="og:type" content="website" {}
link rel="stylesheet" href=(Styles_Main::URL) {}
(&page.meta)
title { (PreEscaped(page.meta.title.clone())) }
}
body {
div class="wrapper" {
main { ( page.generate_html(req_ctx).await ) }
footer {
hr class = "footline" {}
div class = "footContainer" {
p {
"This site was built by hand using "
(FarLink("https://rust-lang.org", "Rust"))
", "
(FarLink("https://maud.lambda.xyz", "Maud"))
", "
(FarLink("https://github.com/connorskees/grass", "Grass"))
", and "
(FarLink("https://docs.rs/axum/latest/axum", "Axum"))
"."
}
}
}
}
}
}
}
})
.add_page("/htwah", {
#[expect(clippy::unwrap_used)]
Redirect::new("/handouts").unwrap()
})
.add_page("/htwah/typesetting", pages::htwah_typesetting())
.add_page("/assets/htmx.js", page::HTMX_2_0_8)
.add_page("/assets/htmx-json.js", page::EXT_JSON_1_19_12)
//
.add_page(
"/assets/css/main.css",
StaticAsset {
bytes: sass!("css/main.scss").as_bytes(),
mime: MimeType::Css,
},
)
.add_page(
"/assets/img/cover-small.jpg",
StaticAsset {
bytes: include_bytes!("../../assets/images/cover-small.jpg"),
mime: MimeType::Jpg,
},
)
.add_page(
"/assets/img/betalupi.png",
StaticAsset {
bytes: include_bytes!("../../assets/images/betalupi-map.png"),
mime: MimeType::Png,
},
)
.add_page(
"/assets/img/icon.png",
StaticAsset {
bytes: include_bytes!("../../assets/images/icon.png"),
mime: MimeType::Png,
},
)
//
// MARK: fonts
//
.add_page(
"/assets/fonts/FiraCode-Bold.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-Bold.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/FiraCode-Light.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-Light.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/FiraCode-Medium.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-Medium.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/FiraCode-Regular.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-Regular.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/FiraCode-SemiBold.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-SemiBold.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/FiraCode-VF.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fira/FiraCode-VF.woff2"),
mime: MimeType::Woff2,
},
)
//
// MARK: icons
//
.add_page(
"/assets/fonts/fa/fa-brands-400.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-brands-400.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/fa/fa-regular-400.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-regular-400.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/fa/fa-solid-900.woff2",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-solid-900.woff2"),
mime: MimeType::Woff2,
},
)
.add_page(
"/assets/fonts/fa/fa-brands-400.ttf",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-brands-400.ttf"),
mime: MimeType::Ttf,
},
)
.add_page(
"/assets/fonts/fa/fa-regular-400.ttf",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-regular-400.ttf"),
mime: MimeType::Ttf,
},
)
.add_page(
"/assets/fonts/fa/fa-solid-900.ttf",
StaticAsset {
bytes: include_bytes!("../../assets/fonts/fa/fa-solid-900.ttf"),
mime: MimeType::Ttf,
},
)
//
// MARK: htwah
//
.add_page(
"/assets/htwah/definitions.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/definitions.pdf"),
mime: MimeType::Pdf,
},
)
.add_page(
"/assets/htwah/numbering.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/numbering.pdf"),
mime: MimeType::Pdf,
},
)
.add_page(
"/assets/htwah/sols-a.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/sols-a.pdf"),
mime: MimeType::Pdf,
},
)
.add_page(
"/assets/htwah/sols-b.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/sols-b.pdf"),
mime: MimeType::Pdf,
},
)
.add_page(
"/assets/htwah/spacing-a.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/spacing-a.pdf"),
mime: MimeType::Pdf,
},
)
.add_page(
"/assets/htwah/spacing-b.pdf",
StaticAsset {
bytes: include_bytes!("../../assets/htwah/spacing-b.pdf"),
mime: MimeType::Pdf,
},
)
}
#[test]
@@ -84,7 +207,6 @@ fn server_builds_without_panic() {
.build()
.unwrap()
.block_on(async {
// Needs tokio context
let _server = build_server().into_router();
let _server = build_server();
});
}

View File

@@ -6,4 +6,4 @@ extend-ignore-re = [
]
[files]
extend-exclude = ["crates/service/service-webpage/css"]
extend-exclude = ["crates/service/service-webpage/css", "crates/lib/page/htmx"]