Move server code into its own module
This commit is contained in:
parent
ad0c1a69cb
commit
45abda2b6d
12 changed files with 15 additions and 4 deletions
36
src/server/recurring.rs
Normal file
36
src/server/recurring.rs
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
//! Recurring actions and updates.
|
||||
|
||||
// TODO `fetch` submodule for fetching new commits
|
||||
// TODO `queue` submodule for updating the queue
|
||||
|
||||
mod queue;
|
||||
mod repo;
|
||||
|
||||
use tracing::{debug_span, error, Instrument};
|
||||
|
||||
use crate::state::AppState;
|
||||
|
||||
async fn recurring_task(state: &AppState) {
|
||||
async {
|
||||
if let Err(e) = repo::update(&state.db, state.repo.clone()).await {
|
||||
error!("Error updating repo:\n{e:?}");
|
||||
};
|
||||
}
|
||||
.instrument(debug_span!("update repo"))
|
||||
.await;
|
||||
|
||||
async {
|
||||
if let Err(e) = queue::update(&state.db).await {
|
||||
error!("Error updating queue:\n{e:?}");
|
||||
};
|
||||
}
|
||||
.instrument(debug_span!("update queue"))
|
||||
.await;
|
||||
}
|
||||
|
||||
pub async fn run(state: AppState) {
|
||||
loop {
|
||||
recurring_task(&state).await;
|
||||
tokio::time::sleep(state.config.repo.update_delay).await;
|
||||
}
|
||||
}
|
||||
41
src/server/recurring/queue.rs
Normal file
41
src/server/recurring/queue.rs
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
use sqlx::{Acquire, SqlitePool};
|
||||
use time::OffsetDateTime;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::{somehow, util};
|
||||
|
||||
pub async fn update(db: &SqlitePool) -> somehow::Result<()> {
|
||||
debug!("Updating queue");
|
||||
let mut tx = db.begin().await?;
|
||||
let conn = tx.acquire().await?;
|
||||
|
||||
// Get all newly added tracked commits
|
||||
let new = sqlx::query!("SELECT hash FROM commits WHERE new AND reachable = 2")
|
||||
.fetch_all(&mut *conn)
|
||||
.await?;
|
||||
let new_len = new.len();
|
||||
|
||||
// Insert them into the queue
|
||||
for row in new {
|
||||
let id = util::new_run_id();
|
||||
let date = OffsetDateTime::now_utc();
|
||||
sqlx::query!(
|
||||
"INSERT INTO queue (id, hash, date) VALUES (?, ?, ?)",
|
||||
id,
|
||||
row.hash,
|
||||
date
|
||||
)
|
||||
.execute(&mut *conn)
|
||||
.await?;
|
||||
}
|
||||
debug!("Added {new_len} commits to the queue");
|
||||
|
||||
// Mark all commits as old
|
||||
sqlx::query!("UPDATE commits SET new = false")
|
||||
.execute(&mut *conn)
|
||||
.await?;
|
||||
|
||||
tx.commit().await?;
|
||||
debug!("Updated queue");
|
||||
Ok(())
|
||||
}
|
||||
280
src/server/recurring/repo.rs
Normal file
280
src/server/recurring/repo.rs
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
//! Add new commits to the database and update the tracked refs.
|
||||
|
||||
use std::{collections::HashSet, sync::Arc};
|
||||
|
||||
use futures::TryStreamExt;
|
||||
use gix::{
|
||||
objs::Kind, prelude::ObjectIdExt, refs::Reference, ObjectId, Repository, ThreadSafeRepository,
|
||||
};
|
||||
use sqlx::{Acquire, SqliteConnection, SqlitePool};
|
||||
use tracing::{debug, info};
|
||||
|
||||
use crate::{somehow, util};
|
||||
|
||||
async fn get_all_commit_hashes_from_db(
|
||||
conn: &mut SqliteConnection,
|
||||
) -> somehow::Result<HashSet<ObjectId>> {
|
||||
let hashes = sqlx::query!("SELECT hash FROM commits")
|
||||
.fetch(conn)
|
||||
.err_into::<somehow::Error>()
|
||||
.and_then(|r| async move { r.hash.parse::<ObjectId>().map_err(|e| e.into()) })
|
||||
.try_collect::<HashSet<_>>()
|
||||
.await?;
|
||||
|
||||
Ok(hashes)
|
||||
}
|
||||
|
||||
fn get_all_refs_from_repo(repo: &Repository) -> somehow::Result<Vec<Reference>> {
|
||||
let mut references = vec![];
|
||||
for reference in repo.references()?.all()? {
|
||||
let mut reference = reference.map_err(somehow::Error::from_box)?;
|
||||
reference.peel_to_id_in_place()?;
|
||||
|
||||
// Some repos *cough*linuxkernel*cough* have refs that don't point to
|
||||
// commits. This makes the rev walk choke and die. We don't want that.
|
||||
if reference.id().object()?.kind != Kind::Commit {
|
||||
continue;
|
||||
}
|
||||
|
||||
references.push(reference.detach());
|
||||
}
|
||||
Ok(references)
|
||||
}
|
||||
|
||||
fn get_new_commits_from_repo(
|
||||
repo: &Repository,
|
||||
refs: &[Reference],
|
||||
old: &HashSet<ObjectId>,
|
||||
) -> somehow::Result<Vec<ObjectId>> {
|
||||
let ref_ids = refs.iter().flat_map(|r| r.peeled.into_iter());
|
||||
|
||||
// Walk from those until hitting old references
|
||||
let mut new = vec![];
|
||||
for commit in repo.rev_walk(ref_ids).selected(|c| !old.contains(c))? {
|
||||
new.push(commit?.id);
|
||||
}
|
||||
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
fn get_all_refs_and_new_commits_from_repo(
|
||||
repo: &Repository,
|
||||
old: &HashSet<ObjectId>,
|
||||
) -> somehow::Result<(Vec<Reference>, Vec<ObjectId>)> {
|
||||
let refs = get_all_refs_from_repo(repo)?;
|
||||
let new = get_new_commits_from_repo(repo, &refs, old)?;
|
||||
Ok((refs, new))
|
||||
}
|
||||
|
||||
/// Insert the given commits into the `commits` table.
///
/// For each commit, metadata (author, committer, their dates, and the raw
/// message) is read from the repo object. `INSERT OR IGNORE` keeps the
/// operation idempotent if a hash is somehow already present. Progress is
/// logged every 100k rows because this can take a while on large repos.
async fn insert_new_commits(
    conn: &mut SqliteConnection,
    repo: &Repository,
    new: &[ObjectId],
) -> somehow::Result<()> {
    for (i, id) in new.iter().enumerate() {
        let commit = id.attach(repo).object()?.try_into_commit()?;
        let hash = commit.id.to_string();
        let author_info = commit.author()?;
        let author = util::format_actor(author_info.actor())?;
        let author_date = util::time_to_offset_datetime(author_info.time)?;
        let committer_info = commit.committer()?;
        let committer = util::format_actor(committer_info.actor())?;
        let committer_date = util::time_to_offset_datetime(committer_info.time)?;
        let message = commit.message_raw()?.to_string();

        sqlx::query!(
            "\
            INSERT OR IGNORE INTO commits ( \
                hash, \
                author, \
                author_date, \
                committer, \
                committer_date, \
                message \
            ) \
            VALUES (?, ?, ?, ?, ?, ?) \
            ",
            hash,
            author,
            author_date,
            committer,
            committer_date,
            message
        )
        .execute(&mut *conn)
        .await?;

        // Periodic progress output for long imports.
        if (i + 1) % 100000 == 0 {
            debug!("Inserted {} commits so far", i + 1);
        }
    }
    debug!("Inserted {} commits in total", new.len());
    Ok(())
}
|
||||
|
||||
async fn insert_new_commit_links(
|
||||
conn: &mut SqliteConnection,
|
||||
repo: &Repository,
|
||||
new: &[ObjectId],
|
||||
) -> somehow::Result<()> {
|
||||
for (i, hash) in new.iter().enumerate() {
|
||||
let commit = hash.attach(repo).object()?.try_into_commit()?;
|
||||
let child = commit.id.to_string();
|
||||
for parent in commit.parent_ids() {
|
||||
let parent = parent.to_string();
|
||||
// Commits *cough*linuxkernel*cough* may list the same parent
|
||||
// multiple times, so we just ignore duplicates during insert.
|
||||
sqlx::query!(
|
||||
"INSERT OR IGNORE INTO commit_links (parent, child) VALUES (?, ?)",
|
||||
parent,
|
||||
child,
|
||||
)
|
||||
.execute(&mut *conn)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if (i + 1) % 100000 == 0 {
|
||||
debug!("Inserted {} commits' links so far", i + 1);
|
||||
}
|
||||
}
|
||||
debug!("Inserted {} commits' links in total", new.len());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn mark_all_commits_as_old(conn: &mut SqliteConnection) -> somehow::Result<()> {
|
||||
sqlx::query!("UPDATE commits SET new = 0")
|
||||
.execute(conn)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Synchronize the `refs` table with the refs currently in the repo.
///
/// Refs that no longer exist in the repo are deleted; current refs are
/// inserted or, on name conflict, updated to their new hash. The upsert
/// touches only `hash`, so an existing ref's `tracked` flag is preserved.
async fn update_refs(conn: &mut SqliteConnection, refs: Vec<Reference>) -> somehow::Result<()> {
    // Remove refs that no longer exist
    let existing = refs
        .iter()
        .map(|r| r.name.to_string())
        .collect::<HashSet<_>>();
    let current = sqlx::query!("SELECT name FROM refs")
        .fetch_all(&mut *conn)
        .await?;
    // NOTE(review): one DELETE per vanished ref — fine for typical ref
    // counts, but worth batching if this ever shows up in profiles.
    for reference in current {
        if !existing.contains(&reference.name) {
            sqlx::query!("DELETE FROM refs WHERE name = ?", reference.name)
                .execute(&mut *conn)
                .await?;
        }
    }

    // Add new refs and update existing refs
    for reference in refs {
        let name = reference.name.to_string();
        // Refs without a peeled target cannot be stored; skip them.
        let Some(hash) = reference.peeled else { continue; };
        let hash = hash.to_string();

        sqlx::query!(
            "\
            INSERT INTO refs (name, hash) VALUES (?, ?) \
            ON CONFLICT (name) DO UPDATE \
            SET hash = excluded.hash \
            ",
            name,
            hash
        )
        .execute(&mut *conn)
        .await?;
    }

    Ok(())
}
|
||||
|
||||
async fn track_main_branch(conn: &mut SqliteConnection, repo: &Repository) -> somehow::Result<()> {
|
||||
let Some(head) = repo.head_ref()? else { return Ok(()); };
|
||||
let name = head.inner.name.to_string();
|
||||
sqlx::query!("UPDATE refs SET tracked = true WHERE name = ?", name)
|
||||
.execute(conn)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recompute the `reachable` column for every commit in one SQL statement.
///
/// Two recursive CTEs are used:
/// - `tracked`: commits pointed at by tracked refs, plus all ancestors
///   (following `commit_links` from child to parent).
/// - `reachable`: commits pointed at by any ref, the tracked set, plus all
///   their ancestors.
///
/// Each commit's `reachable` is then set to 2 (tracked), 1 (reachable but
/// not tracked), or 0 (neither).
async fn update_commit_tracked_status(conn: &mut SqliteConnection) -> somehow::Result<()> {
    sqlx::query!(
        "\
        WITH RECURSIVE \
        tracked (hash) AS ( \
            SELECT hash FROM refs WHERE tracked \
            UNION \
            SELECT parent FROM commit_links \
            JOIN tracked ON hash = child \
        ), \
        reachable (hash) AS ( \
            SELECT hash FROM refs \
            UNION \
            SELECT hash FROM tracked \
            UNION \
            SELECT parent FROM commit_links \
            JOIN reachable ON hash = child \
        ) \
        UPDATE commits \
        SET reachable = CASE \
            WHEN hash IN tracked THEN 2 \
            WHEN hash IN reachable THEN 1 \
            ELSE 0 \
        END \
        "
    )
    .execute(conn)
    .await?;
    Ok(())
}
|
||||
|
||||
/// Bring the db up to date with the current contents of the repo.
///
/// In a single transaction: loads known commit hashes, walks the repo for
/// new commits (off the async runtime via `spawn_blocking`), inserts the
/// new commits and their parent links, updates the refs table, and
/// recomputes each commit's tracked/reachable status. On an empty db the
/// repo is treated as newly initialized: all commits are immediately marked
/// old (so the queue update doesn't enqueue the entire history) and the
/// HEAD ref is marked as tracked.
pub async fn update(db: &SqlitePool, repo: Arc<ThreadSafeRepository>) -> somehow::Result<()> {
    debug!("Updating repo");
    let thread_local_repo = repo.to_thread_local();
    let mut tx = db.begin().await?;
    let conn = tx.acquire().await?;

    let old = get_all_commit_hashes_from_db(&mut *conn).await?;
    debug!("Loaded {} commits from the db", old.len());

    // An empty commits table means this is the first run for this repo.
    let repo_is_new = old.is_empty();
    if repo_is_new {
        info!("Initializing new repo");
    }

    // This can take a while for larger repos. Running it via spawn_blocking
    // keeps it from blocking the entire tokio worker.
    let (refs, new) = tokio::task::spawn_blocking(move || {
        get_all_refs_and_new_commits_from_repo(&repo.to_thread_local(), &old)
    })
    .await??;
    debug!("Found {} new commits in repo", new.len());

    // Defer foreign key checks until the end of the transaction to improve
    // insert performance.
    sqlx::query!("PRAGMA defer_foreign_keys=1")
        .execute(&mut *conn)
        .await?;

    // Inserts are grouped by table so sqlite can process them *a lot* faster
    // than if they were grouped by commit (insert commit and parents, then next
    // commit and so on).
    insert_new_commits(conn, &thread_local_repo, &new).await?;
    insert_new_commit_links(conn, &thread_local_repo, &new).await?;
    if repo_is_new {
        mark_all_commits_as_old(conn).await?;
    }

    update_refs(conn, refs).await?;
    if repo_is_new {
        track_main_branch(conn, &thread_local_repo).await?;
    }
    update_commit_tracked_status(conn).await?;
    debug!("Updated tracked refs");

    tx.commit().await?;
    if repo_is_new {
        info!("Initialized new repo");
    }
    debug!("Updated repo");
    Ok(())
}
|
||||
58
src/server/web.rs
Normal file
58
src/server/web.rs
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
mod commit;
|
||||
mod commit_hash;
|
||||
mod index;
|
||||
mod queue;
|
||||
mod queue_id;
|
||||
mod r#static;
|
||||
|
||||
use axum::{routing::get, Router, Server};
|
||||
|
||||
use crate::{config::Config, somehow, state::AppState};
|
||||
|
||||
pub enum Tab {
|
||||
Index,
|
||||
Commit,
|
||||
Queue,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Base {
|
||||
root: String,
|
||||
repo_name: String,
|
||||
current: String,
|
||||
}
|
||||
|
||||
impl Base {
|
||||
pub fn new(config: &Config, tab: Tab) -> Self {
|
||||
let current = match tab {
|
||||
Tab::Index => "index",
|
||||
Tab::Commit => "commit",
|
||||
Tab::Queue => "queue",
|
||||
};
|
||||
Self {
|
||||
root: config.web.base(),
|
||||
repo_name: config.repo.name.clone(),
|
||||
current: current.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run(state: AppState) -> somehow::Result<()> {
|
||||
// TODO Add text body to body-less status codes
|
||||
|
||||
let app = Router::new()
|
||||
.route("/", get(index::get))
|
||||
.route("/commit/", get(commit::get))
|
||||
.route("/commit/:hash", get(commit_hash::get))
|
||||
.route("/queue/", get(queue::get))
|
||||
.route("/queue/table", get(queue::get_table))
|
||||
.route("/queue/:id", get(queue_id::get))
|
||||
.fallback(get(r#static::static_handler))
|
||||
.with_state(state.clone());
|
||||
|
||||
Server::bind(&"0.0.0.0:8000".parse().unwrap())
|
||||
.serve(app.into_make_service())
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
18
src/server/web/commit.rs
Normal file
18
src/server/web/commit.rs
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
use askama::Template;
|
||||
use axum::{extract::State, response::IntoResponse};
|
||||
|
||||
use crate::{config::Config, somehow};
|
||||
|
||||
use super::{Base, Tab};
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "commit.html")]
|
||||
struct CommitTemplate {
|
||||
base: Base,
|
||||
}
|
||||
|
||||
pub async fn get(State(config): State<&'static Config>) -> somehow::Result<impl IntoResponse> {
|
||||
Ok(CommitTemplate {
|
||||
base: Base::new(config, Tab::Commit),
|
||||
})
|
||||
}
|
||||
114
src/server/web/commit_hash.rs
Normal file
114
src/server/web/commit_hash.rs
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
use askama::Template;
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use futures::TryStreamExt;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use crate::{config::Config, somehow, util};
|
||||
|
||||
use super::{Base, Tab};
|
||||
|
||||
struct Commit {
|
||||
hash: String,
|
||||
short: String,
|
||||
reachable: i64,
|
||||
}
|
||||
|
||||
impl Commit {
|
||||
fn new(hash: String, message: &str, reachable: i64) -> Self {
|
||||
Self {
|
||||
short: util::format_commit_short(&hash, message),
|
||||
hash,
|
||||
reachable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Template for the single-commit detail page.
#[derive(Template)]
#[template(path = "commit_hash.html")]
struct CommitHashTemplate {
    base: Base,
    hash: String,
    author: String,
    // Preformatted via `util::format_time`.
    author_date: String,
    commit: String,
    commit_date: String,
    // Parent/child commits joined via `commit_links`.
    parents: Vec<Commit>,
    children: Vec<Commit>,
    summary: String,
    message: String,
    reachable: i64,
}
|
||||
|
||||
/// Render the detail page for a single commit, or 404 if the hash is
/// unknown.
///
/// Parents and children are looked up via `commit_links` and ordered with
/// reachable commits first, then by ascending committer date.
pub async fn get(
    Path(hash): Path<String>,
    State(config): State<&'static Config>,
    State(db): State<SqlitePool>,
) -> somehow::Result<Response> {
    let Some(commit) = sqlx::query!(
        "\
        SELECT \
            hash, \
            author, \
            author_date AS \"author_date: time::OffsetDateTime\", \
            committer, \
            committer_date AS \"committer_date: time::OffsetDateTime\", \
            message, \
            reachable \
        FROM commits \
        WHERE hash = ? \
        ",
        hash
    )
    .fetch_optional(&db)
    .await?
    else {
        return Ok(StatusCode::NOT_FOUND.into_response());
    };

    let parents = sqlx::query!(
        "\
        SELECT hash, message, reachable FROM commits \
        JOIN commit_links ON hash = parent \
        WHERE child = ? \
        ORDER BY reachable DESC, unixepoch(committer_date) ASC \
        ",
        hash
    )
    .fetch(&db)
    .map_ok(|r| Commit::new(r.hash, &r.message, r.reachable))
    .try_collect::<Vec<_>>()
    .await?;

    let children = sqlx::query!(
        "\
        SELECT hash, message, reachable FROM commits \
        JOIN commit_links ON hash = child \
        WHERE parent = ? \
        ORDER BY reachable DESC, unixepoch(committer_date) ASC \
        ",
        hash
    )
    .fetch(&db)
    .map_ok(|r| Commit::new(r.hash, &r.message, r.reachable))
    .try_collect::<Vec<_>>()
    .await?;

    Ok(CommitHashTemplate {
        base: Base::new(config, Tab::Commit),
        hash: commit.hash,
        author: commit.author,
        author_date: util::format_time(commit.author_date),
        commit: commit.committer,
        commit_date: util::format_time(commit.committer_date),
        parents,
        children,
        summary: util::format_commit_summary(&commit.message),
        // Trailing whitespace/newlines stripped for display.
        message: commit.message.trim_end().to_string(),
        reachable: commit.reachable,
    }
    .into_response())
}
|
||||
64
src/server/web/index.rs
Normal file
64
src/server/web/index.rs
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
use askama::Template;
|
||||
use axum::{extract::State, response::IntoResponse};
|
||||
use futures::TryStreamExt;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use crate::{config::Config, somehow, util};
|
||||
|
||||
use super::{Base, Tab};
|
||||
|
||||
/// A ref joined with its commit, prepared for display on the index page.
struct Ref {
    name: String,
    hash: String,
    // Short description derived from hash and message.
    short: String,
    reachable: i64,
    // Converted from the db's integer representation.
    tracked: bool,
}

/// Template for the index page, listing tracked and untracked refs.
#[derive(Template)]
#[template(path = "index.html")]
struct IndexTemplate {
    base: Base,
    tracked_refs: Vec<Ref>,
    untracked_refs: Vec<Ref>,
}
|
||||
|
||||
pub async fn get(
|
||||
State(config): State<&'static Config>,
|
||||
State(db): State<SqlitePool>,
|
||||
) -> somehow::Result<impl IntoResponse> {
|
||||
let refs = sqlx::query!(
|
||||
"\
|
||||
SELECT name, hash, message, reachable, tracked \
|
||||
FROM refs \
|
||||
JOIN commits USING (hash) \
|
||||
ORDER BY name ASC \
|
||||
"
|
||||
)
|
||||
.fetch(&db)
|
||||
.map_ok(|r| Ref {
|
||||
short: util::format_commit_short(&r.hash, &r.message),
|
||||
name: r.name,
|
||||
hash: r.hash,
|
||||
reachable: r.reachable,
|
||||
tracked: r.tracked != 0,
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
let mut tracked_refs = vec![];
|
||||
let mut untracked_refs = vec![];
|
||||
for reference in refs {
|
||||
if reference.tracked {
|
||||
tracked_refs.push(reference);
|
||||
} else {
|
||||
untracked_refs.push(reference);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(IndexTemplate {
|
||||
base: Base::new(config, Tab::Index),
|
||||
tracked_refs,
|
||||
untracked_refs,
|
||||
})
|
||||
}
|
||||
85
src/server/web/queue.rs
Normal file
85
src/server/web/queue.rs
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
use askama::Template;
|
||||
use axum::{extract::State, response::IntoResponse};
|
||||
use futures::TryStreamExt;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use crate::{config::Config, somehow, util};
|
||||
|
||||
use super::{Base, Tab};
|
||||
|
||||
/// A queue entry joined with its commit, prepared for display.
struct Task {
    id: String,
    // Short description derived from hash and message.
    short: String,
    reachable: i64,
    // Human-readable delta from the task's `date` to now.
    since: String,
    priority: i64,
    // Alternates between priority groups for row striping.
    odd: bool,
}
|
||||
|
||||
async fn get_queue(db: &SqlitePool) -> somehow::Result<Vec<Task>> {
|
||||
let mut tasks = sqlx::query!(
|
||||
"\
|
||||
SELECT \
|
||||
id, \
|
||||
hash, \
|
||||
message, \
|
||||
reachable, \
|
||||
date AS \"date: time::OffsetDateTime\", \
|
||||
priority \
|
||||
FROM queue \
|
||||
JOIN commits USING (hash) \
|
||||
ORDER BY priority DESC, unixepoch(date) DESC, hash ASC \
|
||||
"
|
||||
)
|
||||
.fetch(db)
|
||||
.map_ok(|r| Task {
|
||||
id: r.id,
|
||||
short: util::format_commit_short(&r.hash, &r.message),
|
||||
reachable: r.reachable,
|
||||
since: util::format_delta_from_now(r.date),
|
||||
priority: r.priority,
|
||||
odd: false,
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
let mut last_priority = None;
|
||||
let mut odd = false;
|
||||
for task in tasks.iter_mut().rev() {
|
||||
if last_priority.is_some() && last_priority != Some(task.priority) {
|
||||
odd = !odd;
|
||||
}
|
||||
task.odd = odd;
|
||||
last_priority = Some(task.priority);
|
||||
}
|
||||
|
||||
Ok(tasks)
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "queue_table.html")]
|
||||
struct QueueTableTemplate {
|
||||
tasks: Vec<Task>,
|
||||
}
|
||||
|
||||
pub async fn get_table(State(db): State<SqlitePool>) -> somehow::Result<impl IntoResponse> {
|
||||
let tasks = get_queue(&db).await?;
|
||||
Ok(QueueTableTemplate { tasks })
|
||||
}
|
||||
#[derive(Template)]
|
||||
#[template(path = "queue.html")]
|
||||
struct QueueTemplate {
|
||||
base: Base,
|
||||
table: QueueTableTemplate,
|
||||
}
|
||||
|
||||
pub async fn get(
|
||||
State(config): State<&'static Config>,
|
||||
State(db): State<SqlitePool>,
|
||||
) -> somehow::Result<impl IntoResponse> {
|
||||
let tasks = get_queue(&db).await?;
|
||||
Ok(QueueTemplate {
|
||||
base: Base::new(config, Tab::Queue),
|
||||
table: QueueTableTemplate { tasks },
|
||||
})
|
||||
}
|
||||
65
src/server/web/queue_id.rs
Normal file
65
src/server/web/queue_id.rs
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
use askama::Template;
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use crate::{config::Config, somehow, util};
|
||||
|
||||
use super::{Base, Tab};
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "queue_id.html")]
|
||||
struct QueueIdTemplate {
|
||||
base: Base,
|
||||
// Task
|
||||
id: String,
|
||||
hash: String,
|
||||
date: String,
|
||||
priority: i64,
|
||||
// Commit
|
||||
summary: String,
|
||||
short: String,
|
||||
reachable: i64,
|
||||
}
|
||||
|
||||
pub async fn get(
|
||||
Path(id): Path<String>,
|
||||
State(config): State<&'static Config>,
|
||||
State(db): State<SqlitePool>,
|
||||
) -> somehow::Result<Response> {
|
||||
let Some(task) = sqlx::query!(
|
||||
"\
|
||||
SELECT \
|
||||
id, \
|
||||
hash, \
|
||||
date AS \"date: time::OffsetDateTime\", \
|
||||
priority, \
|
||||
message, \
|
||||
reachable \
|
||||
FROM queue \
|
||||
JOIN commits USING (hash) \
|
||||
WHERE id = ? \
|
||||
",
|
||||
id
|
||||
)
|
||||
.fetch_optional(&db)
|
||||
.await?
|
||||
else {
|
||||
return Ok(StatusCode::NOT_FOUND.into_response());
|
||||
};
|
||||
|
||||
Ok(QueueIdTemplate {
|
||||
base: Base::new(config, Tab::Queue),
|
||||
date: util::format_time(task.date),
|
||||
id: task.id,
|
||||
priority: task.priority,
|
||||
summary: util::format_commit_summary(&task.message),
|
||||
short: util::format_commit_short(&task.hash, &task.message),
|
||||
hash: task.hash,
|
||||
reachable: task.reachable,
|
||||
}
|
||||
.into_response())
|
||||
}
|
||||
34
src/server/web/static.rs
Normal file
34
src/server/web/static.rs
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
//! Static files embedded in the binary.
|
||||
|
||||
use axum::{
|
||||
http::{header, StatusCode, Uri},
|
||||
response::IntoResponse,
|
||||
};
|
||||
use rust_embed::RustEmbed;
|
||||
|
||||
/// Static assets embedded into the binary at build time from
/// `$OUT_DIR/static`.
#[derive(RustEmbed)]
#[folder = "$OUT_DIR/static"]
pub struct StaticFiles;

/// Newtype wrapping an asset path so it can be turned into a response.
pub struct StaticFile<T>(T);
|
||||
|
||||
impl<T> IntoResponse for StaticFile<T>
|
||||
where
|
||||
T: AsRef<str>,
|
||||
{
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
let path = self.0.as_ref();
|
||||
match StaticFiles::get(path) {
|
||||
Some(file) => {
|
||||
let mime = mime_guess::from_path(path).first_or_octet_stream();
|
||||
([(header::CONTENT_TYPE, mime.as_ref())], file.data).into_response()
|
||||
}
|
||||
None => StatusCode::NOT_FOUND.into_response(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn static_handler(uri: Uri) -> impl IntoResponse {
|
||||
let path = uri.path().trim_start_matches('/').to_string();
|
||||
StaticFile(path)
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue