Fix data preparation

This commit is contained in:
Joscha 2023-08-16 00:10:25 +02:00
parent 4b55f5364a
commit 91e4883137
6 changed files with 61 additions and 78 deletions

View file

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT hash, committer_date AS \"time: OffsetDateTime\" FROM commits ORDER BY hash ASC ",
"describe": {
"columns": [
{
"name": "hash",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "time: OffsetDateTime",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false
]
},
"hash": "3227b67648549c213c6ad34c78842a8d8120d3202f002d293e674cac994adedc"
}

View file

@ -1,20 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT committer_date AS \"time: OffsetDateTime\" FROM commits ORDER BY unixepoch(committer_date) ASC, hash ASC ",
"describe": {
"columns": [
{
"name": "time: OffsetDateTime",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "639b559608466d60bc9cbfb9f543db10b596edee0e9bcb590c6139a7e8927945"
}

View file

@ -1,20 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT hash FROM commits ORDER BY unixepoch(committer_date) ASC, hash ASC ",
"describe": {
"columns": [
{
"name": "hash",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "a718c407e9620bf99ed97247b89dd0afb7ef657ee0a302433641f651e72b8cbe"
}

View file

@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "WITH measurements AS ( SELECT hash, value, MAX(start) FROM runs JOIN run_measurements USING (id) WHERE metric = ? GROUP BY hash ) SELECT value FROM commits LEFT JOIN measurements USING (hash) WHERE reachable = 2 ORDER BY unixepoch(committer_date) ASC, hash ASC ",
"query": "WITH measurements AS ( SELECT hash, value, MAX(start) FROM runs JOIN run_measurements USING (id) WHERE metric = ? GROUP BY hash ) SELECT value FROM commits LEFT JOIN measurements USING (hash) ORDER BY hash ASC ",
"describe": {
"columns": [
{
@ -16,5 +16,5 @@
true
]
},
"hash": "50ae538fd51000e8b4988981f66aed60bc30de2bb8cbb4a1533dfd551503b50a"
"hash": "c202b6c8e83d2535301e0633404b83a7a7b4b105338833223dd5ab0ebdecfdfd"
}

View file

@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT parent, child FROM commit_links JOIN commits AS p ON p.hash = parent JOIN commits AS c ON c.hash = child ORDER BY unixepoch(p.committer_date) ASC, p.hash ASC, unixepoch(c.committer_date) ASC, c.hash ASC ",
"query": "SELECT parent, child FROM commit_links ORDER BY parent ASC, child ASC ",
"describe": {
"columns": [
{
@ -22,5 +22,5 @@
false
]
},
"hash": "3620648a1003211c184423a3c13b6eae3500b888490430fbaf20517830db508e"
"hash": "eecd95c794db0640d2de01dc644af7e2c6e4ab357710d6a1ee374cad106c166e"
}

View file

@ -159,27 +159,33 @@ pub async fn get_graph_data(
// The SQL queries that return one result per commit *must* return the same
// number of rows in the same order!
// TODO Order queries by hash only
// TODO After topo sort, do a stable sort by committer date
// TODO Limit by date or amount
// TODO Limit to tracked commits
let unsorted_hashes = sqlx::query_scalar!(
let mut unsorted_hashes = Vec::<String>::new();
let mut times_by_hash = HashMap::<String, i64>::new();
let mut rows = sqlx::query!(
"\
SELECT hash FROM commits \
ORDER BY unixepoch(committer_date) ASC, hash ASC \
SELECT \
hash, \
committer_date AS \"time: OffsetDateTime\" \
FROM commits \
ORDER BY hash ASC \
"
)
.fetch_all(&mut *conn)
.await?;
.fetch(&mut *conn);
while let Some(row) = rows.next().await {
let row = row?;
unsorted_hashes.push(row.hash.clone());
times_by_hash.insert(row.hash, row.time.unix_timestamp());
}
drop(rows);
let parent_child_pairs = sqlx::query!(
"\
SELECT parent, child \
FROM commit_links \
JOIN commits AS p ON p.hash = parent \
JOIN commits AS c ON c.hash = child \
ORDER BY \
unixepoch(p.committer_date) ASC, p.hash ASC, \
unixepoch(c.committer_date) ASC, c.hash ASC \
ORDER BY parent ASC, child ASC \
"
)
.fetch(&mut *conn)
@ -187,9 +193,10 @@ pub async fn get_graph_data(
.try_collect::<Vec<_>>()
.await?;
let sorted_hashes = util::sort_topologically(&unsorted_hashes, &parent_child_pairs);
let mut hashes = util::sort_topologically(&unsorted_hashes, &parent_child_pairs);
hashes.sort_by_key(|hash| times_by_hash[hash]);
let sorted_hash_indices = sorted_hashes
let sorted_hash_indices = hashes
.iter()
.cloned()
.enumerate()
@ -203,31 +210,22 @@ pub async fn get_graph_data(
parents.entry(parent_idx).or_default().push(child_idx);
}
// Collect times
let times = hashes
.iter()
.map(|hash| times_by_hash[hash])
.collect::<Vec<_>>();
// permutation[unsorted_index] = sorted_index
let permutation = unsorted_hashes
.iter()
.map(|h| sorted_hash_indices[h])
.map(|hash| sorted_hash_indices[hash])
.collect::<Vec<_>>();
// Collect and permute commit times
let mut times = vec![0; sorted_hashes.len()];
let mut rows = sqlx::query_scalar!(
"\
SELECT committer_date AS \"time: OffsetDateTime\" FROM commits \
ORDER BY unixepoch(committer_date) ASC, hash ASC \
"
)
.fetch(&mut *conn)
.enumerate();
while let Some((i, time)) = rows.next().await {
times[permutation[i]] = time?.unix_timestamp();
}
drop(rows);
// Collect and permute measurements
let mut measurements = HashMap::new();
for metric in form.metric {
let mut values = vec![None; sorted_hashes.len()];
let mut values = vec![None; hashes.len()];
let mut rows = sqlx::query_scalar!(
"\
WITH \
@ -241,8 +239,7 @@ pub async fn get_graph_data(
SELECT value \
FROM commits \
LEFT JOIN measurements USING (hash) \
WHERE reachable = 2 \
ORDER BY unixepoch(committer_date) ASC, hash ASC \
ORDER BY hash ASC \
",
metric,
)
@ -257,7 +254,7 @@ pub async fn get_graph_data(
}
Ok(Json(GraphData {
hashes: sorted_hashes,
hashes,
parents,
times,
measurements,