Implement leon-wiki-graph command
This commit is contained in:
parent
60ba7721db
commit
8605f8d43f
5 changed files with 165 additions and 9 deletions
|
|
@ -1,4 +1,5 @@
|
|||
pub mod ingest;
|
||||
pub mod leon_wiki_graph;
|
||||
pub mod list_pages;
|
||||
pub mod longest_shortest_path;
|
||||
pub mod path;
|
||||
|
|
|
|||
94
brood/src/commands/leon_wiki_graph.rs
Normal file
94
brood/src/commands/leon_wiki_graph.rs
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
use std::collections::HashSet;
|
||||
use std::fs::File;
|
||||
use std::io::{self, BufReader, BufWriter};
|
||||
use std::path::Path;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::data::AdjacencyList;
|
||||
|
||||
/// One entry of the articles JSON file: a wiki article reference used to
/// select which pages of the graph are exported.
#[derive(Deserialize)]
struct Article {
    // Article title; matched against `page.data.title` in the graph data.
    title: String,
    // NOTE(review): `url` is deserialized but never read in this module —
    // confirm whether it is needed or can be dropped (serde ignores unknown
    // JSON fields by default, so removing it would still parse).
    url: String,
    // Language code; only articles matching the requested language are kept.
    language: String,
}
|
||||
|
||||
/// One row of the `nodes.csv` output.
/// Field order defines the CSV column order (`id,label`) — do not reorder.
#[derive(Serialize)]
struct NodeRow {
    // Page index within the adjacency list.
    id: u32,
    // Page title, used as the node label.
    label: String,
}
|
||||
|
||||
/// One row of the `edges.csv` output: a directed link between two pages.
/// Field order defines the CSV column order (`source,target`) — do not reorder.
#[derive(Serialize)]
struct EdgeRow {
    source: u32,
    target: u32,
}
|
||||
|
||||
pub fn run(datafile: &Path, articlesfile: &Path, language: &str) -> io::Result<()> {
|
||||
let mut databuf = BufReader::new(File::open(datafile)?);
|
||||
let data = AdjacencyList::read(&mut databuf)?;
|
||||
|
||||
let articlesbuf = BufReader::new(File::open(articlesfile)?);
|
||||
let articles: Vec<Article> =
|
||||
simd_json::from_reader(articlesbuf).expect("failed to parse articles file");
|
||||
|
||||
let titles = articles
|
||||
.into_iter()
|
||||
.filter(|a| a.language == language)
|
||||
.map(|a| a.title)
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let page_ids = data
|
||||
.pages
|
||||
.split_last()
|
||||
.unwrap()
|
||||
.1
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(_, p)| titles.contains(&p.data.title))
|
||||
.map(|(i, _)| i as u32)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut node_rows = vec![];
|
||||
for i in &page_ids {
|
||||
let page = data.page(*i);
|
||||
let row = NodeRow {
|
||||
id: *i,
|
||||
label: page.data.title.clone(),
|
||||
};
|
||||
node_rows.push(row);
|
||||
}
|
||||
|
||||
let mut edge_rows = vec![];
|
||||
for i in &page_ids {
|
||||
let links = data
|
||||
.link_range(*i)
|
||||
.map(|li| data.link(li).to)
|
||||
.filter(|to| page_ids.contains(to))
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
for to in links {
|
||||
let row = EdgeRow {
|
||||
source: *i,
|
||||
target: to,
|
||||
};
|
||||
edge_rows.push(row);
|
||||
}
|
||||
}
|
||||
|
||||
let node_writer = BufWriter::new(File::create("nodes.csv")?);
|
||||
let mut node_writer = csv::Writer::from_writer(node_writer);
|
||||
for node in node_rows {
|
||||
node_writer.serialize(node).unwrap();
|
||||
}
|
||||
|
||||
let edge_writer = BufWriter::new(File::create("edges.csv")?);
|
||||
let mut edge_writer = csv::Writer::from_writer(edge_writer);
|
||||
for edge in edge_rows {
|
||||
edge_writer.serialize(edge).unwrap();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -12,9 +12,7 @@ enum Command {
|
|||
/// Read sift data on stdin and output brood data.
|
||||
Ingest,
|
||||
/// Read and reexport brood data.
|
||||
Reexport {
|
||||
to: PathBuf,
|
||||
},
|
||||
Reexport { to: PathBuf },
|
||||
/// Find a path from one article to another.
|
||||
Path {
|
||||
from: String,
|
||||
|
|
@ -24,11 +22,11 @@ enum Command {
|
|||
flip: bool,
|
||||
},
|
||||
/// Find the longest shortest path starting at an article.
|
||||
LongestShortestPath {
|
||||
from: String,
|
||||
},
|
||||
// Print all page titles.
|
||||
LongestShortestPath { from: String },
|
||||
/// Print all page titles.
|
||||
ListPages,
|
||||
/// Construct wikipedia article graph for Leon.
|
||||
LeonWikiGraph { articles: PathBuf, language:String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
|
|
@ -54,5 +52,8 @@ fn main() -> io::Result<()> {
|
|||
commands::longest_shortest_path::run(&args.datafile, &from)
|
||||
}
|
||||
Command::ListPages => commands::list_pages::run(&args.datafile),
|
||||
Command::LeonWikiGraph { articles ,language} => {
|
||||
commands::leon_wiki_graph::run(&args.datafile, &articles,&language)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue