Uses argh instead of Clap

pull/35/head
Tpt 5 years ago
parent bf35eec82f
commit 0440b8ce80
  1. server/Cargo.toml (2 lines changed)
  2. server/src/main.rs (51 lines changed)
  3. wikibase/Cargo.toml (2 lines changed)
  4. wikibase/src/main.rs (102 lines changed)

@@ -12,8 +12,8 @@ edition = "2018"
 [dependencies]
 oxigraph = { path = "../lib", features = ["rocksdb"] }
+argh = "0.1"
 async-std = { version = "1", features = ["attributes"] }
 async-h1 = "1"
-clap = "2"
 http-types = "1"
 url = "2"

@@ -9,15 +9,13 @@
     unused_qualifications
 )]
+use argh::FromArgs;
 use async_std::future::Future;
 use async_std::io::{BufRead, Read};
 use async_std::net::{TcpListener, TcpStream};
 use async_std::prelude::*;
 use async_std::sync::Arc;
 use async_std::task::{block_on, spawn, spawn_blocking};
-use clap::App;
-use clap::Arg;
-use clap::ArgMatches;
 use http_types::headers::HeaderName;
 use http_types::{headers, Body, Error, Method, Mime, Request, Response, Result, StatusCode};
 use oxigraph::sparql::{PreparedQuery, QueryOptions, QueryResult, QueryResultSyntax};
@@ -32,45 +30,36 @@ const MAX_SPARQL_BODY_SIZE: u64 = 1_048_576;
 const HTML_ROOT_PAGE: &str = include_str!("../templates/query.html");
 const SERVER: &str = concat!("Oxigraph/", env!("CARGO_PKG_VERSION"));
+#[derive(FromArgs)]
+/// Oxigraph SPARQL server
+struct Args {
+    /// specify a server socket to bind using the format $(HOST):$(PORT)
+    #[argh(option, short = 'b', default = "\"localhost:7878\".to_string()")]
+    bind: String,
+    /// directory in which persist the data. By default data are kept in memory
+    #[argh(option, short = 'f')]
+    file: Option<String>,
+}
 #[async_std::main]
 pub async fn main() -> Result<()> {
-    let matches = App::new("Oxigraph SPARQL server")
-        .arg(
-            Arg::with_name("bind")
-                .short("b")
-                .long("bind")
-                .help("Specify a server socket to bind using the format $(HOST):$(PORT)")
-                .default_value("localhost:7878")
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("file")
-                .long("file")
-                .short("f")
-                .help("Directory in which persist the data. By default data are kept in memory.")
-                .takes_value(true),
-        )
-        .get_matches();
+    let args: Args = argh::from_env();
-    let file = matches.value_of("file").map(|v| v.to_string());
-    if let Some(file) = file {
-        main_with_dataset(Arc::new(RocksDbRepository::open(file)?), &matches).await
+    if let Some(file) = args.file {
+        main_with_dataset(Arc::new(RocksDbRepository::open(file)?), args.bind).await
     } else {
-        main_with_dataset(Arc::new(MemoryRepository::default()), &matches).await
+        main_with_dataset(Arc::new(MemoryRepository::default()), args.bind).await
     }
 }
-async fn main_with_dataset<R: Send + Sync + 'static>(
-    repository: Arc<R>,
-    matches: &ArgMatches<'_>,
-) -> Result<()>
+async fn main_with_dataset<R: Send + Sync + 'static>(repository: Arc<R>, host: String) -> Result<()>
 where
     for<'a> &'a R: Repository,
 {
-    let addr = matches.value_of("bind").unwrap().to_owned();
-    println!("Listening for requests at http://{}", &addr);
+    println!("Listening for requests at http://{}", &host);
-    http_server(addr, move |request| {
+    http_server(host, move |request| {
         handle_request(request, Arc::clone(&repository))
     })
     .await
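
For context on the new CLI plumbing (not part of the diff itself): with argh, `#[derive(FromArgs)]` on `Args` generates the whole parser, the doc comments become the `--help` text, and `argh::from_env()` parses the process arguments, printing usage and exiting early on error or `--help`. A minimal, hedged sketch of the same pattern as a standalone program (names and the default mirror the struct above; it is illustrative, not the server binary):

    use argh::FromArgs;

    #[derive(FromArgs)]
    /// Illustrative sketch of the server's argument struct.
    struct Args {
        /// server socket to bind using the format $(HOST):$(PORT)
        #[argh(option, short = 'b', default = "\"localhost:7878\".to_string()")]
        bind: String,

        /// directory in which to persist the data; data stay in memory if omitted
        #[argh(option, short = 'f')]
        file: Option<String>,
    }

    fn main() {
        // Parses std::env::args(); `--help` prints the doc comments above as usage text.
        let args: Args = argh::from_env();
        println!("bind = {}, file = {:?}", args.bind, args.file);
    }

The `default` attribute holds an arbitrary Rust expression that is evaluated when the option is absent, which is why the commit can keep the old clap default of localhost:7878.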

@@ -12,7 +12,7 @@ edition = "2018"
 [dependencies]
 oxigraph = {path = "../lib", features = ["rocksdb"] }
-clap = "2"
+argh = "0.1"
 rouille = "3"
 reqwest = "0.9"
 serde_json = "1"

@ -10,9 +10,7 @@
)]
use crate::loader::WikibaseLoader;
use clap::App;
use clap::Arg;
use clap::ArgMatches;
use argh::FromArgs;
use oxigraph::sparql::{PreparedQuery, QueryOptions, QueryResult, QueryResultSyntax};
use oxigraph::{
FileSyntax, GraphSyntax, MemoryRepository, Repository, RepositoryConnection, RocksDbRepository,
@@ -31,73 +29,57 @@ mod loader;
 const MAX_SPARQL_BODY_SIZE: u64 = 1_048_576;
 const SERVER: &str = concat!("Oxigraph/", env!("CARGO_PKG_VERSION"));
+#[derive(FromArgs)]
+/// Oxigraph SPARQL server for Wikibase
+struct Args {
+    /// specify a server socket to bind using the format $(HOST):$(PORT)
+    #[argh(option, short = 'b', default = "\"localhost:7878\".to_string()")]
+    bind: String,
+    /// directory in which persist the data. By default data are kept in memory
+    #[argh(option, short = 'f')]
+    file: Option<String>,
+    #[argh(option)]
+    /// base URL of the MediaWiki API like https://www.wikidata.org/w/api.php
+    mediawiki_api: String,
+    #[argh(option)]
+    /// base URL of MediaWiki like https://www.wikidata.org/wiki/
+    mediawiki_base_url: String,
+    #[argh(option)]
+    /// namespaces ids to load like "0,120"
+    namespaces: Option<String>,
+    #[argh(option)]
+    /// slot to load like "mediainfo". Could not be use with namespaces
+    slot: Option<String>,
+}
 pub fn main() {
-    let matches = App::new("Oxigraph SPARQL server")
-        .arg(
-            Arg::with_name("bind")
-                .long("bind")
-                .short("b")
-                .help("Specify a server socket to bind using the format $(HOST):$(PORT)")
-                .default_value("localhost:7878")
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("file")
-                .long("file")
-                .short("f")
-                .help("Directory in which persist the data. By default data are kept in memory.")
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("mediawiki_api")
-                .long("mediawiki_api")
-                .help("URL of the MediaWiki API like https://www.wikidata.org/w/api.php.")
-                .required(true)
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("mediawiki_base_url")
-                .long("mediawiki_base_url")
-                .help("Base URL of MediaWiki like https://www.wikidata.org/wiki/")
-                .required(true)
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("namespaces")
-                .long("namespaces")
-                .help("Namespaces ids to load like \"0,120\"")
-                .required(false)
-                .takes_value(true),
-        )
-        .arg(
-            Arg::with_name("slot")
-                .long("slot")
-                .help("Slot to load like \"mediainfo\". Could not be use with namespaces")
-                .required(false)
-                .takes_value(true),
-        )
-        .get_matches();
+    let args: Args = argh::from_env();
-    let file = matches.value_of("file").map(|v| v.to_string());
+    let file = args.file.clone();
     if let Some(file) = file {
-        main_with_dataset(Arc::new(RocksDbRepository::open(file).unwrap()), &matches)
+        main_with_dataset(Arc::new(RocksDbRepository::open(file).unwrap()), args)
     } else {
-        main_with_dataset(Arc::new(MemoryRepository::default()), &matches)
+        main_with_dataset(Arc::new(MemoryRepository::default()), args)
     }
 }
-fn main_with_dataset<R: Send + Sync + 'static>(repository: Arc<R>, matches: &ArgMatches<'_>)
+fn main_with_dataset<R: Send + Sync + 'static>(repository: Arc<R>, args: Args)
 where
     for<'a> &'a R: Repository,
 {
-    let addr = matches.value_of("bind").unwrap().to_owned();
-    println!("Listening for requests at http://{}", &addr);
+    println!("Listening for requests at http://{}", &args.bind);
     let repo = repository.clone();
-    let mediawiki_api = matches.value_of("mediawiki_api").unwrap().to_owned();
-    let mediawiki_base_url = matches.value_of("mediawiki_base_url").unwrap().to_owned();
-    let namespaces = matches
-        .value_of("namespaces")
+    let mediawiki_api = args.mediawiki_api.clone();
+    let mediawiki_base_url = args.mediawiki_base_url.clone();
+    let namespaces = args
+        .namespaces
+        .as_deref()
         .unwrap_or("")
         .split(',')
         .flat_map(|t| {
@@ -109,7 +91,7 @@ where
             }
         })
         .collect::<Vec<_>>();
-    let slot = matches.value_of("slot").map(|t| t.to_owned());
+    let slot = args.slot.clone();
     thread::spawn(move || {
         let mut loader = WikibaseLoader::new(
             repo.as_ref(),
@@ -124,7 +106,7 @@ where
         loader.update_loop();
     });
-    start_server(addr, move |request| {
+    start_server(args.bind, move |request| {
         content_encoding::apply(
             request,
             handle_request(request, repository.connection().unwrap()),
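
A behavioral note on the derive above, with a hedged sketch (illustrative, not the wikibase binary): argh treats a plain `String` field with no `default` as a required option, so the MediaWiki API and base-URL options must be supplied, much like clap's `.required(true)`, while `Option<String>` fields such as `namespaces` and `slot` stay optional and default to `None`.

    // Sketch of argh's required-vs-optional behavior, mirroring a subset of the Args struct above.
    use argh::FromArgs;

    #[derive(FromArgs)]
    /// Loader options (illustrative subset).
    struct Args {
        /// base URL of the MediaWiki API like https://www.wikidata.org/w/api.php
        #[argh(option)]
        mediawiki_api: String, // no default and not Option => argh requires it

        /// namespaces ids to load like "0,120"
        #[argh(option)]
        namespaces: Option<String>, // Option => optional, defaults to None
    }

    fn main() {
        let args: Args = argh::from_env();
        // Same comma-splitting idea as the loader setup in the diff.
        let namespaces: Vec<&str> = args
            .namespaces
            .as_deref()
            .unwrap_or("")
            .split(',')
            .filter(|t| !t.is_empty())
            .collect();
        println!("api = {}, namespaces = {:?}", args.mediawiki_api, namespaces);
    }

If a required option is missing, `argh::from_env()` prints the error and usage and exits, so the loader code never sees a half-initialized `Args`.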
