Migrates from argh to clap

Branch: pull/93/head
Author: Tpt, 3 years ago
Parent: e5e1dbc720
Commit: a58a6fac8e
Changed files:
1. server/Cargo.toml (1 change)
2. server/src/main.rs (43 changes)
3. wikibase/Cargo.toml (2 changes)
4. wikibase/src/main.rs (97 changes)

server/Cargo.toml
@@ -20,6 +20,7 @@ default = ["rocksdb"]
 argh = "0.1"
 async-std = { version = "1", features = ["attributes"] }
 async-h1 = "2"
+clap = "2"
 http-types = "2"
 oxigraph = { version = "0.2", path="../lib", features = ["http_client"] }
 rand = "0.8"

server/src/main.rs
@@ -9,12 +9,12 @@
     unused_qualifications
 )]

-use argh::FromArgs;
 use async_std::future::Future;
 use async_std::io::Read;
 use async_std::net::{TcpListener, TcpStream};
 use async_std::prelude::*;
 use async_std::task::{block_on, spawn};
+use clap::{App, Arg};
 use http_types::content::ContentType;
 use http_types::{
     bail_status, format_err_status, headers, Error, Method, Mime, Request, Response, Result,
@@ -39,28 +39,31 @@ const HTML_ROOT_PAGE: &str = include_str!("../templates/query.html");
 const LOGO: &str = include_str!("../logo.svg");
 const SERVER: &str = concat!("Oxigraph/", env!("CARGO_PKG_VERSION"));

-#[derive(FromArgs)]
-/// Oxigraph SPARQL server
-struct Args {
-    /// specify a server socket to bind using the format $(HOST):$(PORT)
-    #[argh(option, short = 'b', default = "\"localhost:7878\".to_string()")]
-    bind: String,
-
-    /// directory in which persist the data
-    #[argh(option, short = 'f')]
-    file: String,
-}
-
 #[async_std::main]
 pub async fn main() -> Result<()> {
-    let args: Args = argh::from_env();
-    let store = Store::open(args.file)?;
+    let matches = App::new("Oxigraph SPARQL server")
+        .arg(
+            Arg::with_name("bind")
+                .short("b")
+                .long("bind")
+                .help("Sets a custom config file")
+                .takes_value(true),
+        )
+        .arg(
+            Arg::with_name("file")
+                .short("f")
+                .long("file")
+                .help("directory in which persist the data")
+                .takes_value(true)
+                .required(true),
+        )
+        .get_matches();
+    let bind = matches.value_of("bind").unwrap_or("localhost:7878");
+    let file = matches.value_of("file").unwrap();
+    let store = Store::open(file)?;

-    println!("Listening for requests at http://{}", &args.bind);
-    http_server(&args.bind, move |request| {
-        handle_request(request, store.clone())
-    })
-    .await
+    println!("Listening for requests at http://{}", &bind);
+    http_server(&bind, move |request| handle_request(request, store.clone())).await
 }

 async fn handle_request(request: Request, store: Store) -> Result<Response> {
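A side note on the default handling above: the new code supplies the "localhost:7878" fallback at the call site with unwrap_or, whereas clap 2 also offers Arg::default_value, which keeps the default next to the argument definition and shows it in --help. A minimal sketch of that alternative (assuming clap 2.x; not part of this commit):

// Sketch only: an alternative to `.unwrap_or("localhost:7878")` at the call site.
// `Arg::default_value` keeps the default with the definition and implies
// `.takes_value(true)`. Assumes clap 2.x.
use clap::{App, Arg};

fn main() {
    let matches = App::new("Oxigraph SPARQL server")
        .arg(
            Arg::with_name("bind")
                .short("b")
                .long("bind")
                .help("Socket to bind using the format HOST:PORT")
                .default_value("localhost:7878"),
        )
        .get_matches();
    // `unwrap` is safe here because a default value is always present.
    let bind = matches.value_of("bind").unwrap();
    println!("would listen on http://{}", bind);
}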

wikibase/Cargo.toml
@@ -11,7 +11,7 @@ SPARQL server based on Oxigraph for Wikibase instances
 edition = "2018"

 [dependencies]
-argh = "0.1"
+clap = "2"
 async-std = { version = "1", features = ["attributes"] }
 async-h1 = "2"
 chrono = "0.4"

wikibase/src/main.rs
@@ -10,11 +10,11 @@
 )]

 use crate::loader::WikibaseLoader;
-use argh::FromArgs;
 use async_std::future::Future;
 use async_std::net::{TcpListener, TcpStream};
 use async_std::prelude::*;
 use async_std::task::spawn;
+use clap::{App, Arg};
 use http_types::content::ContentType;
 use http_types::{
     bail_status, format_err_status, headers, Error, Method, Mime, Request, Response, Result,
@@ -33,43 +33,57 @@ mod loader;
 const MAX_SPARQL_BODY_SIZE: u64 = 1_048_576;
 const SERVER: &str = concat!("Oxigraph/", env!("CARGO_PKG_VERSION"));

-#[derive(FromArgs)]
-/// Oxigraph SPARQL server for Wikibase
-struct Args {
-    /// specify a server socket to bind using the format $(HOST):$(PORT)
-    #[argh(option, short = 'b', default = "\"localhost:7878\".to_string()")]
-    bind: String,
-
-    /// directory in which persist the data
-    #[argh(option, short = 'f')]
-    file: String,
-
-    #[argh(option)]
-    /// base URL of the MediaWiki API like https://www.wikidata.org/w/api.php
-    mediawiki_api: String,
-
-    #[argh(option)]
-    /// base URL of MediaWiki like https://www.wikidata.org/wiki/
-    mediawiki_base_url: String,
-
-    #[argh(option)]
-    /// namespaces ids to load like "0,120"
-    namespaces: Option<String>,
-
-    #[argh(option)]
-    /// slot to load like "mediainfo". Could not be use with namespaces
-    slot: Option<String>,
-}
-
 #[async_std::main]
 pub async fn main() -> Result<()> {
-    let args: Args = argh::from_env();
-
-    let store = RocksDbStore::open(args.file)?;
-    let mediawiki_api = args.mediawiki_api.clone();
-    let mediawiki_base_url = args.mediawiki_base_url.clone();
-    let namespaces = args
-        .namespaces
+    let matches = App::new("Oxigraph SPARQL server for Wikibase")
+        .arg(
+            Arg::with_name("bind")
+                .short("b")
+                .long("bind")
+                .help("Sets a custom config file")
+                .takes_value(true),
+        )
+        .arg(
+            Arg::with_name("file")
+                .short("f")
+                .long("file")
+                .help("directory in which persist the data")
+                .takes_value(true)
+                .required(true),
+        )
+        .arg(
+            Arg::with_name("mediawiki_api")
+                .long("mediawiki_api")
+                .help("base URL of the MediaWiki API like https://www.wikidata.org/w/api.php")
+                .takes_value(true)
+                .required(true),
+        )
+        .arg(
+            Arg::with_name("mediawiki_base_url")
+                .long("mediawiki_base_url")
+                .help("base URL of MediaWiki like https://www.wikidata.org/wiki/")
+                .takes_value(true)
+                .required(true),
+        )
+        .arg(
+            Arg::with_name("namespaces")
+                .long("namespaces")
+                .help("namespaces ids to load like '0,120'")
+                .takes_value(true),
+        )
+        .arg(
+            Arg::with_name("slot")
+                .long("slot")
+                .help("slot to load like 'mediainfo'. Could not be use with namespaces")
+                .takes_value(true),
+        )
+        .get_matches();
+    let bind = matches.value_of("bind").unwrap_or("localhost:7878");
+    let file = matches.value_of("file").unwrap();
+    let mediawiki_api = matches.value_of("mediawiki_api").unwrap();
+    let mediawiki_base_url = matches.value_of("mediawiki_base_url").unwrap();
+    let namespaces = matches
+        .value_of("namespaces")
         .as_deref()
         .unwrap_or("")
         .split(',')
@@ -82,7 +96,9 @@ pub async fn main() -> Result<()> {
             }
         })
         .collect::<Vec<_>>();
-    let slot = args.slot.clone();
+    let slot = matches.value_of("slot");
+
+    let store = RocksDbStore::open(file)?;
     let repo = store.clone();
     let mut loader = WikibaseLoader::new(
         repo,
@@ -98,12 +114,9 @@ pub async fn main() -> Result<()> {
         loader.update_loop();
     });

-    println!("Listening for requests at http://{}", &args.bind);
-    http_server(&args.bind, move |request| {
-        handle_request(request, store.clone())
-    })
-    .await
+    println!("Listening for requests at http://{}", &bind);
+    http_server(&bind, move |request| handle_request(request, store.clone())).await
 }

 async fn handle_request(request: Request, store: RocksDbStore) -> Result<Response> {
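A possible follow-up on the slot/namespaces pair above: the removed argh doc comment only documents that slot cannot be used together with namespaces, and the clap version keeps that rule as help text. clap 2 can also enforce it at parse time via Arg::conflicts_with. A minimal sketch of that approach (assuming clap 2.x; not part of this commit):

// Sketch only: enforce the documented "slot cannot be used with namespaces"
// rule instead of only stating it. `Arg::conflicts_with` makes clap reject
// invocations that pass both options. Assumes clap 2.x.
use clap::{App, Arg};

fn main() {
    let matches = App::new("Oxigraph SPARQL server for Wikibase")
        .arg(
            Arg::with_name("namespaces")
                .long("namespaces")
                .help("namespaces ids to load like '0,120'")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("slot")
                .long("slot")
                .help("slot to load like 'mediainfo'")
                .takes_value(true)
                .conflicts_with("namespaces"), // error out if both are given
        )
        .get_matches();
    println!("slot = {:?}", matches.value_of("slot"));
}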
