author     NerdyPepper <[email protected]>    2018-06-17 03:55:43 +0100
committer  NerdyPepper <[email protected]>    2018-06-17 03:55:43 +0100
commit     938423224b0cfed3d7a36494bdf56b62e186b1c1
tree       5a4df550a9017e65a670ae1afcd196397c0dc645
parent     3982993f18538932bfdd8d310b63bca02324fece
Refactor into content.rs
-rw-r--r--   src/content.rs   41
-rw-r--r--   src/main.rs      26
2 files changed, 48 insertions(+), 19 deletions(-)
diff --git a/src/content.rs b/src/content.rs
new file mode 100644
index 0000000..36d4da6
--- /dev/null
+++ b/src/content.rs
@@ -0,0 +1,41 @@
+use reqwest::Response;
+use serde_json::Value;
+
+pub fn url_gen(title: &str) -> String {
+    // /w/api.php?action=query&format=json&prop=extracts&titles=rust&explaintext=1
+    let title = title.replace(" ", "%20");
+
+    // query config
+    let mut url = String::from("https://en.wikipedia.org");
+    url.push_str("/w/api.php?");
+    url.push_str("action=query&");
+    url.push_str("format=json&");
+    url.push_str("prop=extracts&");
+    url.push_str("indexpageids=1&"); // exposes ["query"]["pageids"], used by get_extract()
+    url.push_str(&format!("titles={}&", title));
+    url.push_str("explaintext=1");
+    url
+}
+
+pub fn get_extract(title: &str, mut res: Response) -> String {
+    let v: Value = serde_json::from_str(&res.text().unwrap()).unwrap();
+
+    // Fetch page and pageids of requested title(s)
+    let pageid = &v["query"]["pageids"][0];
+    let pageid_str = match pageid {
+        Value::String(id) => id,
+        _ => panic!("wut"),
+    };
+
+    if pageid_str == "-1" { // the API reports a missing page with page id -1
+        String::from("No such page")
+    } else {
+        format!("{}", &v["query"]["pages"][pageid_str]["extract"])
+    }
+}
+
+pub fn get_title(title: &str, mut res: Response) -> String {
+    let v: Value = serde_json::from_str(&res.text().unwrap())
+        .unwrap_or_else(|_| panic!("Received invalid json"));
+    format!("{}", &v["query"]["normalized"][0]["to"])
+}
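
For reference, here is a minimal sketch (not part of the commit) of the JSON shape that get_extract() and get_title() walk, assuming a MediaWiki query made with prop=extracts and indexpageids=1. The sample body below is illustrative, not a captured API response, and the page id is made up; a missing page would instead show up as the string "-1" in "pageids".

extern crate serde_json;

use serde_json::Value;

fn main() {
    // Illustrative response body; the field names match what content.rs reads.
    let body = r#"{
        "query": {
            "normalized": [ { "from": "scale", "to": "Scale" } ],
            "pageids": [ "29472" ],
            "pages": {
                "29472": { "pageid": 29472, "title": "Scale", "extract": "Scale or scales may refer to ..." }
            }
        }
    }"#;

    let v: Value = serde_json::from_str(body).unwrap();

    // Same lookup path as content::get_extract(): pageids[0] keys into "pages".
    let pageid = v["query"]["pageids"][0].as_str().unwrap();
    println!("{}", v["query"]["pages"][pageid]["extract"]);

    // Same lookup path as content::get_title().
    println!("{}", v["query"]["normalized"][0]["to"]);
}
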
diff --git a/src/main.rs b/src/main.rs
index 951c94c..1573cdb 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,33 +3,21 @@ extern crate serde_json;
 
 use serde_json::Value;
 
+pub mod content;
+
 fn main() {
-    println!("{}", get_extract("scale"));
-}
-
-fn get_extract(title: &str) -> String {
+    let title = "scale";
     let url = format!("https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&list=&meta=&indexpageids=1&continue=%7C%7Cimageinfo&titles={}&exlimit=20&explaintext=1&exsectionformat=plain", title);
     let res = reqwest::get(&url);
 
     match res {
-        Ok(mut res) => {
+        Ok(res) => {
             if res.status().is_success() {
-                let mut v: Value = serde_json::from_str(&res.text().unwrap()).unwrap();
-
-                // Fetch page and pageids of requested title(s)
-                let pageid = &v["query"]["pageids"][0];
-                let pageid_str = match pageid {
-                    Value::String(id) => id,
-                    _ => panic!("wut"),
-                };
-
-                format!("{:#}", &v["query"]["pages"][pageid_str]["extract"])
-            } else {
-                format!("Error while parsing url.\nRecieved {}", res.status())
+                println!("{}", content::get_extract(title, res));
             }
-        },
+        }
         Err(_) => {
-            format!("Failed to parse URL")
+            panic!("Oh no!");
         }
     }
 }
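
url_gen() lands in this commit but main() still builds its query URL inline, so the new helper is not exercised yet. Below is a minimal sketch (not part of the commit) of how a later main.rs could wire the two new functions together, assuming the same reqwest/serde_json setup as the rest of the crate; the title string is only an example, and the eprintln! error handling is one option in place of panicking.

extern crate reqwest;
extern crate serde_json;

pub mod content;

fn main() {
    let title = "Rust (programming language)"; // example title, not from the commit
    let url = content::url_gen(title);

    match reqwest::get(&url) {
        Ok(res) => {
            if res.status().is_success() {
                println!("{}", content::get_extract(title, res));
            } else {
                eprintln!("Request failed: {}", res.status());
            }
        }
        Err(e) => eprintln!("Could not reach the API: {}", e),
    }
}
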