author    Nathan Jaremko  2018-05-13 23:11:49 -0400
committer Nathan Jaremko  2018-05-13 23:11:49 -0400
commit    c449861e1940b9f32569a2b75504244742e3dae0 (patch)
tree      c706f095e6f31a1910fc84e4997bda18877329a1 /src/actions.rs
parent    8c72734e1abbccbeee833270121c2b86500a81d8 (diff)
download  podcast-c449861e1940b9f32569a2b75504244742e3dae0.tar.bz2
Improve error handling
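
This change replaces the scattered unwrap()/expect() calls and ad-hoc eprintln! error branches in src/actions.rs with functions that return Result and attach context through chain_err, so failures propagate to the caller instead of panicking or being printed at the point they occur. The new "use errors::*;" import pulls in the shared Result alias and the context-message constants such as UNABLE_TO_PARSE_REGEX. The contents of src/errors.rs are not part of this diff; the following is a hypothetical sketch of what an error-chain-based module matching these identifiers could look like:

    // Hypothetical sketch of src/errors.rs (not included in this diff); the
    // Result alias comes from the error_chain! macro, and the constants are
    // assumptions inferred from the identifiers used in actions.rs.
    // Requires "#[macro_use] extern crate error_chain;" in the crate root.
    error_chain! {
        foreign_links {
            Io(::std::io::Error);
            Http(::reqwest::Error);
        }
    }

    // Shared context messages pulled in by "use errors::*;". error-chain
    // implements From<&str> for ErrorKind, so a plain &str constant works as
    // the value returned by a chain_err(|| ...) closure.
    pub const UNABLE_TO_PARSE_REGEX: &str = "unable to parse regex";
    pub const UNABLE_TO_CREATE_DIRECTORY: &str = "unable to create directory";
    pub const UNABLE_TO_READ_DIRECTORY: &str = "unable to read directory";
    pub const UNABLE_TO_READ_ENTRY: &str = "unable to read directory entry";
    pub const UNABLE_TO_OPEN_FILE: &str = "unable to open file";
    pub const UNABLE_TO_CREATE_CHANNEL_FROM_FILE: &str = "unable to create channel from file";
    pub const UNABLE_TO_CREATE_CHANNEL_FROM_RESPONSE: &str = "unable to create channel from response";
    pub const UNABLE_TO_GET_HTTP_RESPONSE: &str = "unable to get HTTP response";
    pub const UNABLE_TO_READ_RESPONSE_TO_END: &str = "unable to read response to end";
    pub const UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE: &str = "unable to retrieve podcast by title";
    pub const UNABLE_TO_CONVERT_TO_STR: &str = "unable to convert to &str";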
Diffstat (limited to 'src/actions.rs')
-rw-r--r--  src/actions.rs | 338
1 file changed, 178 insertions, 160 deletions
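
The diff below threads Result<()> through every public action, which lets error reporting collapse into one place in the caller. A minimal caller-side sketch, not part of this commit (run(), the module layout, and the dispatch are assumptions), might look like:

    // Hypothetical main.rs fragment consuming the Result-returning actions;
    // each chain_err() layer surfaces as one "caused by" line.
    #[macro_use]
    extern crate error_chain;
    // (plus the other extern crate and mod declarations the real crate needs)

    mod actions; // this file
    mod errors;  // the error_chain! module sketched above

    fn run() -> errors::Result<()> {
        // dispatch on CLI arguments; for example:
        actions::check_for_update(env!("CARGO_PKG_VERSION"))
    }

    fn main() {
        if let Err(ref e) = run() {
            eprintln!("error: {}", e);
            for cause in e.iter().skip(1) {
                eprintln!("caused by: {}", cause);
            }
            ::std::process::exit(1);
        }
    }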
diff --git a/src/actions.rs b/src/actions.rs
index 3c1a6e9..8adea4c 100644
--- a/src/actions.rs
+++ b/src/actions.rs
@@ -6,25 +6,31 @@ use std::fs::{self, DirBuilder, File};
use std::io::{self, BufReader, Read, Write};
use std::process::Command;
+use errors::*;
use rayon::prelude::*;
use regex::Regex;
use reqwest;
use rss::Channel;
+use std::path::PathBuf;
use toml;
-pub fn list_episodes(search: &str) {
+pub fn list_episodes(search: &str) -> Result<()> {
let stdout = io::stdout();
let mut handle = stdout.lock();
- let re = Regex::new(&format!("(?i){}", &search)).expect("Failed to parse regex");
- let mut path = get_podcast_dir();
+ let re = Regex::new(&format!("(?i){}", &search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let mut path = get_podcast_dir()?;
path.push(".rss");
- DirBuilder::new().recursive(true).create(&path).unwrap();
- for entry in fs::read_dir(&path).unwrap() {
- let entry = entry.unwrap();
+ DirBuilder::new()
+ .recursive(true)
+ .create(&path)
+ .chain_err(|| UNABLE_TO_CREATE_DIRECTORY)?;
+ for entry in fs::read_dir(&path).chain_err(|| UNABLE_TO_READ_DIRECTORY)? {
+ let entry = entry.chain_err(|| UNABLE_TO_READ_ENTRY)?;
if re.is_match(&entry.file_name().into_string().unwrap()) {
- let file = File::open(&entry.path()).unwrap();
- let channel = Channel::read_from(BufReader::new(file)).unwrap();
+ let file = File::open(&entry.path()).chain_err(|| UNABLE_TO_OPEN_FILE)?;
+ let channel = Channel::read_from(BufReader::new(file))
+ .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?;
let podcast = Podcast::from(channel);
let episodes = podcast.episodes();
for (num, ep) in episodes.iter().enumerate() {
@@ -32,200 +38,217 @@ pub fn list_episodes(search: &str) {
&mut handle,
"({}) {}\n",
episodes.len() - num,
- ep.title().unwrap()
- ).is_ok();
+ ep.title().chain_err(|| "unable to retrieve episode title")?
+ ).chain_err(|| "unable to write to stdout")?
}
- return;
+ return Ok(());
}
}
+ Ok(())
}
-pub fn subscribe_rss(url: &str) {
+pub fn subscribe_rss(url: &str) -> Result<Channel> {
println!("Downloading RSS feed...");
- if let Err(err) = download_rss_feed(url) {
- eprintln!("Error: {}", err);
- }
+ download_rss_feed(url)
}
-pub fn download_rss(config: &Config, url: &str) {
+pub fn download_rss(config: &Config, url: &str) -> Result<()> {
println!("Downloading episode(s)...");
- match download_rss_feed(url) {
- Ok(channel) => {
- let download_limit = config.auto_download_limit as usize;
- if download_limit > 0 {
- let podcast = Podcast::from(channel);
- let episodes = podcast.episodes();
- episodes[..download_limit].par_iter().for_each(|ep| {
- if let Err(err) = ep.download(podcast.title()) {
- eprintln!("Error downloading {}: {}", podcast.title(), err);
- }
- });
+ let channel = download_rss_feed(url)?;
+ let download_limit = config.auto_download_limit as usize;
+ if download_limit > 0 {
+ let podcast = Podcast::from(channel);
+ let episodes = podcast.episodes();
+ episodes[..download_limit].par_iter().for_each(|ep| {
+ if let Err(err) = ep.download(podcast.title()) {
+ eprintln!("Error downloading {}: {}", podcast.title(), err);
}
- }
- Err(err) => eprintln!("Error: {}", err),
+ });
}
+ Ok(())
+}
+
+pub fn update_subscription(sub: &mut Subscription) -> Result<()> {
+ let mut path: PathBuf = get_podcast_dir()?;
+ path.push(&sub.title);
+ DirBuilder::new()
+ .recursive(true)
+ .create(&path)
+ .chain_err(|| UNABLE_TO_CREATE_DIRECTORY)?;
+
+ let mut titles = HashSet::new();
+ for entry in fs::read_dir(&path).chain_err(|| UNABLE_TO_READ_DIRECTORY)? {
+ let unwrapped_entry = &entry.chain_err(|| UNABLE_TO_READ_ENTRY)?;
+ titles.insert(trim_extension(&unwrapped_entry
+ .file_name()
+ .into_string()
+ .unwrap()));
+ }
+
+ let mut resp = reqwest::get(&sub.url).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?;
+ let mut content: Vec<u8> = Vec::new();
+ resp.read_to_end(&mut content)
+ .chain_err(|| UNABLE_TO_READ_RESPONSE_TO_END)?;
+ let podcast = Podcast::from(Channel::read_from(BufReader::new(&content[..]))
+ .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_RESPONSE)?);
+ path = get_podcast_dir()?;
+ path.push(".rss");
+
+ let mut filename = String::from(podcast.title());
+ filename.push_str(".xml");
+ path.push(&filename);
+ let mut file = File::create(&path).unwrap();
+ file.write_all(&content).unwrap();
+
+ if podcast.episodes().len() > sub.num_episodes {
+ podcast.episodes()[..podcast.episodes().len() - sub.num_episodes]
+ .par_iter()
+ .for_each(|ep: &Episode| {
+ if let Err(err) = ep.download(podcast.title()) {
+ eprintln!("Error downloading {}: {}", podcast.title(), err);
+ }
+ });
+ }
+ sub.num_episodes = podcast.episodes().len();
+ Ok(())
}
pub fn update_rss(state: &mut State) {
println!("Checking for new episodes...");
- state.subscriptions.par_iter_mut().for_each(|sub| {
- let mut path = get_podcast_dir();
- path.push(&sub.title);
- DirBuilder::new().recursive(true).create(&path).unwrap();
-
- let mut titles = HashSet::new();
- for entry in fs::read_dir(&path).unwrap() {
- let entry = entry.unwrap();
- titles.insert(trim_extension(&entry.file_name().into_string().unwrap()));
- }
-
- let mut resp = reqwest::get(&sub.url).unwrap();
- let mut content: Vec<u8> = Vec::new();
- resp.read_to_end(&mut content).unwrap();
- let podcast = Podcast::from(Channel::read_from(BufReader::new(&content[..])).unwrap());
- path = get_podcast_dir();
- path.push(".rss");
-
- let mut filename = String::from(podcast.title());
- filename.push_str(".xml");
- path.push(&filename);
- let mut file = File::create(&path).unwrap();
- file.write_all(&content).unwrap();
-
- if podcast.episodes().len() > sub.num_episodes {
- podcast.episodes()[..podcast.episodes().len() - sub.num_episodes]
- .par_iter()
- .for_each(|ep| {
- if let Err(err) = ep.download(podcast.title()) {
- eprintln!("Error downloading {}: {}", podcast.title(), err);
- }
- });
- }
- sub.num_episodes = podcast.episodes().len();
- });
+ let _result: Vec<Result<()>> = state
+ .subscriptions
+ .par_iter_mut()
+ .map(|sub: &mut Subscription| update_subscription(sub))
+ .collect();
}
-pub fn list_subscriptions(state: &State) {
+pub fn list_subscriptions(state: &State) -> Result<()> {
let stdout = io::stdout();
let mut handle = stdout.lock();
for podcast in &state.subscriptions() {
- write!(&mut handle, "{}\n", &podcast.title).is_ok();
+ write!(&mut handle, "{}\n", &podcast.title).chain_err(|| "unable to write to stdout")?;
}
+ Ok(())
}
-pub fn download_range(state: &State, p_search: &str, e_search: &str) {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
+pub fn download_range(state: &State, p_search: &str, e_search: &str) -> Result<()> {
+ let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
- match Podcast::from_title(&subscription.title) {
- Ok(podcast) => match parse_download_episodes(e_search) {
- Ok(episodes_to_download) => {
- if let Err(err) = podcast.download_specific(&episodes_to_download) {
- eprintln!("Error: {}", err);
- }
- }
- Err(err) => eprintln!("Error: {}", err),
- },
- Err(err) => eprintln!("Error: {}", err),
- }
+ let podcast = Podcast::from_title(&subscription.title)
+ .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
+ let episodes_to_download = parse_download_episodes(e_search)
+ .chain_err(|| "unable to parse episodes to download")?;
+ podcast
+ .download_specific(&episodes_to_download)
+ .chain_err(|| "unable to download episodes")?;
}
}
+ Ok(())
}
-pub fn download_episode(state: &State, p_search: &str, e_search: &str) {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
- let ep_num = e_search.parse::<usize>().unwrap();
+pub fn download_episode(state: &State, p_search: &str, e_search: &str) -> Result<()> {
+ let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let ep_num = e_search
+ .parse::<usize>()
+ .chain_err(|| "unable to parse number")?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
- match Podcast::from_title(&subscription.title) {
- Ok(podcast) => {
- let episodes = podcast.episodes();
- if let Err(err) = episodes[episodes.len() - ep_num].download(podcast.title()) {
- eprintln!("{}", err);
- }
- }
- Err(err) => eprintln!("Error: {}", err),
- }
+ let podcast = Podcast::from_title(&subscription.title)
+ .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
+ let episodes = podcast.episodes();
+ episodes[episodes.len() - ep_num]
+ .download(podcast.title())
+ .chain_err(|| "unable to download episode")?;
}
}
+ Ok(())
}
-pub fn download_all(state: &State, p_search: &str) {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
+pub fn download_all(state: &State, p_search: &str) -> Result<()> {
+ let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
- match Podcast::from_title(&subscription.title) {
- Ok(podcast) => if let Err(err) = podcast.download() {
- eprintln!("{}", err);
- },
- Err(err) => eprintln!("Error: {}", err),
- }
+ let podcast = Podcast::from_title(&subscription.title)
+ .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
+ podcast
+ .download()
+ .chain_err(|| "unable to download podcast")?;
}
}
+ Ok(())
}
-pub fn play_latest(state: &State, p_search: &str) {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
- let mut path = get_xml_dir();
- if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
- eprintln!(
- "Couldn't create directory: {}\nReason: {}",
- path.to_str().unwrap(),
- err
- );
- return;
- }
+pub fn play_latest(state: &State, p_search: &str) -> Result<()> {
+ let re_pod: Regex =
+ Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let mut path: PathBuf = get_xml_dir()?;
+ DirBuilder::new()
+ .recursive(true)
+ .create(&path)
+ .chain_err(|| UNABLE_TO_CREATE_DIRECTORY)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
- let mut filename = subscription.title.clone();
+ let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
- let mut file = File::open(&path).unwrap();
+ let mut file: File = File::open(&path).chain_err(|| UNABLE_TO_OPEN_FILE)?;
let mut content: Vec<u8> = Vec::new();
- file.read_to_end(&mut content).unwrap();
+ file.read_to_end(&mut content)
+ .chain_err(|| "unable to read file to end")?;
- let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
+ let podcast: Podcast = Podcast::from(Channel::read_from(content.as_slice())
+ .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?);
let episodes = podcast.episodes();
let episode = episodes[0].clone();
- filename = String::from(episode.title().unwrap());
- filename.push_str(episode.extension().unwrap());
- path = get_podcast_dir();
+ filename = String::from(episode
+ .title()
+ .chain_err(|| "unable to retrieve episode name")?);
+ filename.push_str(episode
+ .extension()
+ .chain_err(|| "unable to retrieve episode extension")?);
+ path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
- launch_player(path.to_str().unwrap());
+ launch_player(path.to_str()
+ .chain_err(|| "unable to convert path to &str")?)?;
} else {
- launch_player(episode.url().unwrap());
+ launch_player(episode
+ .url()
+ .chain_err(|| "unable to retrieve episode url")?)?;
}
- return;
+ return Ok(());
}
}
+ Ok(())
}
-pub fn play_episode(state: &State, p_search: &str, ep_num_string: &str) {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
- let ep_num = ep_num_string.parse::<usize>().unwrap();
- let mut path = get_xml_dir();
+pub fn play_episode(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
+ let re_pod: Regex =
+ Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let ep_num: usize = ep_num_string.parse::<usize>().unwrap();
+ let mut path: PathBuf = get_xml_dir()?;
if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
eprintln!(
"Couldn't create directory: {}\nReason: {}",
path.to_str().unwrap(),
err
);
- return;
+ return Ok(());
}
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
- let mut filename = subscription.title.clone();
+ let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
- let mut file = File::open(&path).unwrap();
+ let mut file: File = File::open(&path).unwrap();
let mut content: Vec<u8> = Vec::new();
file.read_to_end(&mut content).unwrap();
@@ -235,46 +258,48 @@ pub fn play_episode(state: &State, p_search: &str, ep_num_string: &str) {
filename = String::from(episode.title().unwrap());
filename.push_str(episode.extension().unwrap());
- path = get_podcast_dir();
+ path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
- launch_player(path.to_str().unwrap());
+ launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
} else {
- launch_player(episode.url().unwrap());
+ launch_player(episode.url().chain_err(|| "unable to retrieve episode url")?)?;
}
- return;
+ return Ok(());
}
}
+ Ok(())
}
-pub fn check_for_update(version: &str) {
+pub fn check_for_update(version: &str) -> Result<()> {
println!("Checking for updates...");
let resp: String = reqwest::get(
"https://raw.githubusercontent.com/njaremko/podcast/master/Cargo.toml",
- ).unwrap()
+ ).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?
.text()
- .unwrap();
+ .chain_err(|| "unable to convert response to text")?;
//println!("{}", resp);
- match resp.parse::<toml::Value>() {
- Ok(config) => {
- let latest = config["package"]["version"].as_str().unwrap();
- if version != latest {
- println!("New version avaliable: {} -> {}", version, latest);
- }
- }
- Err(err) => eprintln!("{}", err),
+ let config = resp.parse::<toml::Value>()
+ .chain_err(|| "unable to parse toml")?;
+ let latest = config["package"]["version"]
+ .as_str()
+ .chain_err(|| UNABLE_TO_CONVERT_TO_STR)?;
+ if version != latest {
+ println!("New version avaliable: {} -> {}", version, latest);
}
+ Ok(())
}
-fn launch_player(url: &str) {
+fn launch_player(url: &str) -> Result<()> {
if launch_mpv(url).is_err() {
- launch_vlc(url)
+ return launch_vlc(url);
}
+ Ok(())
}
-fn launch_mpv(url: &str) -> io::Result<()> {
+fn launch_mpv(url: &str) -> Result<()> {
if let Err(err) = Command::new("mpv")
.args(&["--audio-display=no", "--ytdl=no", url])
.status()
@@ -282,7 +307,6 @@ fn launch_mpv(url: &str) -> io::Result<()> {
match err.kind() {
io::ErrorKind::NotFound => {
eprintln!("Couldn't open mpv\nTrying vlc...");
- return Err(err);
}
_ => eprintln!("Error: {}", err),
}
@@ -290,39 +314,33 @@ fn launch_mpv(url: &str) -> io::Result<()> {
Ok(())
}
-fn launch_vlc(url: &str) {
+fn launch_vlc(url: &str) -> Result<()> {
if let Err(err) = Command::new("vlc").args(&["-I ncurses", url]).status() {
match err.kind() {
io::ErrorKind::NotFound => {
- eprintln!("vlc not found in PATH\nAborting...");
+ eprintln!("Couldn't open vlc...aborting");
}
_ => eprintln!("Error: {}", err),
}
}
+ Ok(())
}
-pub fn remove_podcast(state: &mut State, p_search: &str) {
+pub fn remove_podcast(state: &mut State, p_search: &str) -> Result<()> {
if p_search == "*" {
- match Podcast::delete_all() {
- Ok(_) => println!("Success"),
- Err(err) => eprintln!("Error: {}", err),
- }
- return;
+ return Podcast::delete_all();
}
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).expect("Failed to parse regex");
+ let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in 0..state.subscriptions.len() {
let title = state.subscriptions[subscription].title.clone();
if re_pod.is_match(&title) {
state.subscriptions.remove(subscription);
- match Podcast::delete(&title) {
- Ok(_) => println!("Success"),
- Err(err) => eprintln!("Error: {}", err),
- }
- break;
+ Podcast::delete(&title)?;
}
}
+ Ok(())
}
pub fn print_completion(arg: &str) {