author     Nathan Jaremko    2019-03-02 20:30:54 -0500
committer  Nathan Jaremko    2019-03-02 20:30:54 -0500
commit     9c9fb906cd6c05105d96fa586f429764b12169ca (patch)
tree       1499d2e38c368952886fb72a01251be47448603c /src/actions.rs
parent     2d34d8e812f3e85b0d10f126bf5f334847d0fbca (diff)
download   podcast-9c9fb906cd6c05105d96fa586f429764b12169ca.tar.bz2
0.10.0 - Lots of Improvements
Diffstat (limited to 'src/actions.rs')
-rw-r--r--    src/actions.rs    485
1 file changed, 62 insertions(+), 423 deletions(-)
diff --git a/src/actions.rs b/src/actions.rs
index c0518c5..a44a9cf 100644
--- a/src/actions.rs
+++ b/src/actions.rs
@@ -1,12 +1,12 @@
+use crate::download;
+use crate::errors::*;
use crate::structs::*;
use crate::utils::*;
use std::collections::HashSet;
-use std::fs::{self, DirBuilder, File};
-use std::io::{self, BufReader, Read, Write};
-use std::process::Command;
+use std::fs::{self, File};
+use std::io::{self, BufReader, BufWriter, Write};
-use crate::errors::*;
use clap::App;
use clap::Shell;
use rayon::prelude::*;
@@ -17,77 +17,54 @@ use std::path::PathBuf;
use toml;
pub fn list_episodes(search: &str) -> Result<()> {
- let re = Regex::new(&format!("(?i){}", &search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let re = Regex::new(&format!("(?i){}", &search))?;
let path = get_xml_dir()?;
create_dir_if_not_exist(&path)?;
- for entry in fs::read_dir(&path).chain_err(|| UNABLE_TO_READ_DIRECTORY)? {
- let entry = entry.chain_err(|| UNABLE_TO_READ_ENTRY)?;
+ for entry in fs::read_dir(&path)? {
+ let entry = entry?;
if re.is_match(&entry.file_name().into_string().unwrap()) {
- let file = File::open(&entry.path()).chain_err(|| UNABLE_TO_OPEN_FILE)?;
- let channel = Channel::read_from(BufReader::new(file))
- .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?;
+ let file = File::open(&entry.path())?;
+ let channel = Channel::read_from(BufReader::new(file))?;
let podcast = Podcast::from(channel);
let episodes = podcast.episodes();
let stdout = io::stdout();
let mut handle = stdout.lock();
- for (num, ep) in episodes.iter().enumerate() {
- writeln!(
- &mut handle,
- "({}) {}",
- episodes.len() - num,
- ep.title()
- .chain_err(|| "unable to retrieve episode title")?
- )
- .ok();
- }
+ episodes
+ .iter()
+ .filter(|ep| ep.title().is_some())
+ .enumerate()
+ .for_each(|(num, ep)| {
+ writeln!(
+ &mut handle,
+ "({}) {}",
+ episodes.len() - num,
+ ep.title().unwrap()
+ )
+ .ok();
+ });
return Ok(());
}
}
Ok(())
}
-pub fn download_rss(config: &Config, url: &str) -> Result<()> {
- let channel = download_rss_feed(url)?;
- let mut download_limit = config.auto_download_limit as usize;
- if 0 < download_limit {
- println!("Subscribe auto-download limit set to: {}", download_limit);
- println!("Downloading episode(s)...");
- let podcast = Podcast::from(channel);
- let episodes = podcast.episodes();
- if episodes.len() < download_limit {
- download_limit = episodes.len()
- }
- episodes[..download_limit].par_iter().for_each(|ep| {
- if let Err(err) = download(podcast.title(), ep) {
- eprintln!("Error downloading {}: {}", podcast.title(), err);
- }
- });
- }
- Ok(())
-}
-
pub fn update_subscription(sub: &mut Subscription) -> Result<()> {
+ println!("Updating {}", sub.title);
let mut path: PathBuf = get_podcast_dir()?;
path.push(&sub.title);
create_dir_if_not_exist(&path)?;
let mut titles = HashSet::new();
- for entry in fs::read_dir(&path).chain_err(|| UNABLE_TO_READ_DIRECTORY)? {
- let unwrapped_entry = &entry.chain_err(|| UNABLE_TO_READ_ENTRY)?;
+ for entry in fs::read_dir(&path)? {
+ let unwrapped_entry = &entry?;
titles.insert(trim_extension(
&unwrapped_entry.file_name().into_string().unwrap(),
));
}
- let mut resp = reqwest::get(&sub.url).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?;
- let mut content: Vec<u8> = Vec::new();
- resp.read_to_end(&mut content)
- .chain_err(|| UNABLE_TO_READ_RESPONSE_TO_END)?;
- let podcast = Podcast::from(
- Channel::read_from(BufReader::new(&content[..]))
- .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_RESPONSE)?,
- );
+ let resp = reqwest::get(&sub.url)?;
+ let podcast = Podcast::from(Channel::read_from(BufReader::new(resp))?);
let mut filename = String::from(podcast.title());
filename.push_str(".xml");
@@ -95,17 +72,16 @@ pub fn update_subscription(sub: &mut Subscription) -> Result<()> {
let mut podcast_rss_path = get_xml_dir()?;
podcast_rss_path.push(&filename);
- let mut file = File::create(&podcast_rss_path).unwrap();
- file.write_all(&content).unwrap();
+ let file = File::create(&podcast_rss_path)?;
+ (*podcast).write_to(BufWriter::new(file))?;
if sub.num_episodes < podcast.episodes().len() {
- podcast.episodes()[..podcast.episodes().len() - sub.num_episodes]
+ podcast.episodes()
+ [..podcast.episodes().len() - sub.num_episodes]
.par_iter()
- .for_each(|ep: &Episode| {
- if let Err(err) = download(podcast.title(), ep) {
- eprintln!("Error downloading {}: {}", podcast.title(), err);
- }
- });
+ .map(|ep| download::download(podcast.title(), ep))
+ .flat_map(|e| e.err())
+ .for_each(|err| eprintln!("Error: {}", err));
}
sub.num_episodes = podcast.episodes().len();
Ok(())
@@ -114,327 +90,18 @@ pub fn update_subscription(sub: &mut Subscription) -> Result<()> {
pub fn update_rss(state: &mut State) {
println!("Checking for new episodes...");
let _result: Vec<Result<()>> = state
- .subscriptions
+ .subscriptions_mut()
.par_iter_mut()
.map(|sub: &mut Subscription| update_subscription(sub))
.collect();
+ println!("Done.");
}
pub fn list_subscriptions(state: &State) -> Result<()> {
let stdout = io::stdout();
let mut handle = stdout.lock();
- for podcast in state.subscriptions() {
- writeln!(&mut handle, "{}", &podcast.title).ok();
- }
- Ok(())
-}
-
-pub fn download_range(state: &State, p_search: &str, e_search: &str) -> Result<()> {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
-
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let podcast = Podcast::from_title(&subscription.title)
- .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
- let episodes_to_download = parse_download_episodes(e_search)
- .chain_err(|| "unable to parse episodes to download")?;
- podcast
- .download_specific(&episodes_to_download)
- .chain_err(|| "unable to download episodes")?;
- }
- }
- Ok(())
-}
-
-pub fn download_episode_by_num(state: &State, p_search: &str, e_search: &str) -> Result<()> {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
-
- if let Ok(ep_num) = e_search.parse::<usize>() {
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let podcast = Podcast::from_title(&subscription.title)
- .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
- let episodes = podcast.episodes();
- download(podcast.title(), &episodes[episodes.len() - ep_num])
- .chain_err(|| "unable to download episode")?;
- }
- }
- } else {
- {
- let stdout = io::stdout();
- let mut handle = stdout.lock();
- writeln!(
- &mut handle,
- "Failed to parse episode number...\nAttempting to find episode by name..."
- )
- .ok();
- }
- download_episode_by_name(state, p_search, e_search, false)
- .chain_err(|| "Failed to download episode.")?;
- }
-
- Ok(())
-}
-
-pub fn download(podcast_name: &str, episode: &Episode) -> Result<()> {
- let stdout = io::stdout();
-
- let mut path = get_podcast_dir()?;
- path.push(podcast_name);
- create_dir_if_not_exist(&path)?;
-
- if let Some(url) = episode.url() {
- if let Some(title) = episode.title() {
- let mut filename = title;
- filename.push_str(
- episode
- .extension()
- .chain_err(|| "unable to retrieve extension")?,
- );
- path.push(filename);
- if !path.exists() {
- {
- let mut handle = stdout.lock();
- writeln!(&mut handle, "Downloading: {:?}", &path).ok();
- }
- let mut file = File::create(&path).chain_err(|| UNABLE_TO_CREATE_FILE)?;
- let mut resp = reqwest::get(url).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?;
- let mut content: Vec<u8> = Vec::new();
- resp.read_to_end(&mut content)
- .chain_err(|| UNABLE_TO_READ_RESPONSE_TO_END)?;
- file.write_all(&content)
- .chain_err(|| UNABLE_TO_WRITE_FILE)?;
- } else {
- let mut handle = stdout.lock();
- writeln!(&mut handle, "File already exists: {:?}", &path).ok();
- }
- }
- }
- Ok(())
-}
-
-pub fn download_episode_by_name(
- state: &State,
- p_search: &str,
- e_search: &str,
- download_all: bool,
-) -> Result<()> {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
-
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let podcast = Podcast::from_title(&subscription.title)
- .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
- let episodes = podcast.episodes();
- if download_all {
- episodes
- .iter()
- .filter(|ep| ep.title().is_some())
- .filter(|ep| ep.title().unwrap().contains(e_search))
- .for_each(|ep| {
- download(podcast.title(), ep).unwrap_or_else(|_| {
- eprintln!("Error downloading episode: {}", podcast.title())
- });
- })
- } else {
- let filtered_episodes: Vec<&Episode> = episodes
- .iter()
- .filter(|ep| ep.title().is_some())
- .filter(|ep| {
- ep.title()
- .unwrap()
- .to_lowercase()
- .contains(&e_search.to_lowercase())
- })
- .collect();
-
- if let Some(ep) = filtered_episodes.first() {
- download(podcast.title(), ep).chain_err(|| "unable to download episode")?;
- }
- }
- }
- }
- Ok(())
-}
-
-pub fn download_all(state: &State, p_search: &str) -> Result<()> {
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
-
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let podcast = Podcast::from_title(&subscription.title)
- .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
- podcast
- .download()
- .chain_err(|| "unable to download podcast")?;
- }
- }
- Ok(())
-}
-
-pub fn play_latest(state: &State, p_search: &str) -> Result<()> {
- let re_pod: Regex =
- Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
- let mut path: PathBuf = get_xml_dir()?;
- DirBuilder::new()
- .recursive(true)
- .create(&path)
- .chain_err(|| UNABLE_TO_CREATE_DIRECTORY)?;
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let mut filename: String = subscription.title.clone();
- filename.push_str(".xml");
- path.push(filename);
-
- let mut file: File = File::open(&path).chain_err(|| UNABLE_TO_OPEN_FILE)?;
- let mut content: Vec<u8> = Vec::new();
- file.read_to_end(&mut content)
- .chain_err(|| "unable to read file to end")?;
-
- let podcast: Podcast = Podcast::from(
- Channel::read_from(content.as_slice())
- .chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?,
- );
- let episodes = podcast.episodes();
- let episode = episodes[0].clone();
-
- filename = episode
- .title()
- .chain_err(|| "unable to retrieve episode name")?;
- filename.push_str(
- episode
- .extension()
- .chain_err(|| "unable to retrieve episode extension")?,
- );
- path = get_podcast_dir()?;
- path.push(podcast.title());
- path.push(filename);
- if path.exists() {
- launch_player(
- path.to_str()
- .chain_err(|| "unable to convert path to &str")?,
- )?;
- } else {
- launch_player(
- episode
- .url()
- .chain_err(|| "unable to retrieve episode url")?,
- )?;
- }
- return Ok(());
- }
- }
- Ok(())
-}
-
-pub fn play_episode_by_num(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
- let re_pod: Regex =
- Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
- if let Ok(ep_num) = ep_num_string.parse::<usize>() {
- let mut path: PathBuf = get_xml_dir()?;
- if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
- eprintln!(
- "Couldn't create directory: {}\nReason: {}",
- path.to_str().unwrap(),
- err
- );
- return Ok(());
- }
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let mut filename: String = subscription.title.clone();
- filename.push_str(".xml");
- path.push(filename);
-
- let mut file: File = File::open(&path).unwrap();
- let mut content: Vec<u8> = Vec::new();
- file.read_to_end(&mut content).unwrap();
-
- let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
- let episodes = podcast.episodes();
- let episode = episodes[episodes.len() - ep_num].clone();
-
- filename = episode.title().unwrap();
- filename.push_str(episode.extension().unwrap());
- path = get_podcast_dir()?;
- path.push(podcast.title());
- path.push(filename);
- if path.exists() {
- launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
- } else {
- launch_player(
- episode
- .url()
- .chain_err(|| "unable to retrieve episode url")?,
- )?;
- }
- return Ok(());
- }
- }
- } else {
- {
- let stdout = io::stdout();
- let mut handle = stdout.lock();
- writeln!(&mut handle, "Failed to parse episode index number...").ok();
- writeln!(&mut handle, "Attempting to find episode by name...").ok();
- }
- play_episode_by_name(state, p_search, ep_num_string)
- .chain_err(|| "Failed to play episode by name.")?;
- }
- Ok(())
-}
-
-pub fn play_episode_by_name(state: &State, p_search: &str, ep_string: &str) -> Result<()> {
- let re_pod: Regex =
- Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
- let mut path: PathBuf = get_xml_dir()?;
- if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
- eprintln!(
- "Couldn't create directory: {}\nReason: {}",
- path.to_str().unwrap(),
- err
- );
- return Ok(());
- }
- for subscription in &state.subscriptions {
- if re_pod.is_match(&subscription.title) {
- let mut filename: String = subscription.title.clone();
- filename.push_str(".xml");
- path.push(filename);
-
- let mut file: File = File::open(&path).unwrap();
- let mut content: Vec<u8> = Vec::new();
- file.read_to_end(&mut content).unwrap();
-
- let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
- let episodes = podcast.episodes();
- let filtered_episodes: Vec<&Episode> = episodes
- .iter()
- .filter(|ep| {
- ep.title()
- .unwrap_or_else(|| "".to_string())
- .to_lowercase()
- .contains(&ep_string.to_lowercase())
- })
- .collect();
- if let Some(episode) = filtered_episodes.first() {
- filename = episode.title().unwrap();
- filename.push_str(episode.extension().unwrap());
- path = get_podcast_dir()?;
- path.push(podcast.title());
- path.push(filename);
- if path.exists() {
- launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
- } else {
- launch_player(
- episode
- .url()
- .chain_err(|| "unable to retrieve episode url")?,
- )?;
- }
- }
- return Ok(());
- }
+ for subscription in state.subscriptions() {
+ writeln!(&mut handle, "{}", subscription.title())?;
}
Ok(())
}
@@ -442,53 +109,13 @@ pub fn play_episode_by_name(state: &State, p_search: &str, ep_string: &str) -> R
pub fn check_for_update(version: &str) -> Result<()> {
println!("Checking for updates...");
let resp: String =
- reqwest::get("https://raw.githubusercontent.com/njaremko/podcast/master/Cargo.toml")
- .chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?
- .text()
- .chain_err(|| "unable to convert response to text")?;
+ reqwest::get("https://raw.githubusercontent.com/njaremko/podcast/master/Cargo.toml")?
+ .text()?;
- let config = resp
- .parse::<toml::Value>()
- .chain_err(|| "unable to parse toml")?;
- let latest = config["package"]["version"]
- .as_str()
- .chain_err(|| UNABLE_TO_CONVERT_TO_STR)?;
+ let config = resp.parse::<toml::Value>()?;
+ let latest = config["package"]["version"].as_str().expect(&format!("Cargo.toml didn't have a version {:?}", config));
if version != latest {
- println!("New version avaliable: {} -> {}", version, latest);
- }
- Ok(())
-}
-
-fn launch_player(url: &str) -> Result<()> {
- if launch_mpv(url).is_err() {
- return launch_vlc(url);
- }
- Ok(())
-}
-
-fn launch_mpv(url: &str) -> Result<()> {
- if let Err(err) = Command::new("mpv")
- .args(&["--audio-display=no", "--ytdl=no", url])
- .status()
- {
- match err.kind() {
- io::ErrorKind::NotFound => {
- eprintln!("Couldn't open mpv\nTrying vlc...");
- }
- _ => eprintln!("Error: {}", err),
- }
- }
- Ok(())
-}
-
-fn launch_vlc(url: &str) -> Result<()> {
- if let Err(err) = Command::new("vlc").args(&["-I ncurses", url]).status() {
- match err.kind() {
- io::ErrorKind::NotFound => {
- eprintln!("Couldn't open vlc...aborting");
- }
- _ => eprintln!("Error: {}", err),
- }
+ println!("New version available: {} -> {}", version, latest);
}
Ok(())
}
@@ -499,7 +126,7 @@ pub fn remove_podcast(state: &mut State, p_search: &str) -> Result<()> {
return delete_all();
}
- let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+ let re_pod = Regex::new(&format!("(?i){}", &p_search))?;
for subscription in 0..state.subscriptions.len() {
let title = state.subscriptions[subscription].title.clone();
@@ -513,11 +140,23 @@ pub fn remove_podcast(state: &mut State, p_search: &str) -> Result<()> {
pub fn print_completion(app: &mut App, arg: &str) {
match arg {
- "zsh" => app.gen_completions_to("podcast", Shell::Zsh, &mut io::stdout()),
- "bash" => app.gen_completions_to("podcast", Shell::Bash, &mut io::stdout()),
- "powershell" => app.gen_completions_to("podcast", Shell::PowerShell, &mut io::stdout()),
- "fish" => app.gen_completions_to("podcast", Shell::Fish, &mut io::stdout()),
- "elvish" => app.gen_completions_to("podcast", Shell::Elvish, &mut io::stdout()),
- other => eprintln!("Completions are not available for {}", other),
+ "zsh" => {
+ app.gen_completions_to("podcast", Shell::Zsh, &mut io::stdout());
+ }
+ "bash" => {
+ app.gen_completions_to("podcast", Shell::Bash, &mut io::stdout());
+ }
+ "powershell" => {
+ app.gen_completions_to("podcast", Shell::PowerShell, &mut io::stdout());
+ }
+ "fish" => {
+ app.gen_completions_to("podcast", Shell::Fish, &mut io::stdout());
+ }
+ "elvish" => {
+ app.gen_completions_to("podcast", Shell::Elvish, &mut io::stdout());
+ }
+ other => {
+ println!("Completions are not available for {}", other);
+ }
}
}
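
Note on error handling: throughout this diff, explicit `.chain_err(|| ...)` calls are replaced by the bare `?` operator, so every foreign error type touched here (io, reqwest, regex, rss, toml) must convert into the crate's `Result` via `From`. The `crate::errors` module is not part of this diff; the sketch below is a hypothetical illustration of one way it could provide those conversions (shown with error_chain's `foreign_links` -- the names and crate choice are assumptions, not the author's actual module):

    // Hypothetical sketch of a crate::errors module, NOT taken from this commit.
    // It only illustrates why bare `?` compiles after this refactor: each
    // foreign error type gets a From conversion into the crate's Error type.
    use error_chain::error_chain;

    error_chain! {
        foreign_links {
            Io(std::io::Error);       // File::open, fs::read_dir, write_all
            Http(reqwest::Error);     // reqwest::get(&sub.url)?
            Regex(regex::Error);      // Regex::new(&format!("(?i){}", search))?
            Rss(rss::Error);          // Channel::read_from(BufReader::new(..))?
            Toml(toml::de::Error);    // resp.parse::<toml::Value>()?
        }
    }

With a module like this in scope via `use crate::errors::*;`, each `?` in the updated functions converts the underlying error automatically, which is what lets this commit drop the string constants such as UNABLE_TO_PARSE_REGEX and UNABLE_TO_GET_HTTP_RESPONSE.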