| author | Nathan Jaremko | 2019-01-02 22:25:33 -0500 | 
|---|---|---|
| committer | Nathan Jaremko | 2019-01-02 22:25:52 -0500 | 
| commit | 1570cde47f039ae0267d9261f1c2ad9f06fc70b7 (patch) | |
| tree | c6c8877391dc2894c5e8812879a51215f8090c96 /src/actions.rs | |
| parent | 879f808a7dd86e21b51abadfa5247d4e8dc5ead8 (diff) | |
| download | podcast-1570cde47f039ae0267d9261f1c2ad9f06fc70b7.tar.bz2 | |
Add the ability to specify episode names
Diffstat (limited to 'src/actions.rs')
| -rw-r--r-- | src/actions.rs | 104 |
|---|---|---|

1 file changed, 102 insertions, 2 deletions
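
The diff below renames the number-based entry points to `download_episode_by_num` / `play_episode_by_num` and adds `download_episode_by_name` / `play_episode_by_name`, which select episodes by a substring of their title. How the CLI decides which variant to call is outside this diff; the sketch below is one hypothetical way a caller could dispatch (the `EpisodeSelector` type and `episode_selector` helper are illustrative only, not part of the crate):

```rust
// Hypothetical dispatch: if the episode argument parses as a number, treat it
// as an episode index for the *_by_num functions; otherwise treat it as a
// title substring for the *_by_name functions added in this commit.
enum EpisodeSelector<'a> {
    ByNum(usize),
    ByName(&'a str),
}

fn episode_selector(arg: &str) -> EpisodeSelector<'_> {
    match arg.parse::<usize>() {
        Ok(n) => EpisodeSelector::ByNum(n),
        Err(_) => EpisodeSelector::ByName(arg),
    }
}

fn main() {
    assert!(matches!(episode_selector("42"), EpisodeSelector::ByNum(42)));
    assert!(matches!(
        episode_selector("Interview, part 2"),
        EpisodeSelector::ByName(_)
    ));
    println!("dispatch sketch ok");
}
```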
```diff
diff --git a/src/actions.rs b/src/actions.rs
index 0775aec..58cda4f 100644
--- a/src/actions.rs
+++ b/src/actions.rs
@@ -151,7 +151,7 @@ pub fn download_range(state: &State, p_search: &str, e_search: &str) -> Result<(
     Ok(())
 }
 
-pub fn download_episode(state: &State, p_search: &str, e_search: &str) -> Result<()> {
+pub fn download_episode_by_num(state: &State, p_search: &str, e_search: &str) -> Result<()> {
     let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
     let ep_num = e_search
         .parse::<usize>()
@@ -170,6 +170,52 @@ pub fn download_episode(state: &State, p_search: &str, e_search: &str) -> Result
     Ok(())
 }
 
+pub fn download_episode_by_name(
+    state: &State,
+    p_search: &str,
+    e_search: &str,
+    download_all: bool,
+) -> Result<()> {
+    let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+
+    for subscription in &state.subscriptions {
+        if re_pod.is_match(&subscription.title) {
+            let podcast = Podcast::from_title(&subscription.title)
+                .chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
+            let episodes = podcast.episodes();
+            if download_all {
+                episodes
+                    .iter()
+                    .filter(|ep| {
+                        ep.title()
+                            .unwrap_or_else(|| "".to_string())
+                            .contains(e_search)
+                    })
+                    .for_each(|ep| {
+                        ep.download(podcast.title()).unwrap_or_else(|_| {
+                            println!("Error downloading episode: {}", podcast.title())
+                        });
+                    })
+            } else {
+                let filtered_episodes: Vec<&Episode> = episodes
+                    .iter()
+                    .filter(|ep| {
+                        ep.title()
+                            .unwrap_or_else(|| "".to_string())
+                            .contains(e_search)
+                    })
+                    .collect();
+
+                if let Some(ep) = filtered_episodes.first() {
+                    ep.download(podcast.title())
+                        .chain_err(|| "unable to download episode")?;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
 pub fn download_all(state: &State, p_search: &str) -> Result<()> {
     let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
 
@@ -240,7 +286,7 @@ pub fn play_latest(state: &State, p_search: &str) -> Result<()> {
     Ok(())
 }
 
-pub fn play_episode(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
+pub fn play_episode_by_num(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
     let re_pod: Regex =
         Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
     let ep_num: usize = ep_num_string.parse::<usize>().unwrap();
@@ -287,6 +333,60 @@ pub fn play_episode(state: &State, p_search: &str, ep_num_string: &str) -> Resul
     Ok(())
 }
 
+pub fn play_episode_by_name(state: &State, p_search: &str, ep_string: &str) -> Result<()> {
+    let re_pod: Regex =
+        Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
+    let mut path: PathBuf = get_xml_dir()?;
+    if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
+        eprintln!(
+            "Couldn't create directory: {}\nReason: {}",
+            path.to_str().unwrap(),
+            err
+        );
+        return Ok(());
+    }
+    for subscription in &state.subscriptions {
+        if re_pod.is_match(&subscription.title) {
+            let mut filename: String = subscription.title.clone();
+            filename.push_str(".xml");
+            path.push(filename);
+
+            let mut file: File = File::open(&path).unwrap();
+            let mut content: Vec<u8> = Vec::new();
+            file.read_to_end(&mut content).unwrap();
+
+            let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
+            let episodes = podcast.episodes();
+            let filtered_episodes: Vec<&Episode> = episodes
+                .iter()
+                .filter(|ep| {
+                    ep.title()
+                        .unwrap_or_else(|| "".to_string())
+                        .contains(ep_string)
+                })
+                .collect();
+            if let Some(episode) = filtered_episodes.first() {
+                filename = episode.title().unwrap();
+                filename.push_str(episode.extension().unwrap());
+                path = get_podcast_dir()?;
+                path.push(podcast.title());
+                path.push(filename);
+                if path.exists() {
+                    launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
+                } else {
+                    launch_player(
+                        episode
+                            .url()
+                            .chain_err(|| "unable to retrieve episode url")?,
+                    )?;
+                }
+            }
+            return Ok(());
+        }
+    }
+    Ok(())
+}
+
 pub fn check_for_update(version: &str) -> Result<()> {
     println!("Checking for updates...");
     let resp: String =
```
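
Both new functions locate the podcast with a case-insensitive regex built from the search string (the `(?i)` prefix) and then filter episodes with a plain substring match on the title, skipping episodes whose title is missing. A self-contained sketch of that matching approach, assuming the `regex` crate and made-up sample titles:

```rust
use regex::Regex;

fn main() {
    // Podcast lookup: case-insensitive regex built from the user's search
    // string, the same `format!("(?i){}", p_search)` pattern used in the diff.
    let p_search = "rust";
    let re_pod = Regex::new(&format!("(?i){}", p_search)).expect("invalid search regex");
    assert!(re_pod.is_match("New Rustacean"));

    // Episode lookup: a plain, case-sensitive substring check on the title,
    // ignoring episodes with no title (sample titles are made up).
    let e_search = "Episode 1";
    let titles: Vec<Option<String>> = vec![
        Some("Episode 1: Hello".to_string()),
        Some("Bonus: Q&A".to_string()),
        None,
    ];
    let matches: Vec<&String> = titles
        .iter()
        .filter_map(|t| t.as_ref())
        .filter(|t| t.contains(e_search))
        .collect();
    println!("{} matching episode(s): {:?}", matches.len(), matches);
}
```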

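`play_episode_by_name` also prefers a previously downloaded file and only streams from the episode URL when no local file exists. A small stand-alone sketch of that fallback check (the path and URL below are placeholders):

```rust
use std::path::Path;

// Pick a playback source: the local file if it exists on disk, otherwise the
// remote episode URL, mirroring the `path.exists()` branch in the diff.
fn playback_source<'a>(local: &'a Path, remote_url: &'a str) -> &'a str {
    if local.exists() {
        local.to_str().unwrap_or(remote_url)
    } else {
        remote_url
    }
}

fn main() {
    // Placeholder path and URL, for illustration only.
    let local = Path::new("/tmp/My Podcast/Episode 1.mp3");
    let url = "https://example.com/episode-1.mp3";
    println!("would play: {}", playback_source(local, url));
}
```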