diff options
| author | A Farzat <a@farzat.xyz> | 2026-03-05 16:21:38 +0300 |
|---|---|---|
| committer | A Farzat <a@farzat.xyz> | 2026-03-05 16:21:38 +0300 |
| commit | 598ee0199a50d30d06f4a2fa67bed5943c6452b4 (patch) | |
| tree | 6274ae4a896eace66c57074e364e0e3499c70c83 /src | |
| parent | 71727a58aa8dac17cf7eeb37f79f1ab318bd66a2 (diff) | |
| download | oreilly-epub-598ee0199a50d30d06f4a2fa67bed5943c6452b4.tar.gz oreilly-epub-598ee0199a50d30d06f4a2fa67bed5943c6452b4.zip | |
Deserialize URLs to Url type directly
This saves the effort of parsing them manually later on, and avoids
duplicating the URL-parsing logic.
Diffstat (limited to 'src')
| -rw-r--r-- | src/epub.rs | 19 | ||||
| -rw-r--r-- | src/models.rs | 3 |
2 files changed, 8 insertions, 14 deletions
diff --git a/src/epub.rs b/src/epub.rs index b995767..406135b 100644 --- a/src/epub.rs +++ b/src/epub.rs @@ -1,7 +1,7 @@ use crate::models::{Chapter, FileEntry}; use anyhow::{Context, Result}; use relative_path::{RelativePath, RelativePathBuf}; -use reqwest::{Client, Url}; +use reqwest::Client; use std::{ collections::HashMap, io::{Read, Write}, @@ -51,7 +51,7 @@ pub async fn download_all_files( let mut file = File::create(dest_path).await?; let bytes = client - .get(&entry.url) + .get(entry.url.clone()) .send() .await? .error_for_status()? @@ -87,10 +87,10 @@ pub fn create_epub_archive( write_container_xml_to_zip(&mut zip, &opf_entry.full_path)?; // Prepare url path to local path mapping to clean xhtml files from external dependencies. - let url_to_local = file_entries + let url_path_to_local = file_entries .iter() - .map(url_path_to_local) - .collect::<Result<HashMap<_, _>>>()?; + .map(|e| (e.url.path(), &e.full_path)) + .collect::<HashMap<_, _>>(); // Add the rest of the files according to file_entries. let options: FileOptions<()> = @@ -103,7 +103,7 @@ pub fn create_epub_archive( if chapters.contains_key(&entry.ourn) { let mut html = String::from_utf8(buffer)?; let chapter_dir = entry.full_path.parent().unwrap_or(RelativePath::new("")); - for (url_path, local_path) in &url_to_local { + for (url_path, local_path) in &url_path_to_local { let rel_path = chapter_dir.relative(local_path); html = html.replace(url_path, rel_path.as_str()); } @@ -117,10 +117,3 @@ pub fn create_epub_archive( Ok(()) } - -/// Helper function. Maps FileEntry to (url path, full_path) pair. 
-fn url_path_to_local(entry: &FileEntry) -> Result<(String, RelativePathBuf)> { - let url = Url::parse(&entry.url).with_context(|| format!("Could not parse: {}", entry.url))?; - let url_path = url.path().to_string(); - Ok((url_path, entry.full_path.clone())) -} diff --git a/src/models.rs b/src/models.rs index a3183e6..6035ce1 100644 --- a/src/models.rs +++ b/src/models.rs @@ -1,5 +1,6 @@ use relative_path::RelativePathBuf; use serde::Deserialize; +use url::Url; /// Generic Model for paginated API. #[derive(Debug, serde::Deserialize)] @@ -30,7 +31,7 @@ pub struct Chapter { #[derive(Debug, Deserialize)] pub struct FileEntry { pub ourn: String, - pub url: String, + pub url: Url, pub full_path: RelativePathBuf, pub media_type: String, pub filename: String, |
