diff options
| author | A Farzat <a@farzat.xyz> | 2026-03-08 21:47:32 +0300 |
|---|---|---|
| committer | A Farzat <a@farzat.xyz> | 2026-03-08 21:47:32 +0300 |
| commit | a349fd340fc63c476358237dbcaafb6ed4a2c521 (patch) | |
| tree | 866acad0095e0755f57436d11cd9c78a34544b07 /src/main.rs | |
| parent | ab5ce397987bd97c920101a8d393d0fde29b5f74 (diff) | |
| download | oreilly-epub-a349fd340fc63c476358237dbcaafb6ed4a2c521.tar.gz oreilly-epub-a349fd340fc63c476358237dbcaafb6ed4a2c521.zip | |
Remove unused models, fields, and endpoints
If they are ever needed again, they can be added back.
Also set API endpoint fields to the Url type instead of String.
Diffstat (limited to 'src/main.rs')
| -rw-r--r-- | src/main.rs | 24 |
1 file changed, 3 insertions, 21 deletions
diff --git a/src/main.rs b/src/main.rs index 7b73bf3..8712d4f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -9,7 +9,7 @@ use std::path::PathBuf; use crate::epub::{create_epub_archive, download_all_files}; use crate::http_client::build_authenticated_client; -use crate::models::{Chapter, EpubResponse, FileEntry, Paginated, SpineItem, TocNode}; +use crate::models::{Chapter, EpubResponse, FileEntry, Paginated}; use anyhow::{Context, Result, anyhow}; use clap::Parser; use directories::{BaseDirs, UserDirs}; @@ -44,24 +44,8 @@ async fn fetch_epub_data(client: &Client, bookid: &str) -> Result<EpubResponse> Ok(response) } -/// Fetches a direct array endpoint (no pagination, simple list). -async fn fetch_direct_array<T>(client: &Client, url: &str) -> Result<Vec<T>> -where - T: serde::de::DeserializeOwned, -{ - let response = client - .get(url) - .send() - .await? - .error_for_status()? - .json::<Vec<T>>() - .await - .context("Failed to deserialize API response")?; - Ok(response) -} - /// Fetch a paginated API. -async fn fetch_all_pages<T>(client: &reqwest::Client, mut url: String) -> Result<Vec<T>> +async fn fetch_all_pages<T>(client: &reqwest::Client, mut url: url::Url) -> Result<Vec<T>> where T: serde::de::DeserializeOwned, { @@ -69,7 +53,7 @@ where loop { // GET current URL and deserialize into Paginated<T>. let response = client - .get(&url) + .get(url) .send() .await? .error_for_status()? @@ -144,8 +128,6 @@ async fn main() -> Result<()> { let chapters: HashMap<String, Chapter> = chapters.into_iter().map(|c| (c.ourn.clone(), c)).collect(); let file_entries: Vec<FileEntry> = fetch_all_pages(&client, epub_data.files.clone()).await?; - let spine_items: Vec<SpineItem> = fetch_all_pages(&client, epub_data.spine.clone()).await?; - let toc_vec: Vec<TocNode> = fetch_direct_array(&client, &epub_data.table_of_contents).await?; let epub_root = data_root.join("files").join(&args.bookid); if !args.skip_download { |
