Fix BrowsePageStream model (#239)
adumbidiot authored Sep 3, 2024
1 parent 202cd0a commit 653b061
Showing 3 changed files with 31 additions and 9 deletions.
13 changes: 12 additions & 1 deletion deviantart-cli/Cargo.toml
@@ -7,9 +7,20 @@ license = "MIT OR Apache-2.0"
 [dependencies]
 argh = "0.1.12"
 anyhow = "1.0.86"
-deviantart = { path = "../deviantart" }
+deviantart = { path = "../deviantart", default-features = false }
 directories-next = "2.0.0"
 nd-util = { git = "https://github.com/nathaniel-daniel/nd-util-rs", features = [ "download-to-path" ] }
 toml = { version = "0.8.19", features = [ "preserve_order" ] }
 tokio = { version = "1.40.0", features = [ "rt-multi-thread", "fs" ] }
 serde = { version = "1.0.209", features = [ "derive" ] }
+
+[features]
+default = [
+    "rustls-tls",
+]
+native-tls = [
+    "deviantart/native-tls",
+]
+rustls-tls = [
+    "deviantart/rustls-tls",
+]
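
Note: the `deviantart/native-tls` and `deviantart/rustls-tls` entries forward the CLI's new features to features of the `deviantart` dependency, so the TLS backend is chosen at build time. A minimal sketch of the section the `deviantart` library presumably declares on its side (not part of this diff; the forwarding to `reqwest` is an assumption):

# Assumed shape of deviantart/Cargo.toml's feature section (not shown in this commit);
# the reqwest forwarding target is a guess based on a typical HTTP client setup.
[features]
default = [ "rustls-tls" ]
native-tls = [ "reqwest/native-tls" ]
rustls-tls = [ "reqwest/rustls-tls" ]
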
16 changes: 12 additions & 4 deletions deviantart/src/client.rs
@@ -302,9 +302,13 @@ impl SearchCursor
         browse_page_stream
             .items
             .iter()
+            .filter_map(|id| {
+                // TODO: Investigate string format more.
+                id.as_u64()
+            })
             .map(|id| {
-                page.get_deviation_by_id(*id)
-                    .ok_or(Error::MissingDeviation(*id))
+                page.get_deviation_by_id(id)
+                    .ok_or(Error::MissingDeviation(id))
             })
             .collect(),
     )
@@ -326,9 +330,13 @@ impl SearchCursor
     Some(
         items
             .iter()
+            .filter_map(|id| {
+                // TODO: Investigate string format more.
+                id.as_u64()
+            })
             .map(|id| {
-                page.take_deviation_by_id(*id)
-                    .ok_or(Error::MissingDeviation(*id))
+                page.take_deviation_by_id(id)
+                    .ok_or(Error::MissingDeviation(id))
             })
             .collect(),
     )
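
The new filter_map silently drops any item that is not a plain JSON integer, as the TODO notes. If the string-shaped ids described later in this commit's BrowsePageStream docs ever needed handling too, a hypothetical helper could look like this sketch (extract_deviation_id is not part of the crate, and the "parse the digits after the last '-'" rule is an assumption):

use serde_json::Value;

/// Hypothetical helper: pull a deviation id out of an item that is either a JSON
/// integer or a string assumed to look like "xx-nnnnn".
fn extract_deviation_id(item: &Value) -> Option<u64> {
    match item {
        // The case the commit handles: a plain integer id.
        Value::Number(_) => item.as_u64(),
        // The "xx-nnnnn" case from the TODO: parse the part after the last '-'.
        Value::String(s) => s.rsplit('-').next()?.parse().ok(),
        _ => None,
    }
}

fn main() {
    assert_eq!(extract_deviation_id(&Value::from(123456_u64)), Some(123456));
    assert_eq!(
        extract_deviation_id(&Value::String("ab-987654".to_string())),
        Some(987654)
    );
}
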
11 changes: 7 additions & 4 deletions deviantart/src/types/scraped_webpage_info.rs
@@ -62,8 +62,7 @@ impl ScrapedWebPageInfo
     /// Parse this from a html string
     pub fn from_html_str(input: &str) -> Result<Self, FromHtmlStrError> {
         static REGEX: Lazy<Regex> = Lazy::new(|| {
-            Regex::new(r#"window\.__INITIAL_STATE__ = JSON\.parse\("(.*)"\);"#)
-                .expect("invalid `scrape_deviation` regex")
+            Regex::new(r#"window\.__INITIAL_STATE__ = JSON\.parse\("(.*)"\);"#).unwrap()
         });
 
         let capture = REGEX
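
For reference, a standalone sketch of what this regex captures, assuming the `Lazy` in scope is `once_cell::sync::Lazy` and using a made-up page fragment; note the capture is still a JavaScript string literal, so it would need unescaping before being parsed as JSON:

use once_cell::sync::Lazy;
use regex::Regex;

static REGEX: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r#"window\.__INITIAL_STATE__ = JSON\.parse\("(.*)"\);"#).unwrap()
});

fn main() {
    // Hypothetical page fragment, only to show what capture group 1 grabs.
    let html = r#"<script>window.__INITIAL_STATE__ = JSON.parse("{\"ok\":true}");</script>"#;
    let payload = REGEX
        .captures(html)
        .and_then(|captures| captures.get(1))
        .expect("regex should match the fragment");
    // Prints the escaped JSON payload: {\"ok\":true}
    println!("{}", payload.as_str());
}
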
@@ -268,8 +267,12 @@ pub struct BrowsePageStream
     #[serde(rename = "hasMore")]
     pub has_more: bool,
 
-    /// deviation ids
-    pub items: Vec<u64>,
+    /// Deviation ids?
+    ///
+    /// Usually, these are integers representing deviation ids.
+    /// In some cases, these are strings of the format "xx-nnnnn",
+    /// where the "xx" part is unknown and the "nnnnn" part is a deviation id.
+    pub items: Vec<serde_json::Value>,
 
     /// The # of items per page
     #[serde(rename = "itemsPerFetch")]
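
Switching `items` to `Vec<serde_json::Value>` accepts both observed shapes without committing to an interpretation. A hedged alternative sketch (not what this commit does) that models the two shapes explicitly with an untagged enum:

use serde::Deserialize;

/// Alternative modelling of a browse-page item: either a numeric deviation id or
/// the "xx-nnnnn" string form with an unknown prefix.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum BrowsePageStreamItem {
    Id(u64),
    Tagged(String),
}

fn main() -> Result<(), serde_json::Error> {
    let items: Vec<BrowsePageStreamItem> = serde_json::from_str(r#"[123456, "ab-987654"]"#)?;
    println!("{items:?}");
    Ok(())
}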
