Mirror of https://github.com/catdevnull/preciazo.git
Synced 2024-11-22 14:16:19 +00:00
fix: print errors in best selling + refactor
parent c946e7fe35
commit c0c5066284
2 changed files with 47 additions and 28 deletions
@@ -3,8 +3,9 @@ use std::collections::HashMap;
 use crate::{build_client, db::Db, sites::vtex, supermercado::Supermercado};
 use chrono::{DateTime, Utc};
 use clap::ValueEnum;
-use futures::{stream, FutureExt, StreamExt, TryStreamExt};
+use futures::{stream, FutureExt, StreamExt};
+use itertools::Itertools;
 use simple_error::SimpleError;
 use tracing::warn;
 
 #[derive(ValueEnum, Clone, Debug)]
@@ -77,10 +78,6 @@ async fn try_get_best_selling_eans(
     }
 }
 
-async fn noop<T>(t: T) -> anyhow::Result<T> {
-    Ok(t)
-}
-
 fn rank_eans(eans: Vec<Vec<String>>) -> Vec<String> {
     let mut map: HashMap<String, usize> = HashMap::new();
     for eans in eans {
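Note on the removed helper: `noop` existed only to satisfy `try_filter_map`, which wants a fallible async closure, so an identity wrapper was used to flatten a stream of `Option`s while keeping the error channel; once the pipeline in the next hunk drops the `Try*` combinators, it becomes dead code. A standalone sketch of the pattern it enabled, with made-up data and assuming the `futures`, `tokio`, and `anyhow` crates:

```rust
use futures::{stream, TryStreamExt};

// Identity wrapper: try_filter_map() needs a closure that returns a
// Result<Option<T>>, so wrapping the value in Ok() is enough to flatten
// a stream of Options while keeping the error type intact.
async fn noop<T>(t: T) -> anyhow::Result<T> {
    Ok(t)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let items: Vec<anyhow::Result<Option<u32>>> = vec![Ok(Some(1)), Ok(None), Ok(Some(3))];
    // try_filter_map keeps the Some values and drops the Nones;
    // the trailing try_collect bails out on the first Err it encounters.
    let kept: Vec<u32> = stream::iter(items).try_filter_map(noop).try_collect().await?;
    assert_eq!(kept, vec![1, 3]);
    Ok(())
}
```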
@@ -98,34 +95,45 @@ fn rank_eans(eans: Vec<Vec<String>>) -> Vec<String> {
 pub async fn get_all_best_selling(db: &Db) -> anyhow::Result<Vec<BestSellingRecord>> {
     let client = &build_client();
 
-    stream::iter(Category::value_variants())
+    let records = stream::iter(Category::value_variants())
         .map(|category| {
             stream::iter(Supermercado::value_variants())
                 .map(|supermercado| {
-                    let db = db.clone();
-                    let client = client.clone();
                     tokio::spawn(try_get_best_selling_eans(
-                        client,
-                        db,
+                        client.clone(),
+                        db.clone(),
                         supermercado,
                         category,
                     ))
                 })
                 .buffer_unordered(5)
                 .map(|f| f.unwrap())
-                .try_filter_map(noop)
-                .try_collect::<Vec<Vec<String>>>()
+                .filter_map(|r| async {
+                    match r {
+                        Err(err) => {
+                            tracing::error!("Error getting best selling: {}", err);
+                            None
+                        }
+                        Ok(v) => v,
+                    }
+                })
+                .collect::<Vec<Vec<String>>>()
                 .map(|r| {
-                    r.map(rank_eans).map(|eans| BestSellingRecord {
+                    let ranked = rank_eans(r);
+                    BestSellingRecord {
                         fetched_at: Utc::now(),
                         category: category.clone(),
-                        eans,
-                    })
+                        eans: ranked,
+                    }
                 })
         })
         .buffer_unordered(5)
-        .boxed()
-        .try_collect()
-        .await
+        .collect::<Vec<BestSellingRecord>>()
+        .await;
+    if records.len() < 10 {
+        Err(SimpleError::new("Too few BestSellingRecords").into())
+    } else {
+        Ok(records)
+    }
 }
 
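For context on the change in get_all_best_selling: instead of short-circuiting the whole run on the first failed supermercado/category pair, each error is now logged and dropped, and a count check at the end decides whether the run as a whole failed. A simplified, self-contained sketch of that log-and-continue pattern, with made-up data and `eprintln!` standing in for `tracing::error!`:

```rust
use futures::{stream, StreamExt};

// Log-and-continue instead of fail-fast: errors only produce a log line,
// successful Options are flattened, and a final length check decides
// whether the whole batch counts as a failure.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let results: Vec<anyhow::Result<Option<Vec<String>>>> = vec![
        Ok(Some(vec!["ean-1".to_string()])),
        Err(anyhow::anyhow!("request timed out")),
        Ok(Some(vec!["ean-2".to_string()])),
        Ok(None),
    ];

    let collected: Vec<Vec<String>> = stream::iter(results)
        .filter_map(|r| async {
            match r {
                Err(err) => {
                    // The real code logs with tracing::error!; eprintln! keeps the sketch dependency-free.
                    eprintln!("Error getting best selling: {}", err);
                    None
                }
                Ok(v) => v,
            }
        })
        .collect()
        .await;

    // Tolerate individual failures, but refuse a run where almost everything failed.
    if collected.len() < 2 {
        anyhow::bail!("Too few results");
    }
    Ok(())
}
```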
@@ -225,19 +225,30 @@ pub async fn get_best_selling_by_category(
         url
     };
     let body = fetch_body(client, url.as_str()).await?;
-    let urls: Vec<String> = serde_json::from_str::<serde_json::Value>(&body)?
+    tracing::debug!("best selling body: {}", body);
+    let json = &serde_json::from_str::<serde_json::Value>(&body)?;
+    if let Some(errors_array) = json.pointer("/errors") {
+        if let Some(error_messages) = errors_array.as_array().map(|a| {
+            a.into_iter()
+                .map(|obj| obj.get("message").and_then(|v| v.as_str()))
+                .collect_vec()
+        }) {
+            bail!("Errors from API: {:?}", error_messages);
+        } else {
+            bail!("Unknown error from API")
+        }
+    }
+    let urls: Vec<String> = json
        .pointer("/data/productSearch/products")
        .and_then(|v| v.as_array())
-        .map(|a| {
-            a.iter()
-                .filter_map(|p| {
-                    p.get("link")
-                        .and_then(|v| v.as_str())
-                        .map(|s| format!("https://{}{}", domain, s))
-                })
-                .collect()
+        .ok_or(SimpleError::new("failed to get best selling product urls"))?
+        .iter()
+        .filter_map(|p| {
+            p.get("link")
+                .and_then(|v| v.as_str())
+                .map(|s| format!("https://{}{}", domain, s))
        })
-        .ok_or(SimpleError::new("failed to get best selling product urls"))?;
+        .collect();
 
     if urls.len() < 2 {
         bail!("Too few best selling");
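The new guard in get_best_selling_by_category inspects the VTEX GraphQL response for a top-level "errors" array before trusting the "data" pointer, so upstream failures surface as readable messages instead of a generic "failed to get best selling product urls". A self-contained sketch of that check, with a hypothetical function name and made-up JSON shapes:

```rust
use itertools::Itertools;
use serde_json::{json, Value};

// Hypothetical helper mirroring the check added above: bail with the API's
// own error messages if the body carries a GraphQL-style "errors" array.
fn check_api_errors(json: &Value) -> anyhow::Result<()> {
    if let Some(errors_array) = json.pointer("/errors") {
        if let Some(error_messages) = errors_array.as_array().map(|a| {
            a.iter()
                .map(|obj| obj.get("message").and_then(|v| v.as_str()))
                .collect_vec()
        }) {
            anyhow::bail!("Errors from API: {:?}", error_messages);
        } else {
            anyhow::bail!("Unknown error from API");
        }
    }
    Ok(())
}

fn main() {
    let failing = json!({ "errors": [ { "message": "productSearch failed" } ] });
    let ok = json!({ "data": { "productSearch": { "products": [] } } });
    assert!(check_api_errors(&failing).is_err());
    assert!(check_api_errors(&ok).is_ok());
}
```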