Compare commits

...

5 commits

Author SHA1 Message Date
8fa70d1300 readme: documentar warcificator 2024-01-01 02:14:17 -03:00
17ffc88097 auto: usar warcificator 2024-01-01 01:53:15 -03:00
c08a6c4a3b compress+other changes 2023-12-31 20:27:33 -03:00
4535054415 printear y no crashear en reqwest error 2023-12-31 20:09:32 -03:00
aedc9c1ed0 safely unwrap socket & concurrent to 2023-12-31 19:59:11 -03:00
4 changed files with 120 additions and 134 deletions

View file

@@ -8,7 +8,7 @@ scrapeo "masivo" de precios y datos en supermercados argentinos
(no hace falta correrlos porque ya hay listas armadas en [data/](./data/)) (no hace falta correrlos porque ya hay listas armadas en [data/](./data/))
- se usa wget (potencialmente reemplazado por algo custom en el futuro) que genera un archivo [WARC](https://iipc.github.io/warc-specifications/specifications/warc-format/warc-1.0/) con todas las paginas de productos - [warcificator](./warcificator/) descarga las paginas de productos y genera un archivo [WARC](https://iipc.github.io/warc-specifications/specifications/warc-format/warc-1.0/) con ellas
- el [scraper](./scraper/) procesa estos WARCs, extrayendo varios datos y guardandolos en una base de datos SQLite (definida en [db-datos](./db-datos/schema.ts)) - el [scraper](./scraper/) procesa estos WARCs, extrayendo varios datos y guardandolos en una base de datos SQLite (definida en [db-datos](./db-datos/schema.ts))
- el [sitio](./sitio/) renderiza páginas a partir de la base de datos y hace gráficos lindos - el [sitio](./sitio/) renderiza páginas a partir de la base de datos y hace gráficos lindos

View file

@@ -8,12 +8,27 @@ RUN bun install --frozen-lockfile \
&& bun build scraper/cli.ts --target=bun --outfile=/tmp/cli.build.js \ && bun build scraper/cli.ts --target=bun --outfile=/tmp/cli.build.js \
&& rm -rf node_modules/ && rm -rf node_modules/
# https://dev.to/deciduously/use-multi-stage-docker-builds-for-statically-linked-rust-binaries-3jgd
FROM docker.io/rust:1.74 AS warcificator-builder
WORKDIR /usr/src/
RUN rustup target add x86_64-unknown-linux-musl
RUN apt-get update && apt-get install -y musl-tools musl-dev
RUN USER=root cargo new warcificator
WORKDIR /usr/src/warcificator
COPY ./warcificator/Cargo.toml ./warcificator/Cargo.lock ./
RUN cargo build --release
COPY ./warcificator/src ./src
RUN cargo install --target x86_64-unknown-linux-musl --path .
FROM base FROM base
RUN apk add --no-cache wget zstd tini RUN apk add --no-cache wget zstd tini
RUN printf "#!/bin/sh\nexec bun /bin/scraper auto\n" > /etc/periodic/daily/scraper \ RUN printf "#!/bin/sh\nexec bun /bin/scraper auto\n" > /etc/periodic/daily/scraper \
&& chmod +x /etc/periodic/daily/scraper && chmod +x /etc/periodic/daily/scraper
COPY --from=builder /tmp/cli.build.js /bin/scraper COPY --from=builder /tmp/cli.build.js /bin/scraper
COPY --from=warcificator-builder /usr/local/cargo/bin/warcificator /bin/
COPY --from=builder /usr/src/app/db-datos/drizzle /bin/drizzle COPY --from=builder /usr/src/app/db-datos/drizzle /bin/drizzle
COPY --from=builder /usr/src/app/data /listas COPY --from=builder /usr/src/app/data /listas
WORKDIR /app WORKDIR /app

View file

@@ -22,9 +22,6 @@ const supermercados: Supermercado[] = [
Supermercado.Dia, Supermercado.Dia,
]; ];
// hacemos una cola para la compresión para no sobrecargar la CPU
const compressionQueue = new PQueue({ concurrency: 1 });
// hacemos una cola para el scrapeo para no tener varios writers a la BD y no sobrecargar la CPU // hacemos una cola para el scrapeo para no tener varios writers a la BD y no sobrecargar la CPU
const scrapQueue = new PQueue({ concurrency: 1 }); const scrapQueue = new PQueue({ concurrency: 1 });
@@ -77,7 +74,7 @@ class Auto {
} }
async downloadList(supermercado: Supermercado) { async downloadList(supermercado: Supermercado) {
const ctxPath = await mkdtemp(join(tmpdir(), "preciazo-scraper-wget-")); const ctxPath = await mkdtemp(join(tmpdir(), "preciazo-scraper-download-"));
let listPath: string; let listPath: string;
{ {
@@ -117,15 +114,7 @@ class Auto {
)}.warc.zst`; )}.warc.zst`;
const zstdWarcPath = join(ctxPath, zstdWarcName); const zstdWarcPath = join(ctxPath, zstdWarcName);
const subproc = Bun.spawn({ const subproc = Bun.spawn({
cmd: [ cmd: ["warcificator", listPath, zstdWarcPath],
"wget",
"--no-verbose",
"--tries=3",
"--delete-after",
"--input-file",
listPath,
`--warc-file=temp`,
],
stderr: "ignore", stderr: "ignore",
stdout: "ignore", stdout: "ignore",
cwd: ctxPath, cwd: ctxPath,
@@ -133,18 +122,9 @@ class Auto {
const t0 = performance.now(); const t0 = performance.now();
await subproc.exited; await subproc.exited;
this.inform( this.inform(
`[wget] ${zstdWarcName} tardó ${formatMs(performance.now() - t0)}` `[downloader] ${zstdWarcName} tardó ${formatMs(performance.now() - t0)}`
); );
const gzippedWarcPath = join(ctxPath, "temp.warc.gz");
if (!(await fileExists(gzippedWarcPath))) {
const err = this.report(`no encontré el ${gzippedWarcPath}`);
throw err;
}
await compressionQueue.add(() =>
this.recompress(gzippedWarcPath, zstdWarcPath)
);
if (!(await fileExists(zstdWarcPath))) { if (!(await fileExists(zstdWarcPath))) {
const err = this.report(`no encontré el ${zstdWarcPath}`); const err = this.report(`no encontré el ${zstdWarcPath}`);
throw err; throw err;
@@ -190,49 +170,6 @@ class Auto {
} }
} }
/**
* toma un archivo gzippeado y lo recomprime con zstd.
* borra el archivo original.
*/
recompress(inputPath: string, outputPath: string) {
// XXX: por alguna razón no funciona en Bun 1.0.20
// const decompressor = Bun.spawn({
// cmd: ["gzip", "-dc", inputPath],
// stderr: "inherit",
// });
// const compressor = Bun.spawn({
// cmd: ["zstd", "-T0", "-15", "--long", "-o", outputPath],
// stdin: decompressor.stdout,
// // stderr: "inherit",
// });
// const errorCode = await compressor.exited;
// if (errorCode !== 0) {
// const err = report(`zstd threw error code ${errorCode}`);
// throw err;
// }
return new Promise((resolve, reject) => {
const decompressor = spawn("gzip", ["-dc", inputPath], {
stdio: [null, "pipe", null],
});
const compressor = spawn(
"zstd",
["-T0", "-15", "--long", "-o", outputPath],
{
stdio: ["pipe", null, null],
}
);
decompressor.stdout.pipe(compressor.stdin);
compressor.on("close", (code) => {
if (code !== 0) {
const err = this.report(`zstd threw error code ${code}`);
reject(err);
}
resolve(void 0);
});
});
}
async uploadToBucket({ async uploadToBucket({
fileName, fileName,
file, file,
@@ -278,7 +215,6 @@ class Auto {
await fetch(url); await fetch(url);
} }
} }
// await recompress("sqlite.db.gz", "sqlite.db.zst");
// no se llama exists porque bun tiene un bug en el que usa fs.exists por mas que exista una funcion llamada exists // no se llama exists porque bun tiene un bug en el que usa fs.exists por mas que exista una funcion llamada exists
async function fileExists(path: string) { async function fileExists(path: string) {

View file

@@ -1,16 +1,24 @@
use async_channel::{Receiver, Sender}; use async_channel::{Receiver, Sender};
use std::{env::args, fs, io::stdout, net::SocketAddr}; use std::{
env::args,
fs,
net::SocketAddr,
process::{Command, Stdio},
};
use tokio::io::{stderr, AsyncWriteExt};
use warc::{RecordBuilder, WarcHeader, WarcWriter}; use warc::{RecordBuilder, WarcHeader, WarcWriter};
struct FullExchange { struct FullExchange {
socket_addr: SocketAddr, socket_addr: Option<SocketAddr>,
request: http::Request<&'static str>, request: http::Request<&'static str>,
response: http::Response<Vec<u8>>, response: http::Response<Vec<u8>>,
} }
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let links_list_path = args().skip(1).next().unwrap(); let mut args = args().skip(1);
let links_list_path = args.next().unwrap();
let output_zstd_path = args.next().unwrap();
let links_str = fs::read_to_string(links_list_path).unwrap(); let links_str = fs::read_to_string(links_list_path).unwrap();
let links = links_str let links = links_str
.split("\n") .split("\n")
@@ -18,6 +26,7 @@ async fn main() {
.filter(|s| s.len() > 0) .filter(|s| s.len() > 0)
.map(|s| s.to_owned()) .map(|s| s.to_owned())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let handle = { let handle = {
let (sender, receiver) = async_channel::bounded::<String>(1); let (sender, receiver) = async_channel::bounded::<String>(1);
let (res_sender, res_receiver) = async_channel::unbounded::<FullExchange>(); let (res_sender, res_receiver) = async_channel::unbounded::<FullExchange>();
@@ -29,7 +38,7 @@ async fn main() {
handles.push(tokio::spawn(worker(rx, tx))); handles.push(tokio::spawn(worker(rx, tx)));
} }
let warc_writer_handle = tokio::spawn(warc_writer(res_receiver)); let warc_writer_handle = tokio::spawn(warc_writer(res_receiver, output_zstd_path));
for link in links { for link in links {
sender.send_blocking(link).unwrap(); sender.send_blocking(link).unwrap();
@@ -48,6 +57,22 @@ async fn main() {
async fn worker(rx: Receiver<String>, tx: Sender<FullExchange>) { async fn worker(rx: Receiver<String>, tx: Sender<FullExchange>) {
let client = reqwest::ClientBuilder::default().build().unwrap(); let client = reqwest::ClientBuilder::default().build().unwrap();
while let Ok(url) = rx.recv().await { while let Ok(url) = rx.recv().await {
let res = fetch(&client, url.clone()).await;
match res {
Ok(ex) => {
tx.send(ex).await.unwrap();
}
Err(err) => {
stderr()
.write_all(format!("Failed to fetch {}: {:#?}", url.as_str(), err).as_bytes())
.await
.unwrap();
}
}
}
}
async fn fetch(client: &reqwest::Client, url: String) -> Result<FullExchange, reqwest::Error> {
let request = client.get(url).build().unwrap(); let request = client.get(url).build().unwrap();
let mut http_request_builder = http::Request::builder() let mut http_request_builder = http::Request::builder()
.method(request.method()) .method(request.method())
@@ -55,9 +80,9 @@ async fn worker(rx: Receiver<String>, tx: Sender<FullExchange>) {
for (key, val) in request.headers() { for (key, val) in request.headers() {
http_request_builder = http_request_builder.header(key, val); http_request_builder = http_request_builder.header(key, val);
} }
let response = client.execute(request).await.unwrap(); let response = client.execute(request).await?;
let ip_address = response.remote_addr().unwrap(); let ip_address = response.remote_addr();
let http_request = { let http_request = {
http_request_builder http_request_builder
@@ -73,65 +98,75 @@ async fn worker(rx: Receiver<String>, tx: Sender<FullExchange>) {
for (key, val) in response.headers() { for (key, val) in response.headers() {
http_response_builder = http_response_builder.header(key, val); http_response_builder = http_response_builder.header(key, val);
} }
let body = response.bytes().await.unwrap(); let body = response.bytes().await?;
http_response_builder.body(body.to_vec()).unwrap() http_response_builder.body(body.to_vec()).unwrap()
}; };
tx.send(FullExchange { Ok(FullExchange {
socket_addr: ip_address, socket_addr: ip_address,
request: http_request, request: http_request,
response: http_response, response: http_response,
}) })
.await
.unwrap();
}
} }
async fn warc_writer(rx: Receiver<FullExchange>) { async fn warc_writer(rx: Receiver<FullExchange>, output_zstd_path: String) {
let mut writer = WarcWriter::new(stdout()); let zstd_proc = Command::new("zstd")
let warc_fields = format!("software: preciazo-warcificator/0.0.0\nformat: WARC file version 1.0\nconformsTo: http://www.archive.org/documents/WarcFileFormat-1.0.html"); .args(&["-T0", "-15", "--long", "-o", &output_zstd_path])
.stdin(Stdio::piped())
.stderr(Stdio::null())
.stdout(Stdio::null())
.spawn()
.unwrap();
let mut writer = WarcWriter::new(zstd_proc.stdin.unwrap());
writer writer
.write( .write(
&RecordBuilder::default() &RecordBuilder::default()
.version("1.0".to_owned()) .version("1.0".to_owned())
.warc_type(warc::RecordType::WarcInfo) .warc_type(warc::RecordType::WarcInfo)
.header(WarcHeader::ContentType, "application/warc-fields") .header(WarcHeader::ContentType, "application/warc-fields")
.body(warc_fields.into()) .body(format!("software: preciazo-warcificator/0.0.0\nformat: WARC file version 1.0\nconformsTo: http://www.archive.org/documents/WarcFileFormat-1.0.html").into())
.build() .build()
.unwrap(), .unwrap(),
) )
.unwrap(); .unwrap();
while let Ok(res) = rx.recv().await { while let Ok(res) = rx.recv().await {
let uri = res.request.uri().to_string(); let uri = res.request.uri().to_string();
writer let req_record = {
.write( let mut builder = RecordBuilder::default()
&RecordBuilder::default()
.version("1.0".to_owned()) .version("1.0".to_owned())
.warc_type(warc::RecordType::Request) .warc_type(warc::RecordType::Request)
.header(WarcHeader::TargetURI, uri.clone()) .header(WarcHeader::TargetURI, uri.clone())
.header(WarcHeader::IPAddress, res.socket_addr.ip().to_string())
.header(WarcHeader::ContentType, "application/http;msgtype=request") .header(WarcHeader::ContentType, "application/http;msgtype=request")
.header( .header(
WarcHeader::Unknown("X-Warcificator-Lying".to_string()), WarcHeader::Unknown("X-Warcificator-Lying".to_string()),
"the request contains other headers not included here", "the request contains other headers not included here",
) );
if let Some(addr) = res.socket_addr {
builder = builder.header(WarcHeader::IPAddress, addr.ip().to_string());
}
builder
.body(format_http11_request(res.request).into_bytes()) .body(format_http11_request(res.request).into_bytes())
.build() .build()
.unwrap(), .unwrap()
) };
.unwrap(); writer.write(&req_record).unwrap();
writer writer
.write( .write(&{
&RecordBuilder::default() let mut builder = RecordBuilder::default()
.version("1.0".to_owned()) .version("1.0".to_owned())
.warc_type(warc::RecordType::Response) .warc_type(warc::RecordType::Response)
.header(WarcHeader::TargetURI, uri) .header(WarcHeader::TargetURI, uri)
.header(WarcHeader::IPAddress, res.socket_addr.ip().to_string()) .header(WarcHeader::ConcurrentTo, req_record.warc_id())
.header(WarcHeader::ContentType, "application/http;msgtype=response") .header(WarcHeader::ContentType, "application/http;msgtype=response");
if let Some(addr) = res.socket_addr {
builder = builder.header(WarcHeader::IPAddress, addr.ip().to_string());
}
builder
.body(format_http11_response(res.response)) .body(format_http11_response(res.response))
.build() .build()
.unwrap(), .unwrap()
) })
.unwrap(); .unwrap();
} }
} }