Mirror of https://github.com/catdevnull/preciazo.git
Synced 2024-11-26 11:36:20 +00:00

Compare commits: eaf7cab4cb ... eb0e62447b (10 commits)
Commits:
eb0e62447b
aae84b3829
1caba93ad6
bca2717ac8
7644adf140
93dbf388af
cbbfec9e42
f2692a5f9b
d820bcc457
cd6bbbdbe8
10 changed files with 1631 additions and 71 deletions
.devcontainer/devcontainer.json

@@ -8,7 +8,8 @@
     "ghcr.io/shyim/devcontainers-features/bun:0": {},
     "ghcr.io/devcontainers/features/git-lfs:1": {},
     "ghcr.io/devcontainers/features/node:1": {},
-    "ghcr.io/swift-server-community/swift-devcontainer-features/sqlite:1": {}
+    "ghcr.io/swift-server-community/swift-devcontainer-features/sqlite:1": {},
+    "ghcr.io/devcontainers/features/rust:1": {}
   },

   // Use 'forwardPorts' to make a list of ports inside the container available locally.
.vscode/launch.json (vendored): 12 additions
@@ -4,6 +4,18 @@
   // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
   "version": "0.2.0",
   "configurations": [
+    {
+      "type": "lldb",
+      "request": "launch",
+      "name": "warcificator",
+      "cwd": "warcificator/",
+      "cargo": {
+        // https://github.com/vadimcn/codelldb/issues/884
+        "args": ["build", "--manifest-path=warcificator/Cargo.toml"]
+      },
+      "args": ["../data/samples/Carrefour.50.txt"],
+      "env": {}
+    },
     {
       "type": "node",
       "request": "launch",
carrefour-link-scraper/index.ts

@@ -1,8 +1,6 @@
 import pMap from "p-map";
 import { saveUrls } from "db-datos/urlHelpers.js";

-await scrapBySitemap();
-
 export async function scrapCarrefourProducts() {
   await scrapBySitemap();
 }
README.md

@@ -8,7 +8,7 @@ "massive" scraping of prices and data from Argentine supermarkets

 (no need to run these, since ready-made lists already exist in [data/](./data/))

-- wget is used (potentially replaced by something custom in the future), which generates a [WARC](https://iipc.github.io/warc-specifications/specifications/warc-format/warc-1.0/) file with all the product pages
+- [warcificator](./warcificator/) downloads the product pages and generates a [WARC](https://iipc.github.io/warc-specifications/specifications/warc-format/warc-1.0/) file with them
 - the [scraper](./scraper/) processes these WARCs, extracting various data and saving it into a SQLite database (defined in [db-datos](./db-datos/schema.ts))
 - the [sitio](./sitio/) renders pages from the database and draws nice graphs
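The hunk above rewrites the pipeline description: wget is replaced by warcificator as the page downloader. Sketched in TypeScript under Bun, the stages chain roughly like this (run() and pipeline() are illustrative helpers, not actual repo exports; the two CLI invocations are the ones this compare introduces):

    // minimal sketch, assuming Bun and the entry points shown in this diff
    async function run(cmd: string[]) {
      const proc = Bun.spawn({ cmd });
      const code = await proc.exited;
      if (code !== 0) throw new Error(`${cmd.join(" ")} exited with ${code}`);
    }

    async function pipeline(listPath: string, warcPath: string) {
      await run(["warcificator", listPath, warcPath]); // stage 1: product pages -> .warc.zst
      await run(["bun", "scraper/cli.ts", "scrap", warcPath]); // stage 2: WARC -> SQLite
      // stage 3: the sitio renders pages straight from the SQLite database
    }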
Dockerfile

@@ -8,12 +8,27 @@ RUN bun install --frozen-lockfile \
   && bun build scraper/cli.ts --target=bun --outfile=/tmp/cli.build.js \
   && rm -rf node_modules/

+# https://dev.to/deciduously/use-multi-stage-docker-builds-for-statically-linked-rust-binaries-3jgd
+FROM docker.io/rust:1.74 AS warcificator-builder
+WORKDIR /usr/src/
+RUN rustup target add x86_64-unknown-linux-musl
+RUN apt-get update && apt-get install -y musl-tools musl-dev
+
+RUN USER=root cargo new warcificator
+WORKDIR /usr/src/warcificator
+COPY ./warcificator/Cargo.toml ./warcificator/Cargo.lock ./
+RUN cargo build --release
+
+COPY ./warcificator/src ./src
+RUN cargo install --target x86_64-unknown-linux-musl --path .
+
 FROM base
 RUN apk add --no-cache wget zstd tini
 RUN printf "#!/bin/sh\nexec bun /bin/scraper auto\n" > /etc/periodic/daily/scraper \
   && chmod +x /etc/periodic/daily/scraper

 COPY --from=builder /tmp/cli.build.js /bin/scraper
+COPY --from=warcificator-builder /usr/local/cargo/bin/warcificator /bin/
 COPY --from=builder /usr/src/app/db-datos/drizzle /bin/drizzle
 COPY --from=builder /usr/src/app/data /listas
 WORKDIR /app
scraper/auto.ts

@@ -22,9 +22,6 @@ const supermercados: Supermercado[] = [
   Supermercado.Dia,
 ];

-// we make a queue for compression so we don't overload the CPU
-const compressionQueue = new PQueue({ concurrency: 1 });
-
 // we make a queue for scraping so we don't have multiple writers to the DB and don't overload the CPU
 const scrapQueue = new PQueue({ concurrency: 1 });

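The surviving comment names the technique: a PQueue with concurrency 1 acts as a mutex, so scrape jobs queued from anywhere run strictly one at a time and SQLite only ever sees a single writer. A minimal sketch of that pattern with p-queue (parseAndSave() stands in for the real parseWarc() work):

    import PQueue from "p-queue";

    // concurrency: 1 serializes the tasks: at most one runs at any moment
    const scrapQueue = new PQueue({ concurrency: 1 });

    async function parseAndSave(warcPath: string) {
      // stand-in: parse WARC records and write product rows to SQLite
      console.log(`scraping ${warcPath}`);
    }

    // callers can enqueue concurrently; execution stays serialized
    await Promise.all(
      ["dia.warc.zst", "coto.warc.zst"].map((w) =>
        scrapQueue.add(() => parseAndSave(w))
      )
    );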
@@ -77,7 +74,7 @@ class Auto {
   }

   async downloadList(supermercado: Supermercado) {
-    const ctxPath = await mkdtemp(join(tmpdir(), "preciazo-scraper-wget-"));
+    const ctxPath = await mkdtemp(join(tmpdir(), "preciazo-scraper-download-"));

     let listPath: string;
     {
@@ -117,15 +114,7 @@ class Auto {
     )}.warc.zst`;
     const zstdWarcPath = join(ctxPath, zstdWarcName);
     const subproc = Bun.spawn({
-      cmd: [
-        "wget",
-        "--no-verbose",
-        "--tries=3",
-        "--delete-after",
-        "--input-file",
-        listPath,
-        `--warc-file=temp`,
-      ],
+      cmd: ["warcificator", listPath, zstdWarcPath],
       stderr: "ignore",
       stdout: "ignore",
       cwd: ctxPath,
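The spawn gets much simpler: wget needed seven flags and wrote an intermediate temp.warc.gz, while warcificator takes the links file and the final .warc.zst as its two positional arguments. A hedged sketch of the new invocation (paths made up for illustration; the real code derives them from mkdtemp() and checks the output with fileExists() rather than the exit code):

    // minimal sketch, assuming Bun
    const listPath = "/tmp/preciazo-scraper-download-x/Carrefour.txt";
    const zstdWarcPath = "/tmp/preciazo-scraper-download-x/Carrefour.warc.zst";

    const subproc = Bun.spawn({
      cmd: ["warcificator", listPath, zstdWarcPath],
      stderr: "ignore",
      stdout: "ignore",
    });
    const exitCode = await subproc.exited; // resolves with the exit code
    if (exitCode !== 0) throw new Error(`warcificator exited with code ${exitCode}`);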
@@ -133,18 +122,9 @@ class Auto {
     const t0 = performance.now();
     await subproc.exited;
     this.inform(
-      `[wget] ${zstdWarcName} tardó ${formatMs(performance.now() - t0)}`
+      `[downloader] ${zstdWarcName} tardó ${formatMs(performance.now() - t0)}`
     );

-    const gzippedWarcPath = join(ctxPath, "temp.warc.gz");
-    if (!(await fileExists(gzippedWarcPath))) {
-      const err = this.report(`no encontré el ${gzippedWarcPath}`);
-      throw err;
-    }
-
-    await compressionQueue.add(() =>
-      this.recompress(gzippedWarcPath, zstdWarcPath)
-    );
     if (!(await fileExists(zstdWarcPath))) {
       const err = this.report(`no encontré el ${zstdWarcPath}`);
       throw err;
@@ -190,49 +170,6 @@ class Auto {
     }
   }

-  /**
-   * takes a gzipped file and recompresses it with zstd.
-   * deletes the original file.
-   */
-  recompress(inputPath: string, outputPath: string) {
-    // XXX: for some reason this doesn't work in Bun 1.0.20
-    // const decompressor = Bun.spawn({
-    //   cmd: ["gzip", "-dc", inputPath],
-    //   stderr: "inherit",
-    // });
-    // const compressor = Bun.spawn({
-    //   cmd: ["zstd", "-T0", "-15", "--long", "-o", outputPath],
-    //   stdin: decompressor.stdout,
-    //   // stderr: "inherit",
-    // });
-    // const errorCode = await compressor.exited;
-    // if (errorCode !== 0) {
-    //   const err = report(`zstd threw error code ${errorCode}`);
-    //   throw err;
-    // }
-
-    return new Promise((resolve, reject) => {
-      const decompressor = spawn("gzip", ["-dc", inputPath], {
-        stdio: [null, "pipe", null],
-      });
-      const compressor = spawn(
-        "zstd",
-        ["-T0", "-15", "--long", "-o", outputPath],
-        {
-          stdio: ["pipe", null, null],
-        }
-      );
-      decompressor.stdout.pipe(compressor.stdin);
-      compressor.on("close", (code) => {
-        if (code !== 0) {
-          const err = this.report(`zstd threw error code ${code}`);
-          reject(err);
-        }
-        resolve(void 0);
-      });
-    });
-  }
-
   async uploadToBucket({
     fileName,
     file,
@@ -278,7 +215,6 @@ class Auto {
     await fetch(url);
   }
 }
-// await recompress("sqlite.db.gz", "sqlite.db.zst");

 // this isn't called `exists` because bun has a bug where it uses fs.exists even if a function named exists is defined
 async function fileExists(path: string) {
scraper/cli.ts

@@ -1,8 +1,17 @@
+import { scrapCarrefourProducts } from "../carrefour-link-scraper/index.js";
+import { scrapCotoProducts } from "../coto-link-scraper/index.js";
+import { scrapDiaProducts } from "../dia-link-scraper/index.js";
 import { auto } from "./auto.js";
 import { parseWarc } from "./scrap.js";

 if (process.argv[2] === "auto") {
   await auto();
+} else if (process.argv[2] === "scrap-carrefour-links") {
+  await scrapCarrefourProducts();
+} else if (process.argv[2] === "scrap-dia-links") {
+  await scrapDiaProducts();
+} else if (process.argv[2] === "scrap-coto-links") {
+  await scrapCotoProducts();
 } else if (process.argv[2] === "scrap") {
   const warcPaths = process.argv.slice(3);
   if (warcPaths.length > 0) {
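Each link scraper is now runnable on its own, e.g. bun scraper/cli.ts scrap-carrefour-links. The new if/else branches could equally be expressed as a lookup table; a sketch of the equivalent dispatch (same imports as in the diff, table form is illustrative, not how the repo writes it):

    import { scrapCarrefourProducts } from "../carrefour-link-scraper/index.js";
    import { scrapCotoProducts } from "../coto-link-scraper/index.js";
    import { scrapDiaProducts } from "../dia-link-scraper/index.js";

    // equivalent to the chain above, written as a table
    const linkScrapers: Record<string, () => Promise<void>> = {
      "scrap-carrefour-links": scrapCarrefourProducts,
      "scrap-dia-links": scrapDiaProducts,
      "scrap-coto-links": scrapCotoProducts,
    };

    const command = process.argv[2] ?? "";
    if (command in linkScrapers) await linkScrapers[command]();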
warcificator/Cargo.lock (generated, new file): 1373 lines
(File diff suppressed because it is too large.)
warcificator/Cargo.toml (new file): 17 lines
@@ -0,0 +1,17 @@
+[package]
+name = "warcificator"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+async-channel = "2.1.1"
+http = "0.2.11"
+reqwest = { version = "0.11.23", default-features = false, features = [
+    "rustls-tls",
+    "gzip",
+    "brotli",
+] }
+tokio = { version = "1.35.1", features = ["full"] }
+warc = "0.3.1"
warcificator/src/main.rs (new file): 199 lines
@ -0,0 +1,199 @@
|
||||||
|
use async_channel::{Receiver, Sender};
|
||||||
|
use std::{
|
||||||
|
env::args,
|
||||||
|
fs,
|
||||||
|
net::SocketAddr,
|
||||||
|
process::{Command, Stdio},
|
||||||
|
};
|
||||||
|
use tokio::io::{stderr, AsyncWriteExt};
|
||||||
|
use warc::{RecordBuilder, WarcHeader, WarcWriter};
|
||||||
|
|
||||||
|
struct FullExchange {
|
||||||
|
socket_addr: Option<SocketAddr>,
|
||||||
|
request: http::Request<&'static str>,
|
||||||
|
response: http::Response<Vec<u8>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() {
|
||||||
|
let mut args = args().skip(1);
|
||||||
|
let links_list_path = args.next().unwrap();
|
||||||
|
let output_zstd_path = args.next().unwrap();
|
||||||
|
let links_str = fs::read_to_string(links_list_path).unwrap();
|
||||||
|
let links = links_str
|
||||||
|
.split("\n")
|
||||||
|
.map(|s| s.trim())
|
||||||
|
.filter(|s| s.len() > 0)
|
||||||
|
.map(|s| s.to_owned())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let handle = {
|
||||||
|
let (sender, receiver) = async_channel::bounded::<String>(1);
|
||||||
|
let (res_sender, res_receiver) = async_channel::unbounded::<FullExchange>();
|
||||||
|
|
||||||
|
let mut handles = Vec::new();
|
||||||
|
for _ in 1..16 {
|
||||||
|
let rx = receiver.clone();
|
||||||
|
let tx = res_sender.clone();
|
||||||
|
handles.push(tokio::spawn(worker(rx, tx)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let warc_writer_handle = tokio::spawn(warc_writer(res_receiver, output_zstd_path));
|
||||||
|
|
||||||
|
for link in links {
|
||||||
|
sender.send_blocking(link).unwrap();
|
||||||
|
}
|
||||||
|
sender.close();
|
||||||
|
|
||||||
|
for handle in handles {
|
||||||
|
handle.await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
warc_writer_handle
|
||||||
|
};
|
||||||
|
handle.await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn worker(rx: Receiver<String>, tx: Sender<FullExchange>) {
|
||||||
|
let client = reqwest::ClientBuilder::default().build().unwrap();
|
||||||
|
while let Ok(url) = rx.recv().await {
|
||||||
|
let res = fetch(&client, url.clone()).await;
|
||||||
|
match res {
|
||||||
|
Ok(ex) => {
|
||||||
|
tx.send(ex).await.unwrap();
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
stderr()
|
||||||
|
.write_all(format!("Failed to fetch {}: {:#?}", url.as_str(), err).as_bytes())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch(client: &reqwest::Client, url: String) -> Result<FullExchange, reqwest::Error> {
|
||||||
|
let request = client.get(url).build().unwrap();
|
||||||
|
let mut http_request_builder = http::Request::builder()
|
||||||
|
.method(request.method())
|
||||||
|
.uri(request.url().as_str());
|
||||||
|
for (key, val) in request.headers() {
|
||||||
|
http_request_builder = http_request_builder.header(key, val);
|
||||||
|
}
|
||||||
|
let response = client.execute(request).await?;
|
||||||
|
|
||||||
|
let ip_address = response.remote_addr();
|
||||||
|
|
||||||
|
let http_request = {
|
||||||
|
http_request_builder
|
||||||
|
.version(response.version())
|
||||||
|
.body("")
|
||||||
|
.unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
let http_response = {
|
||||||
|
let mut http_response_builder = http::Response::<()>::builder()
|
||||||
|
.status(response.status())
|
||||||
|
.version(response.version());
|
||||||
|
for (key, val) in response.headers() {
|
||||||
|
http_response_builder = http_response_builder.header(key, val);
|
||||||
|
}
|
||||||
|
let body = response.bytes().await?;
|
||||||
|
http_response_builder.body(body.to_vec()).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(FullExchange {
|
||||||
|
socket_addr: ip_address,
|
||||||
|
request: http_request,
|
||||||
|
response: http_response,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn warc_writer(rx: Receiver<FullExchange>, output_zstd_path: String) {
|
||||||
|
let zstd_proc = Command::new("zstd")
|
||||||
|
.args(&["-T0", "-15", "--long", "-o", &output_zstd_path])
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stderr(Stdio::null())
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.spawn()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let mut writer = WarcWriter::new(zstd_proc.stdin.unwrap());
|
||||||
|
writer
|
||||||
|
.write(
|
||||||
|
&RecordBuilder::default()
|
||||||
|
.version("1.0".to_owned())
|
||||||
|
.warc_type(warc::RecordType::WarcInfo)
|
||||||
|
.header(WarcHeader::ContentType, "application/warc-fields")
|
||||||
|
.body(format!("software: preciazo-warcificator/0.0.0\nformat: WARC file version 1.0\nconformsTo: http://www.archive.org/documents/WarcFileFormat-1.0.html").into())
|
||||||
|
.build()
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
while let Ok(res) = rx.recv().await {
|
||||||
|
let uri = res.request.uri().to_string();
|
||||||
|
let req_record = {
|
||||||
|
let mut builder = RecordBuilder::default()
|
||||||
|
.version("1.0".to_owned())
|
||||||
|
.warc_type(warc::RecordType::Request)
|
||||||
|
.header(WarcHeader::TargetURI, uri.clone())
|
||||||
|
.header(WarcHeader::ContentType, "application/http;msgtype=request")
|
||||||
|
.header(
|
||||||
|
WarcHeader::Unknown("X-Warcificator-Lying".to_string()),
|
||||||
|
"the request contains other headers not included here",
|
||||||
|
);
|
||||||
|
if let Some(addr) = res.socket_addr {
|
||||||
|
builder = builder.header(WarcHeader::IPAddress, addr.ip().to_string());
|
||||||
|
}
|
||||||
|
builder
|
||||||
|
.body(format_http11_request(res.request).into_bytes())
|
||||||
|
.build()
|
||||||
|
.unwrap()
|
||||||
|
};
|
||||||
|
writer.write(&req_record).unwrap();
|
||||||
|
writer
|
||||||
|
.write(&{
|
||||||
|
let mut builder = RecordBuilder::default()
|
||||||
|
.version("1.0".to_owned())
|
||||||
|
.warc_type(warc::RecordType::Response)
|
||||||
|
.header(WarcHeader::TargetURI, uri)
|
||||||
|
.header(WarcHeader::ConcurrentTo, req_record.warc_id())
|
||||||
|
.header(WarcHeader::ContentType, "application/http;msgtype=response");
|
||||||
|
if let Some(addr) = res.socket_addr {
|
||||||
|
builder = builder.header(WarcHeader::IPAddress, addr.ip().to_string());
|
||||||
|
}
|
||||||
|
builder
|
||||||
|
.body(format_http11_response(res.response))
|
||||||
|
.build()
|
||||||
|
.unwrap()
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_http11_request(req: http::Request<&'static str>) -> String {
|
||||||
|
let start_line = format!("{} {} HTTP/1.1", req.method().as_str(), req.uri().path());
|
||||||
|
let headers_str = req
|
||||||
|
.headers()
|
||||||
|
.iter()
|
||||||
|
.map(|(key, val)| format!("{}: {}\r\n", key, val.to_str().unwrap()))
|
||||||
|
.collect::<String>();
|
||||||
|
|
||||||
|
[start_line.as_str(), headers_str.as_str(), req.body()].join("\r\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_http11_response(res: http::Response<Vec<u8>>) -> Vec<u8> {
|
||||||
|
let start_line = format!(
|
||||||
|
"HTTP/1.1 {} {}",
|
||||||
|
res.status().as_str(),
|
||||||
|
res.status().canonical_reason().unwrap_or("")
|
||||||
|
);
|
||||||
|
let headers_str = res
|
||||||
|
.headers()
|
||||||
|
.iter()
|
||||||
|
.map(|(key, val)| format!("{}: {}\r\n", key, val.to_str().unwrap()))
|
||||||
|
.collect::<String>();
|
||||||
|
|
||||||
|
let crlf: &[u8] = &[13, 10];
|
||||||
|
[start_line.as_bytes(), headers_str.as_bytes(), res.body()].join(crlf)
|
||||||
|
}
|