Compare commits

...

10 commits

5 changed files with 177 additions and 103 deletions

.gitignore vendored

@@ -1,3 +1,5 @@
node_modules/
dataJsons/
log
prueba
datos.gob.ar*

download_json.js

@@ -1,28 +1,17 @@
// @ts-check
import { mkdir, open } from "node:fs/promises";
import { Agent, fetch } from "undici";
import { mkdir, open, writeFile } from "node:fs/promises";
import { Agent, fetch, request, setGlobalDispatcher } from "undici";
import { join, normalize } from "node:path";
import { pipeline } from "node:stream/promises";
// list of domains where we allow plain http: because their HTTPS is broken..
const brokenHttps = [
"datos.mindef.gov.ar", // cert for a different domain
"datos.energia.gob.ar", // cert for a different domain
"datos.minem.gob.ar", // expired 2022-17-06
"datos.agroindustria.gob.ar", // expired 2022-03-10
"andino.siu.edu.ar", // self signed, though everything seems to return 404 here anyway..
"datos.salud.gob.ar", // HTTPS times out
"datos.jus.gob.ar", // HTTPS incorrectly redirects to nonexistent URLs
"www.hidro.gob.ar", // no HTTPS
];
// FYI: at least the following domains don't serve the full certificate chain over HTTPS. we have to use a hack (node_extra_ca_certs_mozilla_bundle) to connect to these sites. (you can check with ssllabs.com) hopefully the admins of these servers fix it.
// www.enargas.gov.ar, transparencia.enargas.gov.ar, www.energia.gob.ar, www.economia.gob.ar, datos.yvera.gob.ar
const dispatcher = new Agent({
pipelining: 10,
maxRedirections: 20,
});
setGlobalDispatcher(
new Agent({
pipelining: 0,
})
);
class StatusCodeError extends Error {
/**
@@ -33,107 +22,142 @@ class StatusCodeError extends Error {
this.code = code;
}
}
class TooManyRedirectsError extends Error {}
const outputPath = process.argv[2];
if (!outputPath) {
console.error("Especificamente el output porfa");
let jsonUrlString = process.argv[2];
if (!jsonUrlString) {
console.error("Especificamente el url al json porfa");
process.exit(1);
}
const jsonUrl = new URL(jsonUrlString);
const outputPath = jsonUrl.host;
await mkdir(outputPath, { recursive: true });
const errorFile = await open(join(outputPath, "errors.jsonl"), "w");
// Read the JSON from stdin
const json = await process.stdin.toArray();
const jsonString = json.join("");
/** @type {{ dataset: Dataset[] }} */
const parsed = JSON.parse(jsonString);
const jsonRes = await fetch(jsonUrl);
// prettier-ignore
const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
const jobs = parsed.dataset.flatMap((dataset) =>
dataset.distribution.map((dist) => ({ dataset, dist })),
dataset.distribution.map((dist) => ({
dataset,
dist,
url: patchUrl(new URL(dist.downloadURL)),
}))
);
// quick-and-dirty way of spreading the load across servers
shuffleArray(jobs);
const totalJobs = jobs.length;
let nFinished = 0;
let nErrors = 0;
const duplicated = hasDuplicates(
jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`),
);
if (duplicated) {
console.error(
"ADVERTENCIA: ¡encontré duplicados! es posible que se pisen archivos entre si",
);
// just in case, check that there are no duplicated files
chequearIdsDuplicados();
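// group jobs by host so that a fixed number of workers runs against each server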
/** @type {Map< string, DownloadJob[] >} */
let jobsPerHost = new Map();
for (const job of jobs) {
jobsPerHost.set(job.url.host, [
...(jobsPerHost.get(job.url.host) || []),
job,
]);
}
const greens = Array(128)
.fill(0)
.map(() =>
(async () => {
let job;
while ((job = jobs.pop())) {
const { dataset, dist } = job;
request: do {
const greens = [...jobsPerHost.entries()].flatMap(([host, jobs]) => {
const nThreads = 8;
return Array(nThreads)
.fill(0)
.map(() =>
(async () => {
let job;
while ((job = jobs.pop())) {
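// each worker keeps pulling jobs for this host until its queue is empty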
try {
await downloadDist(dataset, dist);
await downloadDistWithRetries(job);
} catch (error) {
if (error instanceof StatusCodeError) {
// some servers use 403 as a way of saying "slow down"
if (error.code === 403) {
console.debug(
`debug: reintentando ${dist.downloadURL} porque tiró 403`,
);
await wait(15000);
continue request;
}
error = error.toString();
}
console.error(
`error: Failed to download URL ${dist.downloadURL} (${dataset.identifier}/${dist.identifier}):`,
error,
await errorFile.write(
JSON.stringify({
url: job.url.toString(),
...encodeError(error),
}) + "\n"
);
if (!(error instanceof StatusCodeError)) continue request;
nErrors++;
} finally {
nFinished++;
}
} while (0);
}
})(),
);
}
})()
);
});
process.stderr.write(`greens: ${greens.length}\n`);
const interval = setInterval(() => {
console.info(`info: ${nFinished}/${totalJobs} done`);
}, 15000);
process.stderr.write(`info: ${nFinished}/${totalJobs} done\n`);
}, 30000);
await Promise.all(greens);
clearInterval(interval);
if (nErrors > 0) console.error(`Finished with ${nErrors} errors`);
/**
* @argument {Dataset} dataset
* @argument {Distribution} dist
* @argument {DownloadJob} job
* @argument {number} tries
*/
async function downloadDist(dataset, dist) {
const url = new URL(dist.downloadURL);
async function downloadDistWithRetries(job, tries = 0) {
const { url } = job;
try {
await downloadDist(job);
} catch (error) {
// some servers use 403 as a way of saying "slow down"
// retry up to 15 times with 15 seconds in between
if (
error instanceof StatusCodeError &&
error.code === 403 &&
url.host === "minsegar-my.sharepoint.com" &&
tries < 15
) {
await wait(15000);
return await downloadDistWithRetries(job, tries + 1);
}
// if it wasn't an HTTP error, retry up to 5 times with 5 seconds in between
else if (
!(error instanceof StatusCodeError) &&
!(error instanceof TooManyRedirectsError) &&
tries < 5
) {
await wait(5000);
return await downloadDistWithRetries(job, tries + 1);
} else throw error;
}
}
// Always use HTTPS except where it's broken
if (brokenHttps.includes(url.host)) {
url.protocol = "http:";
// console.debug(url);
} else url.protocol = "https:";
/**
* @argument {DownloadJob} job
*/
async function downloadDist({ dist, dataset, url }) {
// sharepoint doesn't like sharing with bots lol
const spoofUserAgent = url.host.endsWith("sharepoint.com");
const res = await fetch(url.toString(), {
dispatcher,
const res = await request(url.toString(), {
maxRedirections: 20,
headers: {
"User-Agent": spoofUserAgent
? "Mozilla/5.0 (X11; Linux x86_64; rv:120.0) Gecko/20100101 Firefox/120.0"
: "transicion-desordenada (https://nulo.ar)",
},
});
if (res.status >= 400) {
throw new StatusCodeError(res.status);
if (res.statusCode >= 300 && res.statusCode <= 399)
throw new TooManyRedirectsError();
if (res.statusCode < 200 || res.statusCode > 299) {
throw new StatusCodeError(res.statusCode);
}
const fileDirPath = join(
outputPath,
sanitizeSuffix(dataset.identifier),
sanitizeSuffix(dist.identifier),
sanitizeSuffix(dist.identifier)
);
await mkdir(fileDirPath, { recursive: true });
const filePath = join(
fileDirPath,
sanitizeSuffix(dist.fileName || dist.identifier),
sanitizeSuffix(dist.fileName || dist.identifier)
);
const outputFile = await open(filePath, "w");
@@ -141,11 +165,16 @@ async function downloadDist(dataset, dist) {
await pipeline(res.body, outputFile.createWriteStream());
}
/** @typedef {object} Dataset
/** @typedef DownloadJob
* @prop {Dataset} dataset
* @prop {Distribution} dist
* @prop {URL} url
*/
/** @typedef Dataset
* @prop {string} identifier
* @prop {Distribution[]} distribution
*/
/** @typedef {object} Distribution
/** @typedef Distribution
* @prop {string} identifier
* @prop {string} fileName
* @prop {string} downloadURL
@@ -159,31 +188,46 @@ function sanitizeSuffix(path) {
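// strip leading ../ (or ..\) segments so identifiers can't escape the output directory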
return normalize(path).replace(/^(\.\.(\/|\\|$))+/, "");
}
function chequearIdsDuplicados() {
const duplicated = hasDuplicates(
jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`)
);
if (duplicated) {
console.error(
"ADVERTENCIA: ¡encontré duplicados! es posible que se pisen archivos entre si"
);
}
}
// https://stackoverflow.com/a/7376645
/**
* @argument {any[]} array
*/
/** @argument {any[]} array */
function hasDuplicates(array) {
return new Set(array).size !== array.length;
}
// https://stackoverflow.com/a/12646864
/**
* @argument {any[]} array
*/
function shuffleArray(array) {
for (var i = array.length - 1; i > 0; i--) {
var j = Math.floor(Math.random() * (i + 1));
var temp = array[i];
array[i] = array[j];
array[j] = temp;
}
}
/**
* @argument {number} ms
*/
/** @argument {number} ms */
function wait(ms) {
if (ms < 0) return Promise.resolve();
return new Promise((resolve) => setTimeout(resolve, ms));
}
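// turn an error into a small JSON-friendly record for errors.jsonl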
function encodeError(error) {
if (error instanceof StatusCodeError)
return { kind: "http_error", status_code: error.code };
else if (error instanceof TooManyRedirectsError)
return { kind: "infinite_redirect" };
else {
return { kind: "generic_error", error: error.code || error.message };
}
}
/**
* patches URLs that break on their own
* @param {URL} url
*/
function patchUrl(url) {
if (url.host === "www.ign.gob.ar") {
// by default, 'http://www.ign.gob.ar' redirects to 'https://ign.gob.ar' but its certificate is only valid for '*.ign.gob.ar'. all the content is served correctly on 'https://www.ign.gob.ar', so we go there instead.
url.protocol = "https:";
}
return url;
}

package.json

@@ -13,5 +13,8 @@
"dependencies": {
"node_extra_ca_certs_mozilla_bundle": "^1.0.5",
"undici": "^5.28.0"
},
"devDependencies": {
"@types/node": "^20.10.0"
}
}

pnpm-lock.yaml

@@ -12,6 +12,11 @@ dependencies:
specifier: ^5.28.0
version: 5.28.0
devDependencies:
'@types/node':
specifier: ^20.10.0
version: 20.10.0
packages:
/@fastify/busboy@2.1.0:
@@ -19,6 +24,12 @@ packages:
engines: {node: '>=14'}
dev: false
/@types/node@20.10.0:
resolution: {integrity: sha512-D0WfRmU9TQ8I9PFx9Yc+EBHw+vSpIub4IDvQivcp26PtPrdMGAq5SDcpXEo/epqa/DXotVpekHiLNTg3iaKXBQ==}
dependencies:
undici-types: 5.26.5
dev: true
/asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: false
@@ -153,6 +164,10 @@ packages:
is-utf8: 0.2.1
dev: false
/undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
dev: true
/undici@5.28.0:
resolution: {integrity: sha512-gM12DkXhlAc5+/TPe60iy9P6ETgVfqTuRJ6aQ4w8RYu0MqKuXhaq3/b86GfzDQnNA3NUO6aUNdvevrKH59D0Nw==}
engines: {node: '>=14.0'}

readme.md

@@ -9,5 +9,15 @@ pnpm install
## run
```
pnpm run run download_json.js carpeta_output < dataJsons/datos.gob.ar.data.json
pnpm run run download_json.js https://datos.gob.ar/data.json
# saves into ./datos.gob.ar
```
## saved repo format
- `{repo domain}`
  - `data.json`
  - `errors.jsonl`
  - `{dataset identifier}`
    - `{distribution identifier}`
      - `{fileName (or, if missing, the distribution identifier)}`
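
For example, after running the command above against `https://datos.gob.ar/data.json`, the saved tree might look like this (the dataset and distribution identifiers below are made up for illustration):

```
datos.gob.ar/
├── data.json
├── errors.jsonl
└── some-dataset-id/
    └── some-distribution-id/
        └── some-file.csv
```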