Compare commits

...

7 commits

8 changed files with 77909 additions and 220 deletions

3
.gitignore vendored
View file

@ -2,4 +2,5 @@ node_modules/
dataJsons/
log
prueba
datos.gob.ar*
datos.gob.ar*
data/

19
Containerfile Normal file
View file

@ -0,0 +1,19 @@
# Build stage: bundle download_json.js and its npm dependencies into a single
# CommonJS file so the runtime image needs neither npm nor node_modules.
FROM docker.io/alpine:3.18 AS build
# FIX: esbuild was installed twice (via apk AND `npm install -g esbuild`);
# one install is enough.
RUN apk add --no-cache npm esbuild
WORKDIR /tmp/build
COPY package.json .
RUN npm install
COPY download_json.js .
RUN esbuild --bundle --format=cjs --platform=node --outfile=build.js download_json.js

# Runtime stage: only node, tini (PID-1 signal/zombie handling) and the bundle.
FROM docker.io/alpine:3.18
RUN apk add --no-cache nodejs-current tini
# Several .gob.ar portals serve incomplete HTTPS certificate chains; the extra
# CA bundle lets node validate them (see comment in download_json.js).
COPY pki/ca_intermediate_root_bundle.pem /usr/lib/ca_intermediate_root_bundle.pem
COPY --from=build /tmp/build/build.js /usr/local/bin/download_json.js
ENV NODE_EXTRA_CA_CERTS=/usr/lib/ca_intermediate_root_bundle.pem
WORKDIR /data
# NOTE(review): datosabiertos.desarrollosocial.gob.ar is the only URL without a
# /data.json suffix — confirm whether that is intentional.
# `--` tells tini that everything after it is the child command line.
CMD ["/sbin/tini", "--", "node", "/usr/local/bin/download_json.js", "https://datos.gob.ar/data.json", "http://datos.energia.gob.ar/data.json", "https://datos.magyp.gob.ar/data.json", "https://datos.acumar.gov.ar/data.json", "https://datasets.datos.mincyt.gob.ar/data.json", "https://datos.arsat.com.ar/data.json", "https://datos.cultura.gob.ar/data.json", "https://datos.mininterior.gob.ar/data.json", "https://datos.produccion.gob.ar/data.json", "https://datos.salud.gob.ar/data.json", "https://datos.transporte.gob.ar/data.json", "https://ckan.ciudaddemendoza.gov.ar/data.json", "https://datos.santafe.gob.ar/data.json", "https://datosabiertos.chaco.gob.ar/data.json", "https://datosabiertos.gualeguaychu.gov.ar/data.json", "https://datosabiertos.mercedes.gob.ar/data.json", "http://luj-bue-datos.paisdigital.innovacion.gob.ar/data.json", "https://datosabiertos.desarrollosocial.gob.ar", "http://datos.mindef.gov.ar/data.json"]

View file

@ -2,7 +2,7 @@
import { mkdir, open, writeFile } from "node:fs/promises";
import { Agent, fetch, request, setGlobalDispatcher } from "undici";
import { join, normalize } from "node:path";
import { pipeline } from "node:stream/promises";
import pLimit from "p-limit";
// FYI: al menos los siguientes dominios no tienen la cadena completa de certificados en HTTPS. tenemos que usar un hack (node_extra_ca_certs_mozilla_bundle) para conectarnos a estos sitios. (se puede ver con ssllabs.com) ojalá lxs administradorxs de estos servidores lo arreglen.
// www.enargas.gov.ar, transparencia.enargas.gov.ar, www.energia.gob.ar, www.economia.gob.ar, datos.yvera.gob.ar
@ -13,6 +13,11 @@ setGlobalDispatcher(
})
);
/** key es host
* @type {Map<string, import("p-limit").LimitFunction>} */
const limiters = new Map();
const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 8;
class StatusCodeError extends Error {
/**
* @param {number} code
@ -23,84 +28,102 @@ class StatusCodeError extends Error {
}
}
class TooManyRedirectsError extends Error {}
let jsonUrlString = process.argv[2];
if (!jsonUrlString) {
const jsonUrls = process.argv.slice(2);
if (jsonUrls.length < 1) {
console.error("Especificamente el url al json porfa");
process.exit(1);
}
const jsonUrl = new URL(jsonUrlString);
const outputPath = jsonUrl.host;
await mkdir(outputPath, { recursive: true });
const errorFile = await open(join(outputPath, "errors.jsonl"), "w");
for (const url of jsonUrls)
downloadFromData(url).catch((error) =>
console.error(`${url} FALLÓ CON`, error)
);
const jsonRes = await fetch(jsonUrl);
// prettier-ignore
const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
/**
* @param {string} jsonUrlString
*/
async function downloadFromData(jsonUrlString) {
const jsonUrl = new URL(jsonUrlString);
const outputPath = jsonUrl.host;
await mkdir(outputPath, { recursive: true });
const errorFile = (
await open(join(outputPath, "errors.jsonl"), "w")
).createWriteStream();
const jobs = parsed.dataset.flatMap((dataset) =>
dataset.distribution.map((dist) => ({
dataset,
dist,
url: patchUrl(new URL(dist.downloadURL)),
}))
);
const totalJobs = jobs.length;
let nFinished = 0;
let nErrors = 0;
try {
const jsonRes = await fetch(jsonUrl);
// prettier-ignore
const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
// por las dudas verificar que no hayan archivos duplicados
chequearIdsDuplicados();
/** @type {Map< string, DownloadJob[] >} */
let jobsPerHost = new Map();
for (const job of jobs) {
jobsPerHost.set(job.url.host, [
...(jobsPerHost.get(job.url.host) || []),
job,
]);
}
const greens = [...jobsPerHost.entries()].flatMap(([host, jobs]) => {
const nThreads = 8;
return Array(nThreads)
.fill(0)
.map(() =>
(async () => {
let job;
while ((job = jobs.pop())) {
/** @type {DownloadJob[]} */
const jobs = parsed.dataset.flatMap((dataset) =>
dataset.distribution
.filter((dist) => {
try {
await downloadDistWithRetries(job);
patchUrl(new URL(dist.downloadURL));
return true;
} catch (error) {
await errorFile.write(
JSON.stringify({
url: job.url.toString(),
...encodeError(error),
}) + "\n"
errorFile.write(
JSON.stringify(encodeError({ dataset, dist }, error)) + "\n"
);
nErrors++;
} finally {
nFinished++;
return false;
}
}
})()
})
.map((dist) => ({
dataset,
dist,
url: patchUrl(new URL(dist.downloadURL)),
outputPath,
attempts: 0,
}))
);
});
process.stderr.write(`greens: ${greens.length}\n`);
const totalJobs = jobs.length;
let nFinished = 0;
let nErrors = 0;
const interval = setInterval(() => {
process.stderr.write(`info: ${nFinished}/${totalJobs} done\n`);
}, 30000);
await Promise.all(greens);
clearInterval(interval);
if (nErrors > 0) console.error(`Finished with ${nErrors} errors`);
// por las dudas verificar que no hayan archivos duplicados
chequearIdsDuplicados(jobs);
shuffleArray(jobs);
// Kick off every download, capping concurrency per host so we don't hammer a
// single server (at most nThreads parallel requests per host).
const promises = jobs.map((job) => {
  // Lazily create one p-limit limiter per host.
  let limit = limiters.get(job.url.host);
  if (!limit) {
    limit = pLimit(nThreads);
    limiters.set(job.url.host, limit);
  }
  return limit(async () => {
    try {
      await downloadDistWithRetries(job);
    } catch (error) {
      // FIX: was `JSON.stringify(job, encodeError(error))`, which passed the
      // encoded error as JSON.stringify's *replacer* argument (so it was
      // silently ignored) and called encodeError with the wrong arity.
      // Serialize the encoded error, consistent with the other
      // errorFile.write() call site in this file.
      errorFile.write(JSON.stringify(encodeError(job, error)) + "\n");
      nErrors++;
    } finally {
      nFinished++;
    }
  });
});
process.stderr.write(`info[${jsonUrl.host}]: 0/${totalJobs} done\n`);
const interval = setInterval(() => {
process.stderr.write(
`info[${jsonUrl.host}]: ${nFinished}/${totalJobs} done\n`
);
}, 30000);
await Promise.all(promises);
clearInterval(interval);
if (nErrors > 0)
console.error(`${jsonUrl.host}: Finished with ${nErrors} errors`);
} finally {
errorFile.close();
}
}
/**
* @argument {DownloadJob} job
* @argument {number} tries
* @argument {number} attempts
*/
async function downloadDistWithRetries(job, tries = 0) {
async function downloadDistWithRetries(job, attempts = 0) {
const { url } = job;
try {
await downloadDist(job);
@ -111,19 +134,19 @@ async function downloadDistWithRetries(job, tries = 0) {
error instanceof StatusCodeError &&
error.code === 403 &&
url.host === "minsegar-my.sharepoint.com" &&
tries < 15
attempts < 15
) {
await wait(15000);
return await downloadDistWithRetries(job, tries + 1);
return await downloadDistWithRetries(job, attempts + 1);
}
// si no fue un error de http, reintentar hasta 5 veces con 5 segundos de por medio
else if (
!(error instanceof StatusCodeError) &&
!(error instanceof TooManyRedirectsError) &&
tries < 5
attempts < 10
) {
await wait(5000);
return await downloadDistWithRetries(job, tries + 1);
return await downloadDistWithRetries(job, attempts + 1);
} else throw error;
}
}
@ -131,7 +154,7 @@ async function downloadDistWithRetries(job, tries = 0) {
/**
* @argument {DownloadJob} job
*/
async function downloadDist({ dist, dataset, url }) {
async function downloadDist({ dist, dataset, url, outputPath }) {
// sharepoint no le gusta compartir a bots lol
const spoofUserAgent = url.host.endsWith("sharepoint.com");
@ -159,16 +182,18 @@ async function downloadDist({ dist, dataset, url }) {
fileDirPath,
sanitizeSuffix(dist.fileName || dist.identifier)
);
const outputFile = await open(filePath, "w");
if (!res.body) throw new Error("no body");
await pipeline(res.body, outputFile.createWriteStream());
await writeFile(filePath, res.body);
}
/** @typedef DownloadJob
* @prop {Dataset} dataset
* @prop {Distribution} dist
* @prop {URL} url
* @prop {string} outputPath
* @prop {number} attempts
* @prop {Date=} waitUntil
*/
/** @typedef Dataset
* @prop {string} identifier
@ -188,7 +213,10 @@ function sanitizeSuffix(path) {
return normalize(path).replace(/^(\.\.(\/|\\|$))+/, "");
}
function chequearIdsDuplicados() {
/**
* @param {DownloadJob[]} jobs
*/
function chequearIdsDuplicados(jobs) {
const duplicated = hasDuplicates(
jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`)
);
@ -206,17 +234,29 @@ function hasDuplicates(array) {
/**
 * Promise-based sleep.
 * @argument {number} ms milliseconds to wait; negative values resolve at once
 */
function wait(ms) {
  return ms < 0
    ? Promise.resolve()
    : new Promise((done) => setTimeout(done, ms));
}
/**
 * Serializes a download error into a JSON-able record for errors.jsonl,
 * always including enough context (url + identifiers) to trace its origin.
 * @param {{ dataset: Dataset, dist: Distribution, url?: URL }} job
 * @param {any} error
 * @returns {{ url: string, datasetIdentifier: string, distributionIdentifier: string, kind: string }}
 */
function encodeError(job, error) {
  const always = {
    // FIX: was `job.url?.toString` (missing parentheses) — that yields the
    // method itself, which is truthy, so the downloadURL fallback never ran
    // and JSON.stringify dropped the key (functions aren't serialized).
    url: job.url?.toString() ?? job.dist.downloadURL,
    datasetIdentifier: job.dataset.identifier,
    distributionIdentifier: job.dist.identifier,
  };
  if (error instanceof StatusCodeError)
    return { ...always, kind: "http_error", status_code: error.code };
  else if (error instanceof TooManyRedirectsError)
    return { ...always, kind: "infinite_redirect" };
  else {
    return {
      ...always,
      kind: "generic_error",
      error: error.code || error.message,
    };
  }
}
@ -231,3 +271,12 @@ function patchUrl(url) {
}
return url;
}
// Fisher–Yates in-place shuffle — https://stackoverflow.com/a/12646864
/** @param {any[]} array */
function shuffleArray(array) {
  for (let idx = array.length - 1; idx > 0; idx--) {
    // Pick a random position in [0, idx] and swap it into place.
    const pick = Math.floor(Math.random() * (idx + 1));
    const tmp = array[idx];
    array[idx] = array[pick];
    array[pick] = tmp;
  }
}

View file

@ -5,13 +5,13 @@
"description": "",
"main": "index.js",
"scripts": {
"run": "env NODE_EXTRA_CA_CERTS=node_modules/node_extra_ca_certs_mozilla_bundle/ca_bundle/ca_intermediate_root_bundle.pem node download_json.js"
"run": "env NODE_EXTRA_CA_CERTS=pki/ca_intermediate_root_bundle.pem node download_json.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"node_extra_ca_certs_mozilla_bundle": "^1.0.5",
"p-limit": "^5.0.0",
"undici": "^5.28.0"
},
"devDependencies": {

File diff suppressed because it is too large Load diff

1
pki/readme.md Normal file
View file

@ -0,0 +1 @@
Generado por [node_extra_ca_certs_mozilla_bundle](https://www.npmjs.com/package/node_extra_ca_certs_mozilla_bundle), lo copiamos acá para que sea más fácil bundlearlo.

View file

@ -5,9 +5,9 @@ settings:
excludeLinksFromLockfile: false
dependencies:
node_extra_ca_certs_mozilla_bundle:
specifier: ^1.0.5
version: 1.0.5
p-limit:
specifier: ^5.0.0
version: 5.0.0
undici:
specifier: ^5.28.0
version: 5.28.0
@ -30,138 +30,11 @@ packages:
undici-types: 5.26.5
dev: true
/asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: false
/axios@0.27.2:
resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==}
/p-limit@5.0.0:
resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==}
engines: {node: '>=18'}
dependencies:
follow-redirects: 1.15.3
form-data: 4.0.0
transitivePeerDependencies:
- debug
dev: false
/bluebird@3.7.2:
resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==}
dev: false
/combined-stream@1.0.8:
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
engines: {node: '>= 0.8'}
dependencies:
delayed-stream: 1.0.0
dev: false
/cross-env@6.0.3:
resolution: {integrity: sha512-+KqxF6LCvfhWvADcDPqo64yVIB31gv/jQulX2NGzKS/g3GEVz6/pt4wjHFtFWsHMddebWD/sDthJemzM4MaAag==}
engines: {node: '>=8.0'}
hasBin: true
dependencies:
cross-spawn: 7.0.3
dev: false
/cross-spawn@7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'}
dependencies:
path-key: 3.1.1
shebang-command: 2.0.0
which: 2.0.2
dev: false
/csvtojson@2.0.10:
resolution: {integrity: sha512-lUWFxGKyhraKCW8Qghz6Z0f2l/PqB1W3AO0HKJzGIQ5JRSlR651ekJDiGJbBT4sRNNv5ddnSGVEnsxP9XRCVpQ==}
engines: {node: '>=4.0.0'}
hasBin: true
dependencies:
bluebird: 3.7.2
lodash: 4.17.21
strip-bom: 2.0.0
dev: false
/delayed-stream@1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dev: false
/follow-redirects@1.15.3:
resolution: {integrity: sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
dev: false
/form-data@4.0.0:
resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==}
engines: {node: '>= 6'}
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
dev: false
/is-utf8@0.2.1:
resolution: {integrity: sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q==}
dev: false
/isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
dev: false
/lodash@4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
dev: false
/mime-db@1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
engines: {node: '>= 0.6'}
dev: false
/mime-types@2.1.35:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
dependencies:
mime-db: 1.52.0
dev: false
/node_extra_ca_certs_mozilla_bundle@1.0.5:
resolution: {integrity: sha512-Y+wek3qK8WYybCIxArGTmCEJCJ/6uGud/HCJECBZPIgagF9ba90nhnQMxBcMUAwQaR53iphGYp0JzlVPpUBsjg==}
requiresBuild: true
dependencies:
axios: 0.27.2
cross-env: 6.0.3
csvtojson: 2.0.10
transitivePeerDependencies:
- debug
dev: false
/path-key@3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
engines: {node: '>=8'}
dev: false
/shebang-command@2.0.0:
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
engines: {node: '>=8'}
dependencies:
shebang-regex: 3.0.0
dev: false
/shebang-regex@3.0.0:
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
engines: {node: '>=8'}
dev: false
/strip-bom@2.0.0:
resolution: {integrity: sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==}
engines: {node: '>=0.10.0'}
dependencies:
is-utf8: 0.2.1
yocto-queue: 1.0.0
dev: false
/undici-types@5.26.5:
@ -175,10 +48,7 @@ packages:
'@fastify/busboy': 2.1.0
dev: false
/which@2.0.2:
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
engines: {node: '>= 8'}
hasBin: true
dependencies:
isexe: 2.0.0
/yocto-queue@1.0.0:
resolution: {integrity: sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==}
engines: {node: '>=12.20'}
dev: false

View file

@ -13,11 +13,18 @@ pnpm run run download_json.js https://datos.gob.ar/data.json
# guarda en ./datos.gob.ar
```
## contenedor
```
docker run --rm -it -v ./data:/data gitea.nulo.in/nulo/transicion-desordenada-diablo/downloader
# descarga todos los portales de datos configurados en el Containerfile (datos.gob.ar, datos.energia.gob.ar, etc.)
```
## formato de repo guardado
- `{dominio de repo}`
- `data.json`
- `errors.jsonl`
- `errors.jsonl`: archivo con todos los errores que se obtuvieron al intentar descargar todo.
- `{identifier de dataset}`
- `{identifier de distribution}`
- `{fileName (o, si no existe, identifier de distribution)}`