mirror of https://github.com/catdevnull/transicion-desordenada-diablo
synced 2024-11-26 03:26:18 +00:00

Compare commits: 16a2b6252a ... 14b94d53df (7 commits)

- 14b94d53df
- 1b84bff65d
- 5870592ac6
- ef697d1cae
- 704634c979
- 4a3a117e0f
- 42aec50b7f
8 changed files with 77909 additions and 220 deletions
.gitignore (vendored, 1 changed line)

@@ -3,3 +3,4 @@ dataJsons/
 log
 prueba
 datos.gob.ar*
+data/
Containerfile (new file, 19 lines)

@@ -0,0 +1,19 @@
+
+FROM docker.io/alpine:3.18 as build
+RUN apk add --no-cache npm esbuild
+RUN npm install -g esbuild
+WORKDIR /tmp/build
+
+COPY package.json .
+RUN npm install
+
+COPY download_json.js .
+RUN esbuild --bundle --format=cjs --platform=node --outfile=build.js download_json.js
+
+FROM docker.io/alpine:3.18
+RUN apk add --no-cache nodejs-current tini
+COPY pki/ca_intermediate_root_bundle.pem /usr/lib/ca_intermediate_root_bundle.pem
+COPY --from=build /tmp/build/build.js /usr/local/bin/download_json.js
+ENV NODE_EXTRA_CA_CERTS=/usr/lib/ca_intermediate_root_bundle.pem
+WORKDIR /data
+CMD ["/sbin/tini", "node", "/usr/local/bin/download_json.js", "https://datos.gob.ar/data.json", "http://datos.energia.gob.ar/data.json", "https://datos.magyp.gob.ar/data.json", "https://datos.acumar.gov.ar/data.json", "https://datasets.datos.mincyt.gob.ar/data.json", "https://datos.arsat.com.ar/data.json", "https://datos.cultura.gob.ar/data.json", "https://datos.mininterior.gob.ar/data.json", "https://datos.produccion.gob.ar/data.json", "https://datos.salud.gob.ar/data.json", "https://datos.transporte.gob.ar/data.json", "https://ckan.ciudaddemendoza.gov.ar/data.json", "https://datos.santafe.gob.ar/data.json", "https://datosabiertos.chaco.gob.ar/data.json", "https://datosabiertos.gualeguaychu.gov.ar/data.json", "https://datosabiertos.mercedes.gob.ar/data.json", "http://luj-bue-datos.paisdigital.innovacion.gob.ar/data.json", "https://datosabiertos.desarrollosocial.gob.ar", "http://datos.mindef.gov.ar/data.json"]
download_json.js (185 changed lines)

@@ -2,7 +2,7 @@
 import { mkdir, open, writeFile } from "node:fs/promises";
 import { Agent, fetch, request, setGlobalDispatcher } from "undici";
 import { join, normalize } from "node:path";
-import { pipeline } from "node:stream/promises";
+import pLimit from "p-limit";

 // FYI: al menos los siguientes dominios no tienen la cadena completa de certificados en HTTPS. tenemos que usar un hack (node_extra_ca_certs_mozilla_bundle) para conectarnos a estos sitios. (se puede ver con ssllabs.com) ojalá lxs administradorxs de estos servidores lo arreglen.
 // www.enargas.gov.ar, transparencia.enargas.gov.ar, www.energia.gob.ar, www.economia.gob.ar, datos.yvera.gob.ar
@@ -13,6 +13,11 @@ setGlobalDispatcher(
   })
 );

+/** key es host
+ * @type {Map<string, import("p-limit").LimitFunction>} */
+const limiters = new Map();
+const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 8;
+
 class StatusCodeError extends Error {
   /**
    * @param {number} code
@@ -23,84 +28,102 @@ class StatusCodeError extends Error {
   }
 }
 class TooManyRedirectsError extends Error {}
-let jsonUrlString = process.argv[2];
-if (!jsonUrlString) {
+const jsonUrls = process.argv.slice(2);
+if (jsonUrls.length < 1) {
   console.error("Especificamente el url al json porfa");
   process.exit(1);
 }
-const jsonUrl = new URL(jsonUrlString);
-const outputPath = jsonUrl.host;
-await mkdir(outputPath, { recursive: true });
-const errorFile = await open(join(outputPath, "errors.jsonl"), "w");
-
-const jsonRes = await fetch(jsonUrl);
-// prettier-ignore
-const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
-await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
-
-const jobs = parsed.dataset.flatMap((dataset) =>
-  dataset.distribution.map((dist) => ({
+for (const url of jsonUrls)
+  downloadFromData(url).catch((error) =>
+    console.error(`${url} FALLÓ CON`, error)
+  );
+
+/**
+ * @param {string} jsonUrlString
+ */
+async function downloadFromData(jsonUrlString) {
+  const jsonUrl = new URL(jsonUrlString);
+  const outputPath = jsonUrl.host;
+  await mkdir(outputPath, { recursive: true });
+  const errorFile = (
+    await open(join(outputPath, "errors.jsonl"), "w")
+  ).createWriteStream();
+
+  try {
+    const jsonRes = await fetch(jsonUrl);
+    // prettier-ignore
+    const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
+    await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
+
+    /** @type {DownloadJob[]} */
+    const jobs = parsed.dataset.flatMap((dataset) =>
+      dataset.distribution
+        .filter((dist) => {
+          try {
+            patchUrl(new URL(dist.downloadURL));
+            return true;
+          } catch (error) {
+            errorFile.write(
+              JSON.stringify(encodeError({ dataset, dist }, error)) + "\n"
+            );
+            return false;
+          }
+        })
+        .map((dist) => ({
           dataset,
           dist,
           url: patchUrl(new URL(dist.downloadURL)),
+          outputPath,
+          attempts: 0,
         }))
     );
     const totalJobs = jobs.length;
     let nFinished = 0;
     let nErrors = 0;

     // por las dudas verificar que no hayan archivos duplicados
-chequearIdsDuplicados();
+    chequearIdsDuplicados(jobs);

-/** @type {Map< string, DownloadJob[] >} */
-let jobsPerHost = new Map();
-for (const job of jobs) {
-  jobsPerHost.set(job.url.host, [
-    ...(jobsPerHost.get(job.url.host) || []),
-    job,
-  ]);
-}
+    shuffleArray(jobs);

-const greens = [...jobsPerHost.entries()].flatMap(([host, jobs]) => {
-  const nThreads = 8;
-  return Array(nThreads)
-    .fill(0)
-    .map(() =>
-      (async () => {
-        let job;
-        while ((job = jobs.pop())) {
+    const promises = jobs.map((job) => {
+      let limit = limiters.get(job.url.host);
+      if (!limit) {
+        limit = pLimit(nThreads);
+        limiters.set(job.url.host, limit);
+      }
+      return limit(async () => {
        try {
          await downloadDistWithRetries(job);
        } catch (error) {
-          await errorFile.write(
-            JSON.stringify({
-              url: job.url.toString(),
-              ...encodeError(error),
-            }) + "\n"
-          );
+          await errorFile.write(JSON.stringify(job, encodeError(error)) + "\n");
          nErrors++;
        } finally {
          nFinished++;
        }
-        }
-      })()
-    );
-});
-process.stderr.write(`greens: ${greens.length}\n`);
+      });
+    });

-const interval = setInterval(() => {
-  process.stderr.write(`info: ${nFinished}/${totalJobs} done\n`);
-}, 30000);
-await Promise.all(greens);
-clearInterval(interval);
-if (nErrors > 0) console.error(`Finished with ${nErrors} errors`);
+    process.stderr.write(`info[${jsonUrl.host}]: 0/${totalJobs} done\n`);
+    const interval = setInterval(() => {
+      process.stderr.write(
+        `info[${jsonUrl.host}]: ${nFinished}/${totalJobs} done\n`
+      );
+    }, 30000);
+    await Promise.all(promises);
+    clearInterval(interval);
+    if (nErrors > 0)
+      console.error(`${jsonUrl.host}: Finished with ${nErrors} errors`);
+  } finally {
+    errorFile.close();
+  }
+}

 /**
  * @argument {DownloadJob} job
- * @argument {number} tries
+ * @argument {number} attempts
  */
-async function downloadDistWithRetries(job, tries = 0) {
+async function downloadDistWithRetries(job, attempts = 0) {
   const { url } = job;
   try {
     await downloadDist(job);
@@ -111,19 +134,19 @@ async function downloadDistWithRetries(job, tries = 0) {
       error instanceof StatusCodeError &&
       error.code === 403 &&
       url.host === "minsegar-my.sharepoint.com" &&
-      tries < 15
+      attempts < 15
     ) {
       await wait(15000);
-      return await downloadDistWithRetries(job, tries + 1);
+      return await downloadDistWithRetries(job, attempts + 1);
     }
     // si no fue un error de http, reintentar hasta 5 veces con 5 segundos de por medio
     else if (
       !(error instanceof StatusCodeError) &&
       !(error instanceof TooManyRedirectsError) &&
-      tries < 5
+      attempts < 10
     ) {
       await wait(5000);
-      return await downloadDistWithRetries(job, tries + 1);
+      return await downloadDistWithRetries(job, attempts + 1);
     } else throw error;
   }
 }
@@ -131,7 +154,7 @@ async function downloadDistWithRetries(job, tries = 0) {
 /**
  * @argument {DownloadJob} job
  */
-async function downloadDist({ dist, dataset, url }) {
+async function downloadDist({ dist, dataset, url, outputPath }) {
   // sharepoint no le gusta compartir a bots lol
   const spoofUserAgent = url.host.endsWith("sharepoint.com");

@@ -159,16 +182,18 @@ async function downloadDist({ dist, dataset, url }) {
     fileDirPath,
     sanitizeSuffix(dist.fileName || dist.identifier)
   );
-  const outputFile = await open(filePath, "w");

   if (!res.body) throw new Error("no body");
-  await pipeline(res.body, outputFile.createWriteStream());
+  await writeFile(filePath, res.body);
 }

 /** @typedef DownloadJob
  * @prop {Dataset} dataset
  * @prop {Distribution} dist
  * @prop {URL} url
+ * @prop {string} outputPath
+ * @prop {number} attempts
+ * @prop {Date=} waitUntil
  */
 /** @typedef Dataset
  * @prop {string} identifier
@@ -188,7 +213,10 @@ function sanitizeSuffix(path) {
   return normalize(path).replace(/^(\.\.(\/|\\|$))+/, "");
 }

-function chequearIdsDuplicados() {
+/**
+ * @param {DownloadJob[]} jobs
+ */
+function chequearIdsDuplicados(jobs) {
   const duplicated = hasDuplicates(
     jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`)
   );
@@ -206,17 +234,29 @@ function hasDuplicates(array) {

 /** @argument {number} ms */
 function wait(ms) {
-  if (ms < 0) return Promise.resolve();
   return new Promise((resolve) => setTimeout(resolve, ms));
 }

-function encodeError(error) {
+/**
+ * @param {{ dataset: Dataset, dist: Distribution, url?: URL }} job
+ * @param {any} error
+ */
+function encodeError(job, error) {
+  const always = {
+    url: job.url?.toString || job.dist.downloadURL,
+    datasetIdentifier: job.dataset.identifier,
+    distributionIdentifier: job.dist.identifier,
+  };
   if (error instanceof StatusCodeError)
-    return { kind: "http_error", status_code: error.code };
+    return { ...always, kind: "http_error", status_code: error.code };
   else if (error instanceof TooManyRedirectsError)
-    return { kind: "infinite_redirect" };
+    return { ...always, kind: "infinite_redirect" };
   else {
-    return { kind: "generic_error", error: error.code || error.message };
+    return {
+      ...always,
+      kind: "generic_error",
+      error: error.code || error.message,
+    };
   }
 }

@@ -231,3 +271,12 @@ function patchUrl(url) {
   }
   return url;
 }
+
+// https://stackoverflow.com/a/12646864
+/** @param {any[]} array */
+function shuffleArray(array) {
+  for (let i = array.length - 1; i > 0; i--) {
+    const j = Math.floor(Math.random() * (i + 1));
+    [array[i], array[j]] = [array[j], array[i]];
+  }
+}
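The main change above replaces the old single shared worker pool with one p-limit limiter per host, created lazily in the `limiters` Map and sized by `N_THREADS` (default 8). A minimal standalone sketch of that pattern, outside the repo's code (the `limiterFor` helper and the example URLs are made up for illustration):

```
import pLimit from "p-limit";

// one limiter per host, created on first use, like the `limiters` Map in the diff
const limiters = new Map();
const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 8;

// illustrative helper (not in the repo): get or create the limiter for a host
function limiterFor(host) {
  let limit = limiters.get(host);
  if (!limit) {
    limit = pLimit(nThreads);
    limiters.set(host, limit);
  }
  return limit;
}

// at most nThreads requests run at once against any single host,
// while different hosts proceed in parallel
const urls = ["https://example.com/a.json", "https://example.org/b.json"];
await Promise.all(
  urls.map((u) => limiterFor(new URL(u).host)(() => fetch(u)))
);
```

Compared with the old shared queue of 8 workers, a slow or rate-limited host only delays its own jobs instead of tying up workers that other hosts could use.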
|
|
|
@ -5,13 +5,13 @@
|
||||||
"description": "",
|
"description": "",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"run": "env NODE_EXTRA_CA_CERTS=node_modules/node_extra_ca_certs_mozilla_bundle/ca_bundle/ca_intermediate_root_bundle.pem node download_json.js"
|
"run": "env NODE_EXTRA_CA_CERTS=pki/ca_intermediate_root_bundle.pem node download_json.js"
|
||||||
},
|
},
|
||||||
"keywords": [],
|
"keywords": [],
|
||||||
"author": "",
|
"author": "",
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"node_extra_ca_certs_mozilla_bundle": "^1.0.5",
|
"p-limit": "^5.0.0",
|
||||||
"undici": "^5.28.0"
|
"undici": "^5.28.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|
pki/ca_intermediate_root_bundle.pem (new file, 77742 lines)

File diff suppressed because it is too large.
pki/readme.md (new file, 1 line)

@@ -0,0 +1 @@
+Generado por [node_extra_ca_certs_mozilla_bundle](https://www.npmjs.com/package/node_extra_ca_certs_mozilla_bundle), lo copiamos acá para que sea más fácil bundlearlo.
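The bundle exists because several of the target hosts serve incomplete certificate chains (see the FYI comment in download_json.js); both `pnpm run run` and the container point `NODE_EXTRA_CA_CERTS` at it. As a rough sketch (not the project's code), the same bundle can also be handed to undici's TLS connector for a single request, assuming the vendored path below:

```
import { readFile } from "node:fs/promises";
import { Agent, fetch } from "undici";

// the Mozilla intermediate+root bundle now vendored under pki/
const ca = await readFile("pki/ca_intermediate_root_bundle.pem", "utf8");

// pass the CA list to the connector for this request only
const res = await fetch("https://www.energia.gob.ar/", {
  dispatcher: new Agent({ connect: { ca } }),
});
console.log(res.status);
```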
pnpm-lock.yaml (150 changed lines)

@@ -5,9 +5,9 @@ settings:
   excludeLinksFromLockfile: false

 dependencies:
-  node_extra_ca_certs_mozilla_bundle:
-    specifier: ^1.0.5
-    version: 1.0.5
+  p-limit:
+    specifier: ^5.0.0
+    version: 5.0.0
   undici:
     specifier: ^5.28.0
     version: 5.28.0
@@ -30,138 +30,11 @@ packages:
       undici-types: 5.26.5
     dev: true

-  /asynckit@0.4.0:
-    resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
-    dev: false
-
-  /axios@0.27.2:
-    resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==}
-    dependencies:
-      follow-redirects: 1.15.3
-      form-data: 4.0.0
-    transitivePeerDependencies:
-      - debug
-    dev: false
-
-  /bluebird@3.7.2:
-    resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==}
-    dev: false
-
-  /combined-stream@1.0.8:
-    resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
-    engines: {node: '>= 0.8'}
-    dependencies:
-      delayed-stream: 1.0.0
-    dev: false
-
-  /cross-env@6.0.3:
-    resolution: {integrity: sha512-+KqxF6LCvfhWvADcDPqo64yVIB31gv/jQulX2NGzKS/g3GEVz6/pt4wjHFtFWsHMddebWD/sDthJemzM4MaAag==}
-    engines: {node: '>=8.0'}
-    hasBin: true
-    dependencies:
-      cross-spawn: 7.0.3
-    dev: false
-
-  /cross-spawn@7.0.3:
-    resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
-    engines: {node: '>= 8'}
-    dependencies:
-      path-key: 3.1.1
-      shebang-command: 2.0.0
-      which: 2.0.2
-    dev: false
-
-  /csvtojson@2.0.10:
-    resolution: {integrity: sha512-lUWFxGKyhraKCW8Qghz6Z0f2l/PqB1W3AO0HKJzGIQ5JRSlR651ekJDiGJbBT4sRNNv5ddnSGVEnsxP9XRCVpQ==}
-    engines: {node: '>=4.0.0'}
-    hasBin: true
-    dependencies:
-      bluebird: 3.7.2
-      lodash: 4.17.21
-      strip-bom: 2.0.0
-    dev: false
-
-  /delayed-stream@1.0.0:
-    resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
-    engines: {node: '>=0.4.0'}
-    dev: false
-
-  /follow-redirects@1.15.3:
-    resolution: {integrity: sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==}
-    engines: {node: '>=4.0'}
-    peerDependencies:
-      debug: '*'
-    peerDependenciesMeta:
-      debug:
-        optional: true
-    dev: false
-
-  /form-data@4.0.0:
-    resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==}
-    engines: {node: '>= 6'}
-    dependencies:
-      asynckit: 0.4.0
-      combined-stream: 1.0.8
-      mime-types: 2.1.35
-    dev: false
-
-  /is-utf8@0.2.1:
-    resolution: {integrity: sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q==}
-    dev: false
-
-  /isexe@2.0.0:
-    resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
-    dev: false
-
-  /lodash@4.17.21:
-    resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
-    dev: false
-
-  /mime-db@1.52.0:
-    resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
-    engines: {node: '>= 0.6'}
-    dev: false
-
-  /mime-types@2.1.35:
-    resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
-    engines: {node: '>= 0.6'}
-    dependencies:
-      mime-db: 1.52.0
-    dev: false
-
-  /node_extra_ca_certs_mozilla_bundle@1.0.5:
-    resolution: {integrity: sha512-Y+wek3qK8WYybCIxArGTmCEJCJ/6uGud/HCJECBZPIgagF9ba90nhnQMxBcMUAwQaR53iphGYp0JzlVPpUBsjg==}
-    requiresBuild: true
-    dependencies:
-      axios: 0.27.2
-      cross-env: 6.0.3
-      csvtojson: 2.0.10
-    transitivePeerDependencies:
-      - debug
-    dev: false
-
-  /path-key@3.1.1:
-    resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
-    engines: {node: '>=8'}
-    dev: false
-
-  /shebang-command@2.0.0:
-    resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
-    engines: {node: '>=8'}
-    dependencies:
-      shebang-regex: 3.0.0
-    dev: false
-
-  /shebang-regex@3.0.0:
-    resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
-    engines: {node: '>=8'}
-    dev: false
-
-  /strip-bom@2.0.0:
-    resolution: {integrity: sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==}
-    engines: {node: '>=0.10.0'}
-    dependencies:
-      is-utf8: 0.2.1
-    dev: false
+  /p-limit@5.0.0:
+    resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==}
+    engines: {node: '>=18'}
+    dependencies:
+      yocto-queue: 1.0.0
+    dev: false

   /undici-types@5.26.5:
@@ -175,10 +48,7 @@
       '@fastify/busboy': 2.1.0
     dev: false

-  /which@2.0.2:
-    resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
-    engines: {node: '>= 8'}
-    hasBin: true
-    dependencies:
-      isexe: 2.0.0
-    dev: false
+  /yocto-queue@1.0.0:
+    resolution: {integrity: sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==}
+    engines: {node: '>=12.20'}
+    dev: false
readme.md

@@ -13,11 +13,18 @@ pnpm run run download_json.js https://datos.gob.ar/data.json
 # guarda en ./datos.gob.ar
 ```

+## contenedor
+
+```
+docker run --rm -it -v ./data:/data gitea.nulo.in/nulo/transicion-desordenada-diablo/downloader
+# descarga datos.gob.ar
+```
+
 ## formato de repo guardado

 - `{dominio de repo}`
   - `data.json`
-  - `errors.jsonl`
+  - `errors.jsonl`: archivo con todos los errores que se obtuvieron al intentar descargar todo.
   - `{identifier de dataset}`
     - `{identifier de distribution}`
       - `{fileName (o, si no existe, identifier de distribution)}`
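Errors are written to `errors.jsonl` one JSON object per line; the objects built by `encodeError` carry a `url`, the dataset and distribution identifiers, and a `kind` (`http_error`, `infinite_redirect` or `generic_error`). A small sketch, not part of the repo, of reading an archived dump in this layout (the `datos.gob.ar` directory name is just an example):

```
import { readFile } from "node:fs/promises";
import { join } from "node:path";

const repoDir = "datos.gob.ar"; // a `{dominio de repo}` directory

// count the recorded download errors per kind
const lines = (await readFile(join(repoDir, "errors.jsonl"), "utf8"))
  .split("\n")
  .filter(Boolean);

const byKind = new Map();
for (const line of lines) {
  const { kind = "unknown" } = JSON.parse(line);
  byKind.set(kind, (byKind.get(kind) ?? 0) + 1);
}
console.log(Object.fromEntries(byKind));
```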