mirror of https://github.com/catdevnull/transicion-desordenada-diablo
synced 2024-11-14 18:21:38 +00:00
fix bugs and enable all the data.json sources in the container
parent 704634c979 · commit ef697d1cae
3 changed files with 91 additions and 81 deletions
3 .gitignore vendored

@@ -2,4 +2,5 @@ node_modules/
 dataJsons/
 log
 prueba
 datos.gob.ar*
+data/
35 Dockerfile

@@ -1,10 +1,14 @@
 FROM docker.io/alpine:3.18 as build
 RUN apk add --no-cache npm esbuild
 RUN npm install -g esbuild
-COPY package.json download_json.js /tmp/build/
-RUN cd /tmp/build && \
-  npm install && \
-  esbuild --bundle --format=cjs --platform=node --outfile=build.js download_json.js
+WORKDIR /tmp/build
+
+COPY package.json .
+RUN npm install
+
+COPY download_json.js .
+RUN esbuild --bundle --format=cjs --platform=node --outfile=build.js download_json.js
+
 
 FROM docker.io/alpine:3.18
 RUN apk add --no-cache nodejs-current tini
@@ -12,25 +16,4 @@ COPY pki/ca_intermediate_root_bundle.pem /usr/lib/ca_intermediate_root_bundle.pem
 COPY --from=build /tmp/build/build.js /usr/local/bin/download_json.js
 ENV NODE_EXTRA_CA_CERTS=/usr/lib/ca_intermediate_root_bundle.pem
 WORKDIR /data
-CMD ["/sbin/tini", "node", "/usr/local/bin/download_json.js", "https://datos.gob.ar/data.json"]
-
-# https://datos.gob.ar/data.json
-# http://datos.energia.gob.ar/data.json
-# https://datos.magyp.gob.ar/data.json
-# https://datos.acumar.gov.ar/data.json
-# https://datasets.datos.mincyt.gob.ar/data.json
-# https://datos.arsat.com.ar/data.json
-# https://datos.cultura.gob.ar/data.json
-# https://datos.mininterior.gob.ar/data.json
-# https://datos.produccion.gob.ar/data.json
-# https://datos.salud.gob.ar/data.json
-# https://datos.transporte.gob.ar/data.json
-# https://ckan.ciudaddemendoza.gov.ar/data.json
-# https://datos.santafe.gob.ar/data.json
-# https://datosabiertos.chaco.gob.ar/data.json
-# https://datosabiertos.gualeguaychu.gov.ar/data.json
-# https://datosabiertos.mercedes.gob.ar/data.json
-# http://luj-bue-datos.paisdigital.innovacion.gob.ar/data.json
-
-#https://datos.mindef.gov.ar
-#https://datosabiertos.desarrollosocial.gob.ar
+CMD ["/sbin/tini", "node", "/usr/local/bin/download_json.js", "https://datos.gob.ar/data.json", "http://datos.energia.gob.ar/data.json", "https://datos.magyp.gob.ar/data.json", "https://datos.acumar.gov.ar/data.json", "https://datasets.datos.mincyt.gob.ar/data.json", "https://datos.arsat.com.ar/data.json", "https://datos.cultura.gob.ar/data.json", "https://datos.mininterior.gob.ar/data.json", "https://datos.produccion.gob.ar/data.json", "https://datos.salud.gob.ar/data.json", "https://datos.transporte.gob.ar/data.json", "https://ckan.ciudaddemendoza.gov.ar/data.json", "https://datos.santafe.gob.ar/data.json", "https://datosabiertos.chaco.gob.ar/data.json", "https://datosabiertos.gualeguaychu.gov.ar/data.json", "https://datosabiertos.mercedes.gob.ar/data.json", "http://luj-bue-datos.paisdigital.innovacion.gob.ar/data.json", "https://datosabiertos.desarrollosocial.gob.ar", "http://datos.mindef.gov.ar/data.json"]
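Note: the build stage now copies package.json and runs npm install before copying download_json.js, so Docker's layer cache skips the dependency install whenever only the script changes, and the CMD now enumerates every portal's data.json so a single container run crawls all of them. As a quick, informal sanity check of that URL list, a sketch like the following could be used (not part of the repo; assumes Node 18+ for global fetch, saved as check_portals.mjs):

// check_portals.mjs (hypothetical helper, not in this repo).
// HEAD-requests each portal baked into the CMD above; some servers reject
// HEAD, so a failure here is a hint, not proof that the crawl would fail.
const portals = [
  "https://datos.gob.ar/data.json",
  "http://datos.energia.gob.ar/data.json",
  // ...the rest of the URLs from the CMD list above
];

for (const url of portals) {
  try {
    const res = await fetch(url, { method: "HEAD" });
    console.log(`${res.ok ? "ok" : "FAIL"} ${res.status} ${url}`);
  } catch (error) {
    console.log(`FAIL ${url}: ${error.message}`);
  }
}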
134 download_json.js
@@ -16,7 +16,7 @@ setGlobalDispatcher(
 /** key es host
  * @type {Map<string, import("p-limit").LimitFunction>} */
 const limiters = new Map();
-const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 16;
+const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 8;
 
 class StatusCodeError extends Error {
   /**
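Note: the default per-host parallelism drops from 16 to 8, still overridable via the N_THREADS environment variable. Because limiters is keyed by host, every server gets its own p-limit queue, so a slow portal cannot starve the others. A minimal sketch of that pattern (limitFor is a hypothetical name; the real code inlines the lookup):

// Sketch of the per-host throttling used in this file, built on p-limit.
import pLimit from "p-limit";

const nThreads = process.env.N_THREADS ? parseInt(process.env.N_THREADS) : 8;
/** @type {Map<string, import("p-limit").LimitFunction>} */
const limiters = new Map();

function limitFor(host) {
  let limit = limiters.get(host);
  if (!limit) {
    limit = pLimit(nThreads); // independent pool: hosts never block each other
    limiters.set(host, limit);
  }
  return limit;
}

// usage: await limitFor(new URL(u).host)(() => fetch(u));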
@@ -28,13 +28,15 @@ class StatusCodeError extends Error {
   }
 }
 class TooManyRedirectsError extends Error {}
 
-let jsonUrlString = process.argv[2];
-if (!jsonUrlString) {
+const jsonUrls = process.argv.slice(2);
+if (jsonUrls.length < 1) {
   console.error("Especificamente el url al json porfa");
   process.exit(1);
 }
-downloadFromData(jsonUrlString);
+for (const url of jsonUrls)
+  downloadFromData(url).catch((error) =>
+    console.error(`${url} FALLÓ CON`, error)
+  );
 
 /**
  * @param {string} jsonUrlString
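Note: the entry point now accepts any number of data.json URLs and starts them all at once; attaching .catch to each downloadFromData() promise keeps one failing portal from taking the whole process down with an unhandled rejection. An equivalent, slightly more explicit sketch (assuming the same downloadFromData, in an ES module so top-level await is available):

// Sketch: the same fan-out expressed with Promise.allSettled, which waits
// for every portal and reports the failures together at the end.
const jsonUrls = process.argv.slice(2);
const results = await Promise.allSettled(jsonUrls.map((url) => downloadFromData(url)));
for (const [i, result] of results.entries()) {
  if (result.status === "rejected")
    console.error(`${jsonUrls[i]} FALLÓ CON`, result.reason);
}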
@@ -43,62 +45,86 @@ async function downloadFromData(jsonUrlString) {
   const jsonUrl = new URL(jsonUrlString);
   const outputPath = jsonUrl.host;
   await mkdir(outputPath, { recursive: true });
-  const errorFile = await open(join(outputPath, "errors.jsonl"), "w");
+  const errorFile = (
+    await open(join(outputPath, "errors.jsonl"), "w")
+  ).createWriteStream();
 
-  const jsonRes = await fetch(jsonUrl);
-  // prettier-ignore
-  const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
-  await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
+  try {
+    const jsonRes = await fetch(jsonUrl);
+    // prettier-ignore
+    const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
+    await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
 
-  /** @type {DownloadJob[]} */
-  const jobs = parsed.dataset.flatMap((dataset) =>
-    dataset.distribution.map((dist) => ({
-      dataset,
-      dist,
-      url: patchUrl(new URL(dist.downloadURL)),
-      outputPath,
-      attempts: 0,
-    }))
-  );
-  const totalJobs = jobs.length;
-  let nFinished = 0;
-  let nErrors = 0;
+    /** @type {DownloadJob[]} */
+    const jobs = parsed.dataset.flatMap((dataset) =>
+      dataset.distribution
+        .filter((dist) => {
+          try {
+            patchUrl(new URL(dist.downloadURL));
+            return true;
+          } catch (error) {
+            errorFile.write(
+              JSON.stringify({
+                url: dist.downloadURL,
+                ...encodeError(error),
+              }) + "\n"
+            );
+            return false;
+          }
+        })
+        .map((dist) => ({
+          dataset,
+          dist,
+          url: patchUrl(new URL(dist.downloadURL)),
+          outputPath,
+          attempts: 0,
+        }))
+    );
+    const totalJobs = jobs.length;
+    let nFinished = 0;
+    let nErrors = 0;
 
-  // por las dudas verificar que no hayan archivos duplicados
-  chequearIdsDuplicados(jobs);
+    // por las dudas verificar que no hayan archivos duplicados
+    chequearIdsDuplicados(jobs);
 
-  shuffleArray(jobs);
+    shuffleArray(jobs);
 
-  const promises = jobs.map((job) => {
-    let limit = limiters.get(job.url.host);
-    if (!limit) {
-      limit = pLimit(nThreads);
-      limiters.set(job.url.host, limit);
-    }
-    return limit(async () => {
-      try {
-        await downloadDistWithRetries(job);
-      } catch (error) {
-        await errorFile.write(
-          JSON.stringify({
-            url: job.url.toString(),
-            ...encodeError(error),
-          }) + "\n"
-        );
-        nErrors++;
-      } finally {
-        nFinished++;
-      }
-    });
-  });
+    const promises = jobs.map((job) => {
+      let limit = limiters.get(job.url.host);
+      if (!limit) {
+        limit = pLimit(nThreads);
+        limiters.set(job.url.host, limit);
+      }
+      return limit(async () => {
+        try {
+          await downloadDistWithRetries(job);
+        } catch (error) {
+          await errorFile.write(
+            JSON.stringify({
+              url: job.url.toString(),
+              ...encodeError(error),
+            }) + "\n"
+          );
+          nErrors++;
+        } finally {
+          nFinished++;
+        }
+      });
+    });
 
-  process.stderr.write(`info: 0/${totalJobs} done\n`);
-  const interval = setInterval(() => {
-    process.stderr.write(`info: ${nFinished}/${totalJobs} done\n`);
-  }, 30000);
-  await Promise.all(promises);
-  clearInterval(interval);
-  if (nErrors > 0) console.error(`Finished with ${nErrors} errors`);
+    process.stderr.write(`info[${jsonUrl.host}]: 0/${totalJobs} done\n`);
+    const interval = setInterval(() => {
+      process.stderr.write(
+        `info[${jsonUrl.host}]: ${nFinished}/${totalJobs} done\n`
+      );
+    }, 30000);
+    await Promise.all(promises);
+    clearInterval(interval);
+    if (nErrors > 0)
+      console.error(`${jsonUrl.host}: Finished with ${nErrors} errors`);
+  } finally {
+    errorFile.close();
+  }
 }
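Note: two fixes land in this hunk. errors.jsonl is now a real write stream (FileHandle.createWriteStream()) closed in a finally block even if fetching the catalog fails, and distributions whose downloadURL does not parse are filtered out up front and logged as one JSON line each, instead of throwing later while the job list is built. Every line of errors.jsonl is a self-contained JSON object with a url plus whatever fields the repo's encodeError() adds, which makes post-mortems easy; for example, a hypothetical summarizer (not in the repo):

// summarize_errors.mjs (hypothetical): count failures per host from an
// errors.jsonl written by the downloader above.
// usage: node summarize_errors.mjs datos.gob.ar/errors.jsonl
import { createReadStream } from "node:fs";
import { createInterface } from "node:readline";

const counts = new Map();
const lines = createInterface({ input: createReadStream(process.argv[2]) });
for await (const line of lines) {
  if (!line.trim()) continue;
  const { url } = JSON.parse(line);
  let host;
  try {
    host = new URL(url).host;
  } catch {
    host = "(unparseable URL)"; // entries logged by the new downloadURL filter
  }
  counts.set(host, (counts.get(host) ?? 0) + 1);
}
console.log(counts);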
@@ -125,7 +151,7 @@ async function downloadDistWithRetries(job, attempts = 0) {
   else if (
     !(error instanceof StatusCodeError) &&
     !(error instanceof TooManyRedirectsError) &&
-    attempts < 5
+    attempts < 10
   ) {
     await wait(5000);
     return await downloadDistWithRetries(job, attempts + 1);
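Note: the retry budget doubles from 5 to 10 attempts, keeping the flat 5-second pause, and errors that retrying cannot fix (StatusCodeError, TooManyRedirectsError) still fail immediately. The recursion boils down to this loop (a sketch; withRetries and downloadDist are hypothetical names, and the actual code stays recursive):

// Sketch of the retry policy above: transient errors retry up to 10 times
// with a fixed 5 s pause; fatal errors propagate immediately.
const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function withRetries(fn, { maxRetries = 10, pauseMs = 5000, isFatal = () => false } = {}) {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (error) {
      if (isFatal(error) || attempt >= maxRetries) throw error;
      await wait(pauseMs);
    }
  }
}

// usage, mirroring the conditions in the diff:
// await withRetries(() => downloadDist(job), {
//   isFatal: (e) => e instanceof StatusCodeError || e instanceof TooManyRedirectsError,
// });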