Mirror of https://github.com/catdevnull/transicion-desordenada-diablo
Synced 2024-11-26 11:26:18 +00:00

Compare commits: b9fabe6d8d ... fd41efab3d

No commits in common. "b9fabe6d8db3620c9676b612a2a80e8cd81af63f" and "fd41efab3d4e4eb92f34e2f1140fddb73ceffdda" have entirely different histories.

25 changed files with 138 additions and 2295 deletions

@@ -1,11 +0,0 @@
-*/node_modules/
-node_modules/
-dataJsons/
-log
-prueba
-datos.gob.ar*
-data/
-data*
-downloader/data
-
-*.zip

Containerfile (new file, 19 lines)

@@ -0,0 +1,19 @@
+
+FROM docker.io/alpine:3.18 as build
+RUN apk add --no-cache npm
+RUN npm install -g esbuild
+WORKDIR /tmp/build
+
+COPY package.json .
+RUN npm install
+
+COPY download_json.js .
+RUN esbuild --bundle --format=cjs --platform=node --outfile=build.js --sourcemap=inline download_json.js
+
+FROM docker.io/alpine:3.18
+RUN apk add --no-cache nodejs-current tini
+COPY pki/ca_intermediate_root_bundle.pem /usr/lib/ca_intermediate_root_bundle.pem
+COPY --from=build /tmp/build/build.js /usr/local/bin/download_json.js
+ENV NODE_EXTRA_CA_CERTS=/usr/lib/ca_intermediate_root_bundle.pem
+WORKDIR /data
+CMD ["/sbin/tini", "node", "--enable-source-maps", "/usr/local/bin/download_json.js"]

@@ -1,16 +0,0 @@
-{
-  "name": "common",
-  "type": "module",
-  "version": "1.0.0",
-  "description": "",
-  "main": "index.js",
-  "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
-  },
-  "keywords": [],
-  "author": "",
-  "license": "ISC",
-  "dependencies": {
-    "zod": "^3.22.4"
-  }
-}

@@ -1,14 +0,0 @@
-{
-  "compilerOptions": {
-    "lib": ["es2023"],
-    "module": "ES2020",
-    "target": "es2022",
-
-    "strict": true,
-    "esModuleInterop": true,
-    "skipLibCheck": true,
-    "forceConsistentCasingInFileNames": true,
-    "emitDeclarationOnly": true,
-    "declaration": true
-  }
-}

@@ -1,19 +0,0 @@
-
-FROM docker.io/alpine:3.18 as build
-RUN apk add --no-cache npm \
-    && npm install -g esbuild pnpm
-
-COPY .. /tmp/build/
-WORKDIR /tmp/build/downloader
-RUN pnpm install \
-    && esbuild --bundle --format=cjs --platform=node --outfile=download_json.build.js --sourcemap=inline download_json.js \
-    && esbuild --bundle --format=cjs --platform=node --outfile=generate_dump_metadata.build.js --sourcemap=inline generate_dump_metadata.js
-
-FROM docker.io/alpine:3.18
-RUN apk add --no-cache nodejs-current tini
-COPY downloader/pki/ca_intermediate_root_bundle.pem /usr/lib/ca_intermediate_root_bundle.pem
-COPY --from=build /tmp/build/downloader/download_json.build.js /usr/local/bin/download_json.js
-COPY --from=build /tmp/build/downloader/generate_dump_metadata.build.js /usr/local/bin/generate_dump_metadata.js
-ENV NODE_EXTRA_CA_CERTS=/usr/lib/ca_intermediate_root_bundle.pem
-WORKDIR /data
-CMD ["/sbin/tini", "node", "--enable-source-maps", "/usr/local/bin/download_json.js"]

@@ -4,7 +4,7 @@ import { Agent, fetch, request, setGlobalDispatcher } from "undici";
 import { join, normalize } from "node:path";
 import pLimit from "p-limit";

-export const sitiosPorDefecto = [
+const sitiosPorDefecto = [
   "https://datos.gob.ar/data.json",
   "http://datos.energia.gob.ar/data.json",
   "https://datos.magyp.gob.ar/data.json",

@@ -63,7 +63,7 @@ export const sitiosPorDefecto = [
 setGlobalDispatcher(
   new Agent({
     pipelining: 0,
-  })
+  }),
 );

 /** key es host

@@ -85,26 +85,29 @@ let jsonUrls = process.argv.slice(2);
 if (jsonUrls.length < 1) {
   jsonUrls = sitiosPorDefecto;
 }
+writeFile("readme.txt", generateReadme(jsonUrls));
 for (const url of jsonUrls)
   downloadFromData(url).catch((error) =>
-    console.error(`${url} FALLÓ CON`, error)
+    console.error(`${url} FALLÓ CON`, error),
   );

 /**
- * @param {string} jsonUrl
+ * @param {string} jsonUrlString
  */
-async function downloadFromData(jsonUrl) {
-  const outputPath = generateOutputPath(jsonUrl);
-  const jsonRes = await fetch(jsonUrl);
-  // prettier-ignore
-  const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
+async function downloadFromData(jsonUrlString) {
+  const jsonUrl = new URL(jsonUrlString);
+  const outputPath = `${jsonUrl.host}${jsonUrl.pathname}`.replaceAll("/", "_");
   await mkdir(outputPath, { recursive: true });
-  await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));
-  await writeFile(join(outputPath, "url.txt"), jsonUrl);
   const errorFile = (
     await open(join(outputPath, "errors.jsonl"), "w")
   ).createWriteStream();
+
+  try {
+    const jsonRes = await fetch(jsonUrl);
+    // prettier-ignore
+    const parsed = /** @type {{ dataset: Dataset[] }} */(await jsonRes.json())
+    await writeFile(join(outputPath, "data.json"), JSON.stringify(parsed));

   /** @type {DownloadJob[]} */
   const jobs = parsed.dataset.flatMap((dataset) =>
     dataset.distribution
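
Aside: the output-path computation inlined above maps a portal URL to a flat directory name. A minimal sketch of what it yields, using an illustrative URL (the computation itself is the line added in the hunk):

```ts
// Illustrative input; the expression mirrors the added line above.
const jsonUrl = new URL("https://datos.gob.ar/data.json");
const outputPath = `${jsonUrl.host}${jsonUrl.pathname}`.replaceAll("/", "_");
console.log(outputPath); // "datos.gob.ar_data.json"
```

That flat name is exactly the per-portal directory described later in the generated readme.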

@@ -114,7 +117,7 @@ async function downloadFromData(jsonUrl) {
       return true;
     } catch (error) {
       errorFile.write(
-        JSON.stringify(encodeError({ dataset, dist }, error)) + "\n"
+        JSON.stringify(encodeError({ dataset, dist }, error)) + "\n",
       );
       return false;
     }

@@ -125,7 +128,7 @@ async function downloadFromData(jsonUrl) {
         url: patchUrl(new URL(dist.downloadURL)),
         outputPath,
         attempts: 0,
-      }))
+      })),
   );
   const totalJobs = jobs.length;
   let nFinished = 0;

@@ -157,7 +160,7 @@ async function downloadFromData(jsonUrl) {
   process.stderr.write(`info[${outputPath}]: 0/${totalJobs} done\n`);
   const interval = setInterval(() => {
     process.stderr.write(
-      `info[${outputPath}]: ${nFinished}/${totalJobs} done\n`
+      `info[${outputPath}]: ${nFinished}/${totalJobs} done\n`,
     );
   }, 30000);
   await Promise.all(promises);
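
The `promises` array awaited above is built with p-limit (imported at the top of the file), but that wiring sits outside these hunks. A minimal sketch of the pattern, with an assumed concurrency cap and placeholder URLs:

```ts
import pLimit from "p-limit";

// Assumed cap and URLs — the real values live outside this diff.
const limit = pLimit(32);
const urls = ["https://example.org/a.csv", "https://example.org/b.csv"];
// Each task is queued; at most 32 fetches run concurrently.
const promises = urls.map((u) => limit(() => fetch(u)));
await Promise.all(promises);
```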

@@ -169,15 +172,6 @@ async function downloadFromData(jsonUrl) {
   }
 }

-/**
- * @param {string} jsonUrlString
- */
-export function generateOutputPath(jsonUrlString) {
-  const jsonUrl = new URL(jsonUrlString);
-  const outputPath = `${jsonUrl.host}${jsonUrl.pathname}`.replaceAll("/", "_");
-  return outputPath;
-}
-
 /**
  * @argument {DownloadJob} job
  * @argument {number} attempts

@@ -234,12 +228,12 @@ async function downloadDist({ dist, dataset, url, outputPath }) {
   const fileDirPath = join(
     outputPath,
     sanitizeSuffix(dataset.identifier),
-    sanitizeSuffix(dist.identifier)
+    sanitizeSuffix(dist.identifier),
   );
   await mkdir(fileDirPath, { recursive: true });
   const filePath = join(
     fileDirPath,
-    sanitizeSuffix(dist.fileName || dist.identifier)
+    sanitizeSuffix(dist.fileName || dist.identifier),
   );

   if (!res.body) throw new Error("no body");

@@ -278,11 +272,11 @@ function sanitizeSuffix(path) {
  */
 function chequearIdsDuplicados(jobs, id) {
   const duplicated = hasDuplicates(
-    jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`)
+    jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`),
   );
   if (duplicated) {
     console.error(
-      `ADVERTENCIA[${id}]: ¡encontré duplicados! es posible que se pisen archivos entre si`
+      `ADVERTENCIA[${id}]: ¡encontré duplicados! es posible que se pisen archivos entre si`,
     );
   }
 }
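
`hasDuplicates` is referenced here but defined outside these hunks; a minimal sketch of the assumed behaviour:

```ts
// Assumed helper: a Set collapses duplicates, so a size mismatch means repeats.
function hasDuplicates(array: string[]): boolean {
  return new Set(array).size !== array.length;
}
```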

@@ -340,3 +334,45 @@ function shuffleArray(array) {
     [array[i], array[j]] = [array[j], array[i]];
   }
 }
+
+/**
+ * @param {string[]} portales
+ */
+function generateReadme(portales) {
+  // basado en el readme de Patricio
+  return `Dumps de Portales de Datos Abiertos de la República Argentina
+=============================================================
+
+El zip contiene todo lo que se pudo descargar de los portales seleccionados, que fueron:
+${portales.map((p) => `- ${p}`).join("\n")}
+
+La carpeta está ordenada en subcarpetas cuyo nombre corresponde al ID del dataset/distribución del portal. De esta forma,
+leyendo el data.json se puede programaticamente y de manera simple volver a mapear qué archivo le corresponde a cada
+distribución.
+
+Formato:
+
+- {url de data.json sin protocolo y con / reemplazado por _}/
+  - data.json
+  - errors.jsonl: archivo con todos los errores que se obtuvieron al intentar descargar todo.
+  - {identifier de dataset}/
+    - {identifier de distribution}/
+      - {fileName (o, si no existe, identifier de distribution)}
+
+Ejemplo:
+
+- datos.gob.ar_data.json/
+  - data.json
+  - errors.jsonl
+  - turismo_fbc269ea-5f71-45b6-b70c-8eb38a03b8db/
+    - turismo_0774a0bb-71c2-44d7-9ea6-780e6bd06d50/
+      - cruceristas-por-puerto-residencia-desagregado-por-pais-mes.csv
+    - ...
+  - energia_0d4a18ee-9371-439a-8a94-4f53a9822664/
+    - energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866/
+      - energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866 (este archivo no tiene fileName en el data.json, entonces se reutiliza el identifier)
+  - ...
+
+Este dump fue generado con transicion-desordenada-diablo: https://gitea.nulo.in/Nulo/transicion-desordenada-diablo
+`;
+}

@@ -1,98 +0,0 @@
-// @ts-check
-import { readFile, readdir, writeFile } from "node:fs/promises";
-import { zData } from "common/schema.js";
-import { join } from "node:path";
-
-const dumpDir = process.argv[2];
-generateMetadata(dumpDir);
-
-/**
- * @param {string} dumpDir
- */
-async function generateMetadata(dumpDir) {
-  if (!dumpDir) {
-    console.error("Especifica una carpeta para generar los metadatos, porfa.");
-    process.exit(1);
-  }
-
-  const files = await readdir(dumpDir, { withFileTypes: true });
-  const sites = await Promise.all(
-    files
-      .filter((file) => file.isDirectory())
-      .map(async (file) => {
-        const path = join(file.path, file.name);
-        const data = await loadDataJson(path);
-        const url = await readFile(join(path, "url.txt"), "utf-8");
-        return {
-          title: data.title,
-          description: data.description,
-          url,
-          path: file.name,
-        };
-      })
-  );
-  /** @type {import("common/schema.js").DumpMetadata} */
-  const dumpMetadata = { sites };
-  await writeFile(
-    join(dumpDir, "dump-metadata.json"),
-    JSON.stringify(dumpMetadata)
-  );
-  await writeFile(
-    join(dumpDir, "readme.txt"),
-    generateReadme(sites.map((s) => s.url))
-  );
-}
-
-/**
- * @param {string[]} portales
- */
-function generateReadme(portales) {
-  // basado en el readme de Patricio
-  return `Dumps de Portales de Datos Abiertos de la República Argentina
-=============================================================
-
-Esta carpeta contiene todo lo que se pudo descargar de los portales seleccionados, que fueron:
-${portales.map((p) => `- ${p}`).join("\n")}
-
-La carpeta está ordenada en subcarpetas cuyo nombre corresponde al ID del dataset/distribución del portal. De esta forma,
-leyendo el data.json se puede programaticamente y de manera simple volver a mapear qué archivo le corresponde a cada
-distribución.
-
-Formato:
-
-- {url de data.json sin protocolo y con / reemplazado por _}/
-  - data.json
-  - errors.jsonl: archivo con todos los errores que se obtuvieron al intentar descargar todo.
-  - {identifier de dataset}/
-    - {identifier de distribution}/
-      - {fileName (o, si no existe, identifier de distribution)}
-
-Ejemplo:
-
-- datos.gob.ar_data.json/
-  - data.json
-  - errors.jsonl
-  - turismo_fbc269ea-5f71-45b6-b70c-8eb38a03b8db/
-    - turismo_0774a0bb-71c2-44d7-9ea6-780e6bd06d50/
-      - cruceristas-por-puerto-residencia-desagregado-por-pais-mes.csv
-    - ...
-  - energia_0d4a18ee-9371-439a-8a94-4f53a9822664/
-    - energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866/
-      - energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866 (este archivo no tiene fileName en el data.json, entonces se reutiliza el identifier)
-  - ...
-
-Este dump fue generado con transicion-desordenada-diablo: https://gitea.nulo.in/Nulo/transicion-desordenada-diablo
-
-Se puede usar el frontend en esa repo para ver el dump.
-`;
-}
-
-/**
- * @param {string} dir carpeta del dump
- */
-async function loadDataJson(dir) {
-  const text = await readFile(join(dir, "data.json"), "utf-8");
-  const json = JSON.parse(text);
-  const data = zData.parse(json);
-  return data;
-}
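
The deleted generator above writes dump-metadata.json with one entry per portal directory. Its implied shape, written out as an illustrative type (field names come from the code; the type itself is not part of the diff):

```ts
type DumpMetadata = {
  sites: {
    title: string;       // from the portal's data.json
    description: string; // from the portal's data.json
    url: string;         // contents of url.txt
    path: string;        // directory name inside the dump
  }[];
};
```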

@@ -1,5 +1,5 @@
 {
-  "name": "downloader",
+  "name": "js",
   "type": "module",
   "version": "1.0.0",
   "description": "",

@@ -12,11 +12,10 @@
   "license": "ISC",
   "dependencies": {
     "p-limit": "^5.0.0",
-    "undici": "^5.28.0",
-    "common": "workspace:"
+    "undici": "^5.28.0"
   },
   "devDependencies": {
     "@tsconfig/node20": "^20.1.2",
     "@types/node": "^20.10.0"
   }
 }

@@ -29,15 +29,6 @@ docker run --rm -it -v ./data:/data gitea.nulo.in/nulo/transicion-desordenada-di
 # descarga datos.gob.ar
 ```

-## terminar dump
-
-```
-# generar dump-metadata.json (útil para el frontend) y readme.txt
-node generate_dump_metadata.js data/
-# comprimir todo excepto readme.txt
-pigz -1r data/*/
-```
-
 ## formato de repo guardado

 - `{url de data.json sin protocolo y con / reemplazado por _}/`

@@ -53,10 +44,10 @@ pigz -1r data/*/
   - `data.json`
   - `errors.jsonl`
   - `turismo_fbc269ea-5f71-45b6-b70c-8eb38a03b8db/`
-  - `turismo_0774a0bb-71c2-44d7-9ea6-780e6bd06d50/`
+    - `turismo_0774a0bb-71c2-44d7-9ea6-780e6bd06d50/`
     - `cruceristas-por-puerto-residencia-desagregado-por-pais-mes.csv`
   - ...
   - `energia_0d4a18ee-9371-439a-8a94-4f53a9822664/`
-  - `energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866/`
-    - `energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866` (este archivo no tiene fileName en el data.json, entonces se reutiliza el `identifier`)
+    - `energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866/`
+      - `energia_9f602b6e-2bef-4ac4-895d-f6ecd6bb1866` (este archivo no tiene fileName en el data.json, entonces se reutiliza el `identifier`)
   - ...

@@ -13,11 +13,5 @@
     "skipLibCheck": true,
     "forceConsistentCasingInFileNames": true,
     "moduleResolution": "node16"
-  },
-  "include": [
-    "**/*.ts",
-    "**/*.js",
-    // https://github.com/microsoft/TypeScript/issues/33136#issuecomment-578699134
-    "../common/**/*.js"
-  ]
+  }
 }

@@ -11,11 +11,9 @@
   },
   "devDependencies": {
     "@sveltejs/vite-plugin-svelte": "^3.0.0",
-    "@tailwindcss/typography": "^0.5.10",
     "@tsconfig/svelte": "^5.0.2",
     "@types/streamsaver": "^2.0.4",
     "autoprefixer": "^10.4.16",
-    "common": "workspace:",
     "postcss": "^8.4.32",
     "prettier": "^3.1.0",
     "prettier-plugin-tailwindcss": "^0.5.9",

@@ -1,24 +1,19 @@
 <script lang="ts">
-  import type { Params } from "navaid";
   import { currentRoute, type ComponentType } from "./lib/router";

   import NotFound from "./lib/routes/NotFound.svelte";
-  import Home from "./lib/routes/Home.svelte";
-  import Dump from "./lib/routes/Dump.svelte";
-  import Portal from "./lib/routes/Portal.svelte";
+  import DumpIndex from "./lib/routes/DumpIndex.svelte";
   import Dataset from "./lib/routes/Dataset.svelte";

   function chooseComponent(route: ComponentType) {
     if (route === "NotFound") return NotFound;
-    else if (route === "Home") return Home;
     else if (route === "Dataset") return Dataset;
-    else if (route === "Portal") return Portal;
-    else if (route === "Dump") return Dump;
+    else if (route === "DumpIndex") return DumpIndex;
   }

-  $: r = {
-    component: chooseComponent($currentRoute.component) as any,
-    params: $currentRoute.params as any,
-  };
+  $: component = chooseComponent($currentRoute.component);
+  $: params = $currentRoute.params as any;
 </script>

-<svelte:component this={r.component} params={r.params} />
+<svelte:component this={component} {params} />

@@ -1,5 +1,5 @@
 import streamSaver from "streamsaver";
-import { zData, type Distribution, zError, zDumpMetadata } from "common/schema";
+import { zData, type Distribution, zError } from "./schema";

 export async function downloadFile(
   dataPath: string,

@@ -48,16 +48,13 @@ async function loadGzippedJson(url: string): Promise<unknown> {
   return json;
 }

-export async function fetchData(portalUrl: string) {
-  const json = await loadGzippedJson(`${portalUrl}/data.json.gz`);
+const endpoint = "http://localhost:8081";
+export const gobData = `${endpoint}/datos.gob.ar_data.json`;
+export async function fetchData(url: string) {
+  const json = await loadGzippedJson(`${url}/data.json.gz`);
   if (import.meta.env.DEV) console.debug(json);
   return zData.parse(json);
 }
-export async function fetchDumpMetadata(dumpUrl: string) {
-  const json = await loadGzippedJson(`${dumpUrl}/dump-metadata.json.gz`);
-  if (import.meta.env.DEV) console.debug(json);
-  return zDumpMetadata.parse(json);
-}
 export async function fetchErrors(url: string) {
   const res = await fetchGzipped(`${url}/errors.jsonl.gz`);
   const text = await res.text();
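
`loadGzippedJson` and `fetchGzipped` are defined above this hunk and are not part of the diff; a hedged sketch of what such a helper can look like in the browser, using the Streams API:

```ts
// Hypothetical reimplementation, not the file's actual code.
async function loadGzippedJsonSketch(url: string): Promise<unknown> {
  const res = await fetch(url);
  if (!res.ok || !res.body) throw new Error(`HTTP ${res.status} for ${url}`);
  // DecompressionStream("gzip") inflates the payload client-side.
  const stream = res.body.pipeThrough(new DecompressionStream("gzip"));
  const text = await new Response(stream).text();
  return JSON.parse(text) as unknown;
}
```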

@@ -1,55 +0,0 @@
-<script lang="ts">
-  import { inject } from "regexparam";
-  import ChevronRight from "eva-icons/outline/svg/chevron-right-outline.svg?component";
-  import { routes } from "../router";
-  import Portal from "../routes/Portal.svelte";
-
-  export let params:
-    | { dumpUrl: string }
-    | { dumpUrl: string; portal: string }
-    | { dumpUrl: string; portal: string; id: string };
-
-  function generateDumpName(dumpUrl: string) {
-    const clean = decodeURIComponent(dumpUrl).replace(/\/+$/, "");
-    return clean.slice(clean.lastIndexOf("/") + 1);
-  }
-
-  $: dumpName = generateDumpName(params.dumpUrl);
-</script>
-
-<nav class="flex justify-between m-2">
-  <ol
-    class="flex items-center mb-3 text-sm text-neutral-500 [&_.active-breadcrumb]:text-neutral-600 [&_.active-breadcrumb]:font-medium sm:mb-0"
-  >
-    <li class="flex items-center h-full">
-      <a
-        href={inject(routes.Dump, params)}
-        class="inline-flex items-center px-2 py-1.5 space-x-1.5 rounded-md hover:text-neutral-900 hover:bg-neutral-100"
-      >
-        <span>{dumpName}</span>
-      </a>
-    </li>
-    {#if "portal" in params}
-      <ChevronRight class="w-5 h-5 text-gray-400" fill="currentColor" />
-      <li>
-        <a
-          href={inject(routes.Portal, params)}
-          class="inline-flex items-center px-2 py-1.5 space-x-1.5 font-normal rounded-md hover:bg-neutral-100 hover:text-neutral-900"
-        >
-          <span>{params.portal}</span>
-        </a>
-      </li>
-    {/if}
-    {#if "id" in params}
-      <ChevronRight class="w-5 h-5 text-gray-400" fill="currentColor" />
-      <li>
-        <a
-          href={inject(routes.Dataset, params)}
-          class="inline-flex items-center px-2 py-1.5 space-x-1.5 font-normal rounded-md hover:bg-neutral-100 hover:text-neutral-900"
-        >
-          <span>{params.id}</span>
-        </a>
-      </li>
-    {/if}
-  </ol>
-</nav>

@@ -2,13 +2,11 @@ import navaid, { type Params } from "navaid";
 import { writable } from "svelte/store";

 export const routes = {
-  Home: "/",
-  Dump: "/dump/:dumpUrl",
-  Portal: "/dump/:dumpUrl/:portal",
-  Dataset: "/dump/:dumpUrl/:portal/dataset/:id",
+  DumpIndex: "/d/:dumpUrl",
+  Dataset: "/d/:dumpUrl/dataset/:id",
 };

-export type ComponentType = "NotFound" | keyof typeof routes;
+export type ComponentType = "NotFound" | "DumpIndex" | "Dataset";

 type Route = {
   component: ComponentType;

@@ -17,11 +15,12 @@
 export const currentRoute = writable<Route>();

 export const router = navaid(undefined, () =>
-  currentRoute.set({ component: "NotFound" }),
+  currentRoute.set({ component: "NotFound" })
 );
-for (const [component, path] of Object.entries(routes)) {
-  router.on(path, (params) =>
-    currentRoute.set({ component: component as keyof typeof routes, params }),
-  );
-}
+router.on(routes.DumpIndex, (params) =>
+  currentRoute.set({ component: "DumpIndex", params })
+);
+router.on(routes.Dataset, (params) =>
+  currentRoute.set({ component: "Dataset", params })
+);
 router.listen();
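
The new `/d/:dumpUrl/dataset/:id` pattern is filled elsewhere with regexparam's `inject`; a sketch with hypothetical parameter values:

```ts
import { inject } from "regexparam";

// Route patterns as defined in the new revision above.
const routes = { DumpIndex: "/d/:dumpUrl", Dataset: "/d/:dumpUrl/dataset/:id" };
// Hypothetical values for illustration only.
const href = inject(routes.Dataset, {
  dumpUrl: encodeURIComponent("https://archivos.nulo.ar/dump-2023-12-08"),
  id: "turismo_fbc269ea-5f71-45b6-b70c-8eb38a03b8db",
});
// href: "/d/https%3A%2F%2Farchivos.nulo.ar%2Fdump-2023-12-08/dataset/turismo_fbc269ea-5f71-45b6-b70c-8eb38a03b8db"
```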

@@ -1,23 +1,20 @@
 <script lang="ts">
   import ArrowBack from "eva-icons/outline/svg/arrow-back-outline.svg?component";
   import ExternalLink from "eva-icons/outline/svg/external-link-outline.svg?component";
-  import { downloadFile, fetchData, fetchErrors } from "../fetch";
+  import { downloadFile, fetchData, fetchErrors } from "../dump";
   import NotFound from "./NotFound.svelte";
   import { inject } from "regexparam";
   import { routes } from "../router";
-  import Nav from "../nav/Nav.svelte";

-  export let params: { dumpUrl: string; portal: string; id: string };
-  $: url = decodeURIComponent(params.dumpUrl) + "/" + params.portal;
+  export let params: { dumpUrl: string; id: string };
+  const url = decodeURIComponent(params.dumpUrl);

-  $: data = Promise.all([fetchData(url), fetchErrors(url)]).then(
+  const data = Promise.all([fetchData(url), fetchErrors(url)]).then(
     ([data, errors]) => ({ data, errors }),
   );
 </script>

 <main class="mx-auto max-w-3xl">
-  <Nav {params} />
-
   <div class="rounded-lg border bg-white m-2">
     {#await data}
       <p class="p-6">Cargando dataset...</p>

@@ -30,10 +27,7 @@
       <small>
         <a
           class="flex text-blue-500 leading-none gap-1 items-center"
-          href={inject(routes.Portal, {
-            dumpUrl: params.dumpUrl,
-            portal: params.portal,
-          })}
+          href={inject(routes.DumpIndex, { dumpUrl: params.dumpUrl })}
         >
           <ArrowBack fill="currentColor" class="h-[1.25em]" /> Viendo {data.title}
         </a>

@@ -74,15 +68,9 @@
           </span>
         {/if}
       </h3>
-      {#if !dist.downloadURL}
+      {#if error}
         <small class="block text-red-700">
-          No está en este archivo porque el link de descarga estaba
-          roto en la fuente al momento de descargarlo :(
-        </small>
-      {:else if error}
-        <small class="block text-red-700">
-          No está en este archivo porque hubo un error al descargarlo
-          :(
+          No está en este dump porque hubo un error al descargarlo :(
         </small>
       {/if}
       {#if dist.fileName}

@@ -98,17 +86,15 @@
           >Descargar</button
         >
       {/if}
-      {#if dist.downloadURL}
-        <a
-          class="flex items-center leading-none text-gray-600 gap-1 pt-2"
-          href={dist.downloadURL}
-          target="_blank"
-          rel="noopener"
-        >
-          <ExternalLink fill="currentColor" class="h-4" />
-          Fuente
-        </a>
-      {/if}
+      <a
+        class="flex items-center leading-none text-gray-600 gap-1 pt-2"
+        href={dist.downloadURL}
+        target="_blank"
+        rel="noopener"
+      >
+        <ExternalLink fill="currentColor" class="h-4" />
+        Fuente
+      </a>
     </div>
   </li>
 {/each}

@@ -1,79 +0,0 @@
-<script lang="ts">
-  import { inject } from "regexparam";
-  import ExternalLink from "eva-icons/outline/svg/external-link-outline.svg?component";
-  import { fetchDumpMetadata } from "../fetch";
-  import { routes } from "../router";
-
-  export let params: { dumpUrl: string };
-  $: url = decodeURIComponent(params.dumpUrl);
-
-  $: metadataPromise = fetchDumpMetadata(url);
-</script>
-
-<main class="mx-auto max-w-3xl">
-  <div class="rounded-lg border bg-white m-2">
-    {#await metadataPromise}
-      <p class="p-6">Cargando..</p>
-    {:then metadata}
-      <header class="py-5 px-6 border-b border-b-gray-200 leading-none">
-        <small>
-          Viendo archivo en
-          <a
-            class="underline text-blue-500"
-            target="_blank"
-            rel="noopener"
-            href={url}>{url}</a
-          >
-        </small>
-        <!-- <h1 class="font-bold text-3xl">{data.title}</h1>
-        <p class="text-xl">{data.description}</p>
-        {#if data.homepage}
-          <a
-            class="flex items-center leading-none text-gray-600 gap-1 pt-2"
-            href={arreglarHomepageUrl(data.homepage)}
-            target="_blank"
-            rel="noopener"
-          >
-            <ExternalLink fill="currentColor" class="h-4" />
-            Fuente
-          </a>
-        {/if} -->
-      </header>
-
-      <ul class="divide-y divide-gray-100">
-        {#each metadata.sites as site}
-          {@const portalLink = inject(routes.Portal, {
-            dumpUrl: params.dumpUrl,
-            portal: site.path,
-          })}
-          <li>
-            <div class="flex px-6 py-5 justify-between gap-3">
-              <div class="flex flex-col">
-                <h3 class="text-lg">{site.title}</h3>
-                <p class="text-sm">{site.description}</p>
-              </div>
-              <div class="flex flex-col items-center justify-center shrink-0">
-                <a
-                  href={portalLink}
-                  class="inline-flex items-center justify-center px-4 py-2 text-sm font-medium tracking-wide text-white transition-colors duration-200 bg-blue-600 rounded-md hover:bg-blue-700 focus:ring-2 focus:ring-offset-2 focus:ring-blue-700 focus:shadow-outline focus:outline-none"
-                  >Ver portal</a
-                >
-                <a
-                  class="flex items-center leading-none text-gray-600 gap-1 pt-2"
-                  href={site.url}
-                  target="_blank"
-                  rel="noopener"
-                >
-                  <ExternalLink fill="currentColor" class="h-4" />
-                  Fuente
-                </a>
-              </div>
-            </div>
-          </li>
-        {/each}
-      </ul>
-    {:catch error}
-      Hubo un error intenando cargar este archivo. <pre>{error}</pre>
-    {/await}
-  </div>
-</main>

@@ -2,15 +2,14 @@
   import { inject } from "regexparam";
   import ArrowForward from "eva-icons/outline/svg/arrow-forward-outline.svg?component";
   import ExternalLink from "eva-icons/outline/svg/external-link-outline.svg?component";
-  import { fetchData, fetchErrors } from "../fetch";
+  import { fetchData, fetchErrors } from "../dump";
   import { routes } from "../router";
-  import type { Dataset } from "common/schema";
-  import Nav from "../nav/Nav.svelte";
+  import type { Dataset } from "../schema";

-  export let params: { dumpUrl: string; portal: string };
-  $: url = `${decodeURIComponent(params.dumpUrl)}/${params.portal}`;
+  export let params: { dumpUrl: string };
+  const url = decodeURIComponent(params.dumpUrl);

-  $: data = Promise.all([fetchData(url), fetchErrors(url)]).then(
+  const data = Promise.all([fetchData(url), fetchErrors(url)]).then(
     ([data, errors]) => ({ data, errors }),
   );


@@ -20,36 +19,21 @@
     return url;
   }

-  function processStringForSearch(str: string): string {
-    return str
-      .toLowerCase()
-      .replaceAll("á", "a")
-      .replaceAll("é", "e")
-      .replaceAll("í", "i")
-      .replaceAll("ó", "o")
-      .replaceAll("ú", "u")
-      .replaceAll("ñ", "n");
-  }
-
   let query: string = "";
   function filterDatasets(datasets: Dataset[], query: string): Dataset[] {
-    const q = processStringForSearch(query);
     return datasets.filter(
       (dataset) =>
-        processStringForSearch(dataset.identifier).includes(q) ||
-        processStringForSearch(dataset.title).includes(q),
+        dataset.identifier.includes(query) || dataset.title.includes(query),
     );
   }
 </script>

 <main class="mx-auto max-w-3xl">
-  <Nav {params} />
-
   <div class="rounded-lg border bg-white m-2">
     {#await data}
       <p class="p-6">Cargando..</p>
     {:then { data, errors }}
-      <header class="py-5 px-6 border-b border-b-gray-200 leading-none">
+      <header class="py-5 px-6 border-b border-b-gray-200">
         <small>
           Viendo portal archivado de
           <a
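
The deleted `processStringForSearch` made the dataset filter accent- and case-insensitive; the revision on the right side falls back to exact substring matching. For reference, what the deleted helper produced:

```ts
// The deleted helper, reproduced from the hunk above, plus a sample call.
function processStringForSearch(str: string): string {
  return str
    .toLowerCase()
    .replaceAll("á", "a")
    .replaceAll("é", "e")
    .replaceAll("í", "i")
    .replaceAll("ó", "o")
    .replaceAll("ú", "u")
    .replaceAll("ñ", "n");
}
console.log(processStringForSearch("Índice de Población")); // "indice de poblacion"
```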

@@ -87,7 +71,6 @@
       {#each filterDatasets(data.dataset, query) as dataset}
         {@const datasetLink = inject(routes.Dataset, {
           dumpUrl: params.dumpUrl,
-          portal: params.portal,
           id: dataset.identifier,
         })}
         <li>

@@ -109,7 +92,7 @@
       {/each}
     </ul>
   {:catch error}
-    Hubo un error intenando cargar este portal archivado. <pre>{error}</pre>
+    Hubo un error intenando cargar este dump. <pre>{error}</pre>
   {/await}
   </div>
 </main>

@@ -1,44 +0,0 @@
-<script lang="ts">
-  import { inject } from "regexparam";
-  import { routes } from "../router";
-</script>
-
-<main class="p-2">
-  <div class="mx-auto rounded-lg border bg-white py-5 px-6 prose">
-    <h1>Archivo de portales de datos abiertos</h1>
-    <p>
-      Esta herramienta permite ver datos en archivos de portales de datos
-      abiertos de <a
-        href="https://github.com/catdevnull/transicion-desordenada-diablo/"
-        rel="noopener">transicion-desordenada-diablo</a
-      >
-      (un mejor nombre sería genial), creada en el marco de
-      <a href="https://bit.ly/CartaDatosAbiertos">un pedido hecho</a> al gobierno
-      entrante el 10 de diciembre de 2023 por garantizar el mantenimiento de las
-      políticas de datos públicos en Argentina.
-    </p>
-
-    <div class="not-prose flex place-content-center">
-      <a
-        href={inject(routes.Dump, {
-          dumpUrl: encodeURIComponent(
-            "https://archivos.nulo.ar/dump-2023-12-08/",
-          ),
-        })}
-        class="flex items-center justify-center px-4 py-2 text-xl font-medium text-white transition-colors duration-200 bg-blue-600 rounded-md hover:bg-blue-700 focus:ring-2 focus:ring-offset-2 focus:ring-blue-700 focus:shadow-outline focus:outline-none text-center"
-      >
-        Acceder al archivo creado el 8 de diciembre de 2023
-      </a>
-    </div>
-
-    <p>
-      Los archivos y las herramientas fueron creados por
-      <a href="https://nulo.ar">Nulo</a> con ayuda de varias personas. El código
-      está disponible
-      <a
-        href="https://github.com/catdevnull/transicion-desordenada-diablo/"
-        rel="noopener">en GitHub</a
-      >.
-    </p>
-  </div>
-</main>

@@ -12,7 +12,7 @@ export const zDistribution = z.object({
   title: z.string(),
   description: z.string().optional(),
 });
-/** @typedef {z.infer<typeof zDistribution>} Distribution */
+export type Distribution = z.infer<typeof zDistribution>;
 export const zDataset = z.object({
   identifier: z.string(),
   title: z.string(),

@@ -21,7 +21,7 @@ export const zDataset = z.object({
   distribution: z.array(zDistribution),
   landingPage: z.string().optional(),
 });
-/** @typedef {z.infer<typeof zDataset>} Dataset */
+export type Dataset = z.infer<typeof zDataset>;
 export const zData = z.object({
   title: z.string(),
   description: z.string(),

@@ -36,15 +36,3 @@ export const zError = z.object({
   kind: z.enum(["generic_error", "http_error", "infinite_redirect"]),
   error: z.string().optional(),
 });
-
-export const zDumpMetadata = z.object({
-  sites: z.array(
-    z.object({
-      title: z.string(),
-      description: z.string(),
-      url: z.string(),
-      path: z.string(),
-    })
-  ),
-});
-/** @typedef {z.infer<typeof zDumpMetadata>} DumpMetadata */
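
Both sides of these hunks derive static types from the zod schemas with `z.infer`, so `parse()` validates at runtime and types the result in one step. An abridged sketch (shapes taken from the hunks above; the sample input is illustrative):

```ts
import { z } from "zod";

// Abridged copy of zDistribution from the hunk above.
const zDistribution = z.object({
  identifier: z.string(),
  title: z.string(),
  description: z.string().optional(),
});
type Distribution = z.infer<typeof zDistribution>;

// parse() throws a ZodError on shape mismatch, so a malformed data.json fails loudly.
const dist: Distribution = zDistribution.parse(
  JSON.parse('{"identifier":"abc","title":"Ejemplo"}'),
);
```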

@@ -4,5 +4,5 @@ export default {
   theme: {
     extend: {},
   },
-  plugins: [require("@tailwindcss/typography")],
+  plugins: [],
 };

@@ -15,12 +15,6 @@
     "checkJs": true,
     "isolatedModules": true
   },
-  "include": [
-    "src/**/*.ts",
-    "src/**/*.js",
-    "src/**/*.svelte",
-    // https://github.com/microsoft/TypeScript/issues/33136#issuecomment-578699134
-    "../common/**/*.js"
-  ],
+  "include": ["src/**/*.ts", "src/**/*.js", "src/**/*.svelte"],
   "references": [{ "path": "./tsconfig.node.json" }]
 }

pnpm-lock.yaml (1779 lines changed)
File diff suppressed because it is too large.

@@ -1,4 +0,0 @@
-packages:
-  - "frontend/"
-  - "downloader/"
-  - "common/"

readme.md (17 lines)

@@ -1,17 +0,0 @@
-# Transicion Desordeanada (diablo)
-
-Herramientas para descargar masivamente portales de datos abiertos y generar un archivo, que luego se puede ver en una página web.
-
-## [Downloader](./downloader)
-
-El descargador.
-
-## [Frontend](./frontend)
-
-La página web para ver el archivo generado.
-
-## Glosario
-
-- Portal (de datos): algo que tiene un data.json en un formato similar a [DCAT 2](https://www.w3.org/TR/vocab-dcat-2/) (suelen ser portales [CKAN](https://ckan.org/))
-- Archivo (dump): una versión descargada de uno o varios portales de datos
-- Dataset: conjunto de archivos que suelen estar relacionados