testear y documentar check duplicados

This commit is contained in:
Cat /dev/Nulo 2023-11-27 20:13:14 -03:00
parent fb67c517f3
commit e1e851f797
2 changed files with 7 additions and 16 deletions

1
.gitignore vendored
View file

@@ -1,3 +1,4 @@
node_modules/ node_modules/
dataJsons/ dataJsons/
log log
prueba

View file

@@ -36,13 +36,16 @@ const jsonString = json.join("");
const parsed = JSON.parse(jsonString); const parsed = JSON.parse(jsonString);
const jobs = parsed.dataset.flatMap((dataset) => const jobs = parsed.dataset.flatMap((dataset) =>
dataset.distribution.map((dist) => ({ dataset, dist })) dataset.distribution.map((dist) => ({
dataset,
dist,
url: new URL(dist.downloadURL),
}))
); );
// forma barrani de distribuir carga entre servidores
shuffleArray(jobs);
const totalJobs = jobs.length; const totalJobs = jobs.length;
let nFinished = 0; let nFinished = 0;
// por las dudas verificar que no hayan archivos duplicados
const duplicated = hasDuplicates( const duplicated = hasDuplicates(
jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`) jobs.map((j) => `${j.dataset.identifier}/${j.dist.identifier}`)
); );
@@ -152,19 +155,6 @@ function hasDuplicates(array) {
return new Set(array).size !== array.length; return new Set(array).size !== array.length;
} }
// https://stackoverflow.com/a/12646864
/**
* @argument {any[]} array
*/
function shuffleArray(array) {
for (var i = array.length - 1; i > 0; i--) {
var j = Math.floor(Math.random() * (i + 1));
var temp = array[i];
array[i] = array[j];
array[j] = temp;
}
}
/** /**
* @argument {number} ms * @argument {number} ms
*/ */