mirror of https://github.com/catdevnull/preciazo.git
parent 71eeaef547
commit 57aaad78b0
2 changed files with 4 additions and 9 deletions
.github/workflows/sepa-precios-archiver.yml (vendored): 3 changes
@@ -27,8 +27,7 @@ jobs:
           B2_BUCKET_NAME: ${{ secrets.B2_BUCKET_NAME }}
           B2_BUCKET_KEY_ID: ${{ secrets.B2_BUCKET_KEY_ID }}
           B2_BUCKET_KEY: ${{ secrets.B2_BUCKET_KEY }}
-          # DATOS_PRODUCCION_GOB_AR: https://proxy-datos-produccion-gob-ar.nulo.in
+          DATOS_PRODUCCION_GOB_AR: https://proxy-datos-produccion-gob-ar.nulo.in
-          PROXY_URI: ${{ secrets.PROXY_URI }}
         run: |
           cd sepa
           bun install --frozen-lockfile
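Note on this env change: the workflow stops passing a curl-level PROXY_URI and instead uncomments DATOS_PRODUCCION_GOB_AR, a mirror of datos.produccion.gob.ar. The processUrl helper seen in the TypeScript script changed by the remaining hunks presumably rewrites dataset URLs to that mirror. A minimal sketch of such a helper, assuming a simple host swap (the real implementation is not part of this diff):

// Hypothetical sketch only: the actual processUrl in the repo is not shown
// here. Assumes DATOS_PRODUCCION_GOB_AR holds a base URL mirroring
// https://datos.produccion.gob.ar.
function processUrl(url: string): string {
  const mirror = process.env.DATOS_PRODUCCION_GOB_AR;
  if (!mirror) return url; // no mirror configured: hit the origin directly
  return url.replace("https://datos.produccion.gob.ar", mirror);
}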
@@ -37,16 +37,12 @@ const s3 = new S3Client({
   },
 });
 
-const CURL_PROXY_ARG = process.env.PROXY_URI
-  ? { raw: `-x ${process.env.PROXY_URI}` }
-  : "";
-
 async function getRawDatasetInfo(attempts = 0) {
   try {
     const url = processUrl(
       "https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios"
     );
-    return await $`curl ${CURL_PROXY_ARG} -L ${url}`.json();
+    return await $`curl -L ${url}`.json();
   } catch (error) {
     if (attempts >= 4) {
       console.error(
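For context on the deleted CURL_PROXY_ARG: Bun Shell escapes every interpolated value, so a multi-word flag like "-x <uri>" had to go through the { raw: ... } escape hatch, while the empty-string fallback interpolates to no argument. A self-contained sketch of that old pattern (the example.com URL is illustrative):

import { $ } from "bun";

// Bun Shell escapes interpolations by default; { raw } splices text into the
// command verbatim. The "" fallback is what the deleted code used so that -x
// is only passed when PROXY_URI is set.
const CURL_PROXY_ARG = process.env.PROXY_URI
  ? { raw: `-x ${process.env.PROXY_URI}` }
  : "";

// Illustrative call: fetch and parse JSON through the optional proxy.
const info = await $`curl ${CURL_PROXY_ARG} -L https://example.com/api.json`.json();
console.log(info);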
@@ -150,7 +146,7 @@ for (const resource of datasetInfo.result.resources) {
   try {
     const zip = join(dir, "zip");
     const url = processUrl(resource.url);
-    await $`curl ${CURL_PROXY_ARG} --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${url}`;
+    await $`curl --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${url}`;
     await $`unzip ${zip} -d ${dir}`;
     await rm(zip);
 
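The download step is otherwise unchanged: curl retries up to 8 times with a 5-second delay, the zip is extracted, then deleted to free disk. A standalone sketch of that flow; the temp-dir handling here is an assumption, since the real dir comes from elsewhere in the script:

import { $ } from "bun";
import { mkdtemp, rm } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";

// Sketch of the fetch-extract-cleanup step. --retry-all-errors makes curl
// retry even on errors that --retry alone would treat as non-transient.
async function downloadAndExtract(url: string): Promise<string> {
  const dir = await mkdtemp(join(tmpdir(), "sepa-")); // assumed; real dir differs
  const zip = join(dir, "zip");
  await $`curl --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${url}`;
  await $`unzip ${zip} -d ${dir}`;
  await rm(zip); // drop the archive once extracted
  return dir;
}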
@@ -178,7 +174,7 @@ for (const resource of datasetInfo.result.resources) {
   const fileName = `${resource.id}-${basename(resource.url)}`;
   if (await checkFileExistsInB2(fileName)) continue;
   console.log(`⬇️ Downloading and reuploading ${resource.url}`);
-  const response = await $`curl ${CURL_PROXY_ARG} -L ${resource.url}`.blob();
+  const response = await $`curl -L ${resource.url}`.blob();
 
   await uploadToB2Bucket(fileName, response);
 }
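The reupload path buffers the body with Bun Shell's .blob() and hands it to uploadToB2Bucket, which is not shown in this diff. A hypothetical sketch of that helper, assuming the module-level S3Client (seen in the earlier hunk header) targets Backblaze B2's S3-compatible endpoint using the secrets from the workflow; the endpoint and region below are placeholders:

import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";

// Assumed reconstruction: the real client config lives in the
// `new S3Client({ ... })` block referenced by the hunk above.
const s3 = new S3Client({
  endpoint: "https://s3.us-west-004.backblazeb2.com", // placeholder B2 endpoint
  region: "us-west-004", // placeholder region
  credentials: {
    accessKeyId: process.env.B2_BUCKET_KEY_ID!,
    secretAccessKey: process.env.B2_BUCKET_KEY!,
  },
});

async function uploadToB2Bucket(fileName: string, blob: Blob) {
  await s3.send(
    new PutObjectCommand({
      Bucket: process.env.B2_BUCKET_NAME,
      Key: fileName,
      Body: new Uint8Array(await blob.arrayBuffer()), // SDK wants bytes, not a Blob, in Node/Bun
    })
  );
}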