Compare commits


2 commits

SHA1       Message                     Date
ca71dba59c retry action automatically  2024-09-16 10:19:15 -03:00
ad01f99fc3 ubicloud again...           2024-09-16 09:51:08 -03:00
2 changed files with 15 additions and 6 deletions

View file

@@ -7,8 +7,8 @@ on:
 jobs:
   archive-prices:
-    # runs-on: ubicloud-standard-4
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2
+    # runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -23,6 +23,7 @@ jobs:
       - name: Run archiver script
         env:
           GITHUB_TOKEN: ${{ secrets.ARCHIVE_GITHUB_TOKEN }}
+          GH_TOKEN: ${{ secrets.GH_PRECIAZO_TOKEN }}
           B2_BUCKET_NAME: ${{ secrets.B2_BUCKET_NAME }}
           B2_BUCKET_KEY_ID: ${{ secrets.B2_BUCKET_KEY_ID }}
           B2_BUCKET_KEY: ${{ secrets.B2_BUCKET_KEY }}

View file

@@ -32,17 +32,25 @@ const s3 = new S3Client({
   },
 });
-async function getRawDatasetInfo() {
+async function getRawDatasetInfo(attempts = 0) {
   try {
     return await $`curl -L https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios`.json();
   } catch (error) {
+    if (attempts >= 4) {
+      console.error(`❌ Error fetching dataset info`, error);
+      if (process.env.GITHUB_RUN_ID) {
+        console.info(`🔄 Retrying action`);
+        await $`gh run rerun ${process.env.GITHUB_RUN_ID} --workflow sepa-precios-archiver`;
+      }
+      process.exit(1);
+    }
     console.error(
       `❌ Error fetching dataset info`,
       error,
-      `retrying in 5min...`
+      `retrying in 30s...`
     );
-    await new Promise((resolve) => setTimeout(resolve, 5 * 60 * 1000));
-    return await getRawDatasetInfo();
+    await new Promise((resolve) => setTimeout(resolve, 30 * 1000));
+    return await getRawDatasetInfo(attempts + 1);
   }
 }
 async function saveFileIntoRepo(fileName: string, fileContent: string) {
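
Taken together, the two files move the failure handling from "retry every 5 minutes forever" to "retry a bounded number of times in-process, then have the job re-run itself": the workflow exposes GH_TOKEN (the environment variable the gh CLI reads for authentication), and the script calls gh run rerun on its own run once the retries are exhausted. Below is a minimal, hypothetical sketch of that pattern as a reusable helper, assuming Bun's shell $ like the original script; withRetries and MAX_ATTEMPTS are illustrative names, not identifiers from the repo.

// Sketch only: a generic "retry a few times, then ask GitHub Actions to
// re-run the workflow" helper in the style of the patched script.
import { $ } from "bun";

const MAX_ATTEMPTS = 4; // mirrors the `attempts >= 4` cap in the diff

async function withRetries<T>(fn: () => Promise<T>, attempts = 0): Promise<T> {
  try {
    return await fn();
  } catch (error) {
    if (attempts >= MAX_ATTEMPTS) {
      console.error(`❌ Giving up after ${attempts + 1} attempts`, error);
      // Inside GitHub Actions, re-run this workflow run via the gh CLI
      // (authenticated through GH_TOKEN), as the diff does, then fail the job.
      if (process.env.GITHUB_RUN_ID) {
        console.info(`🔄 Retrying action`);
        await $`gh run rerun ${process.env.GITHUB_RUN_ID} --workflow sepa-precios-archiver`;
      }
      process.exit(1);
    }
    console.error(`❌ Attempt ${attempts + 1} failed, retrying in 30s...`, error);
    await new Promise((resolve) => setTimeout(resolve, 30 * 1000));
    return withRetries(fn, attempts + 1);
  }
}

// Usage, matching the dataset-info fetch above:
// const info = await withRetries(() =>
//   $`curl -L https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios`.json()
// );

Presumably the 30-second in-process backoff covers short upstream blips, while the workflow-level rerun gets a fresh runner for anything longer.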