Mirror of https://github.com/catdevnull/preciazo.git, synced 2024-11-25 19:16:19 +00:00
Use a proxy for datos.produccion.gob.ar
parent d685ef8f75
commit 9fced663ff
2 changed files with 12 additions and 5 deletions

.github/workflows/sepa-precios-archiver.yml (vendored): 4 changes
@@ -27,10 +27,8 @@ jobs:
         B2_BUCKET_NAME: ${{ secrets.B2_BUCKET_NAME }}
         B2_BUCKET_KEY_ID: ${{ secrets.B2_BUCKET_KEY_ID }}
         B2_BUCKET_KEY: ${{ secrets.B2_BUCKET_KEY }}
+        DATOS_PRODUCCION_GOB_AR: https://proxy-datos-produccion-gob-ar.nulo.in
       run: |
-        # usar un servidor especifico porque parece que a veces
-        # bloquean el acceso desde afuera del país
-        sudo echo "190.2.53.185 datos.produccion.gob.ar" | sudo tee -a /etc/hosts
         cd sepa
         bun install --frozen-lockfile
         bun archiver.ts

sepa/archiver.ts
@@ -23,6 +23,11 @@ const B2_BUCKET_NAME = checkEnvVariable("B2_BUCKET_NAME");
 const B2_BUCKET_KEY_ID = checkEnvVariable("B2_BUCKET_KEY_ID");
 const B2_BUCKET_KEY = checkEnvVariable("B2_BUCKET_KEY");
 
+const DATOS_PRODUCCION_GOB_AR =
+  process.env.DATOS_PRODUCCION_GOB_AR || "https://datos.produccion.gob.ar";
+const processUrl = (url: string) =>
+  url.replace(/^https:\/\/datos\.produccion\.gob\.ar/, DATOS_PRODUCCION_GOB_AR);
+
 const s3 = new S3Client({
   endpoint: "https://s3.us-west-004.backblazeb2.com",
   region: "us-west-004",
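
A reading note on the hunk above: `processUrl` rewrites only URLs that start with the official host, swapping it for whatever DATOS_PRODUCCION_GOB_AR points to. A standalone sketch of that behavior; the helper is copied from the diff, while the example calls and expected-output comments are added here for illustration:

// Copy of the helper added in this commit, kept standalone so it can be run
// directly with bun; only the example calls and comments are additions.
const DATOS_PRODUCCION_GOB_AR =
  process.env.DATOS_PRODUCCION_GOB_AR || "https://datos.produccion.gob.ar";
const processUrl = (url: string) =>
  url.replace(/^https:\/\/datos\.produccion\.gob\.ar/, DATOS_PRODUCCION_GOB_AR);

// With DATOS_PRODUCCION_GOB_AR=https://proxy-datos-produccion-gob-ar.nulo.in this prints
// https://proxy-datos-produccion-gob-ar.nulo.in/api/3/action/package_show?id=sepa-precios;
// with the variable unset it prints the original URL unchanged.
console.log(
  processUrl("https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios")
);
// URLs on any other host never match the anchored regex and pass through untouched.
console.log(processUrl("https://example.com/sepa.zip"));
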
@@ -34,7 +39,10 @@ const s3 = new S3Client({
 
 async function getRawDatasetInfo(attempts = 0) {
   try {
-    return await $`curl -L https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios`.json();
+    const url = processUrl(
+      "https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios"
+    );
+    return await $`curl -L ${url}`.json();
   } catch (error) {
     if (attempts >= 4) {
       console.error(`❌ Error fetching dataset info`, error);
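
The hunk above only shows the top of `getRawDatasetInfo`; the `attempts` parameter and the `attempts >= 4` guard suggest a retry wrapper around the now-proxied request, but the rest of the function is not part of this diff. A minimal sketch of that shape, with everything past the console.error line marked as an assumption:

// Sketch only: the try block and the attempts guard mirror the diff; the throw,
// the 5 s pause, and the recursive retry are assumptions, not archiver.ts code.
import { $ } from "bun";

const DATOS_PRODUCCION_GOB_AR =
  process.env.DATOS_PRODUCCION_GOB_AR || "https://datos.produccion.gob.ar";
const processUrl = (url: string) =>
  url.replace(/^https:\/\/datos\.produccion\.gob\.ar/, DATOS_PRODUCCION_GOB_AR);

async function getRawDatasetInfo(attempts = 0): Promise<any> {
  try {
    const url = processUrl(
      "https://datos.produccion.gob.ar/api/3/action/package_show?id=sepa-precios"
    );
    // Bun Shell: run curl and parse its stdout as JSON.
    return await $`curl -L ${url}`.json();
  } catch (error) {
    if (attempts >= 4) {
      console.error(`❌ Error fetching dataset info`, error);
      throw error; // assumed: give up after five attempts
    }
    await new Promise((resolve) => setTimeout(resolve, 5_000)); // assumed back-off
    return getRawDatasetInfo(attempts + 1);
  }
}
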
@@ -137,7 +145,8 @@ for (const resource of datasetInfo.result.resources) {
   console.info(dir);
   try {
     const zip = join(dir, "zip");
-    await $`curl --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${resource.url}`;
+    const url = processUrl(resource.url);
+    await $`curl --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${url}`;
     await $`unzip ${zip} -d ${dir}`;
     await rm(zip);
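
For context on the last hunk, a hedged sketch that bundles the download step into one standalone function. `downloadAndExtract` and its parameters are invented names for illustration; the shell commands and the processUrl rewrite come from the diff, and the flag explanations are added comments:

// Hypothetical wrapper around the download step above; only the shell commands
// and the processUrl rewrite are taken from the diff.
import { $ } from "bun";
import { join } from "node:path";
import { rm } from "node:fs/promises";

const DATOS_PRODUCCION_GOB_AR =
  process.env.DATOS_PRODUCCION_GOB_AR || "https://datos.produccion.gob.ar";
const processUrl = (url: string) =>
  url.replace(/^https:\/\/datos\.produccion\.gob\.ar/, DATOS_PRODUCCION_GOB_AR);

async function downloadAndExtract(resourceUrl: string, dir: string) {
  const zip = join(dir, "zip");
  // Route the request through the proxy when DATOS_PRODUCCION_GOB_AR is set.
  const url = processUrl(resourceUrl);
  // --retry 8: up to 8 retries; --retry-delay 5: wait 5 s between tries;
  // --retry-all-errors: also retry on errors curl normally treats as fatal;
  // -L: follow redirects; -o: write the response body to the zip path.
  await $`curl --retry 8 --retry-delay 5 --retry-all-errors -L -o ${zip} ${url}`;
  await $`unzip ${zip} -d ${dir}`;
  await rm(zip);
}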