mirror of https://0xacab.org/sutty/sutty synced 2024-06-29 08:26:07 +00:00

fix: production dump
Some checks failed: ci/woodpecker/push/woodpecker pipeline failed

Author: Sutty, 2023-05-16 16:38:49 +00:00
parent 5b562643dd
commit 165594a7ee
3 changed files with 2269 additions and 620 deletions

config/application.rb

@@ -45,6 +45,8 @@ module Sutty
config.active_storage.queues.purge = :default
config.active_job.queue_adapter = :que
config.active_record.schema_format = :sql
config.to_prepare do
Dir.glob(File.join(File.dirname(__FILE__), '..', 'app', '**', '*_decorator.rb')).sort.each do |c|
Rails.configuration.cache_classes ? require(c) : load(c)

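Since the hunk above is truncated by the page, here is a sketch of the decorator-loading block as it presumably reads once complete; the closing end lines are an assumption inferred from Ruby syntax, not something visible in the excerpt:

# Sketch of the full block in config/application.rb, reconstructed from the visible hunk.
config.to_prepare do
  # Pick up every app/**/*_decorator.rb: `require` once when classes are cached
  # (production), `load` on every code reload otherwise (development).
  Dir.glob(File.join(File.dirname(__FILE__), '..', 'app', '**', '*_decorator.rb')).sort.each do |c|
    Rails.configuration.cache_classes ? require(c) : load(c)
  end
end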
db/schema.rb

@@ -1,620 +0,0 @@
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2023_04_24_174544) do
# These are extensions that must be enabled in order to support this database
enable_extension "dblink"
enable_extension "pg_trgm"
enable_extension "pgcrypto"
enable_extension "plpgsql"
create_table "access_logs", id: :uuid, default: -> { "public.gen_random_uuid()" }, force: :cascade do |t|
t.string "host"
t.float "msec"
t.string "server_protocol"
t.string "request_method"
t.string "request_completion"
t.string "uri"
t.string "query_string"
t.integer "status"
t.string "sent_http_content_type"
t.string "sent_http_content_encoding"
t.string "sent_http_etag"
t.string "sent_http_last_modified"
t.string "http_accept"
t.string "http_accept_encoding"
t.string "http_accept_language"
t.string "http_pragma"
t.string "http_cache_control"
t.string "http_if_none_match"
t.string "http_dnt"
t.string "http_user_agent"
t.string "http_origin"
t.float "request_time"
t.integer "bytes_sent"
t.integer "body_bytes_sent"
t.integer "request_length"
t.string "http_connection"
t.string "pipe"
t.integer "connection_requests"
t.string "geoip2_data_country_name"
t.string "geoip2_data_city_name"
t.string "ssl_server_name"
t.string "ssl_protocol"
t.string "ssl_early_data"
t.string "ssl_session_reused"
t.string "ssl_curves"
t.string "ssl_ciphers"
t.string "ssl_cipher"
t.string "sent_http_x_xss_protection"
t.string "sent_http_x_frame_options"
t.string "sent_http_x_content_type_options"
t.string "sent_http_strict_transport_security"
t.string "nginx_version"
t.integer "pid"
t.string "remote_user"
t.boolean "crawler", default: false
t.string "http_referer"
t.datetime "created_at", precision: 6
t.string "request_uri", default: ""
t.decimal "datacenter_co2"
t.decimal "network_co2"
t.decimal "consumer_device_co2"
t.decimal "production_co2"
t.decimal "total_co2"
t.string "node"
t.index ["geoip2_data_city_name"], name: "index_access_logs_on_geoip2_data_city_name"
t.index ["geoip2_data_country_name"], name: "index_access_logs_on_geoip2_data_country_name"
t.index ["host"], name: "index_access_logs_on_host"
t.index ["http_origin"], name: "index_access_logs_on_http_origin"
t.index ["http_user_agent"], name: "index_access_logs_on_http_user_agent"
t.index ["status"], name: "index_access_logs_on_status"
t.index ["uri"], name: "index_access_logs_on_uri"
end
create_table "action_text_rich_texts", force: :cascade do |t|
t.string "name", null: false
t.text "body"
t.string "record_type", null: false
t.integer "record_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["record_type", "record_id", "name"], name: "index_action_text_rich_texts_uniqueness", unique: true
end
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.integer "record_id", null: false
t.integer "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.string "service_name", null: false
t.index ["key", "service_name"], name: "index_active_storage_blobs_on_key_and_service_name", unique: true
end
create_table "active_storage_variant_records", force: :cascade do |t|
t.bigint "blob_id", null: false
t.string "variation_digest", null: false
t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
end
create_table "blazer_audits", force: :cascade do |t|
t.bigint "user_id"
t.bigint "query_id"
t.text "statement"
t.string "data_source"
t.datetime "created_at"
t.index ["query_id"], name: "index_blazer_audits_on_query_id"
t.index ["user_id"], name: "index_blazer_audits_on_user_id"
end
create_table "blazer_checks", force: :cascade do |t|
t.bigint "creator_id"
t.bigint "query_id"
t.string "state"
t.string "schedule"
t.text "emails"
t.text "slack_channels"
t.string "check_type"
t.text "message"
t.datetime "last_run_at"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["creator_id"], name: "index_blazer_checks_on_creator_id"
t.index ["query_id"], name: "index_blazer_checks_on_query_id"
end
create_table "blazer_dashboard_queries", force: :cascade do |t|
t.bigint "dashboard_id"
t.bigint "query_id"
t.integer "position"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["dashboard_id"], name: "index_blazer_dashboard_queries_on_dashboard_id"
t.index ["query_id"], name: "index_blazer_dashboard_queries_on_query_id"
end
create_table "blazer_dashboards", force: :cascade do |t|
t.bigint "creator_id"
t.text "name"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["creator_id"], name: "index_blazer_dashboards_on_creator_id"
end
create_table "blazer_queries", force: :cascade do |t|
t.bigint "creator_id"
t.string "name"
t.text "description"
t.text "statement"
t.string "data_source"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["creator_id"], name: "index_blazer_queries_on_creator_id"
end
create_table "build_stats", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "deploy_id"
t.bigint "bytes"
t.float "seconds"
t.string "action", null: false
t.text "log"
t.boolean "status", default: false
t.index ["deploy_id"], name: "index_build_stats_on_deploy_id"
end
create_table "codes_of_conduct", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "title"
t.text "description"
t.text "content"
end
create_table "csp_reports", id: :uuid, default: -> { "public.gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "disposition"
t.string "referrer"
t.string "blocked_uri"
t.string "document_uri"
t.string "effective_directive"
t.string "original_policy"
t.string "script_sample"
t.string "status_code"
t.string "violated_directive"
t.integer "column_number"
t.integer "line_number"
t.string "source_file"
end
create_table "deploys", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "site_id"
t.string "type"
t.text "values"
t.index ["site_id"], name: "index_deploys_on_site_id"
t.index ["type"], name: "index_deploys_on_type"
end
create_table "designs", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name"
t.text "description"
t.string "gem"
t.string "url"
t.string "license"
t.boolean "disabled", default: false
t.text "credits"
t.string "designer_url"
t.integer "priority"
t.index ["gem"], name: "index_designs_on_gem", unique: true
t.index ["name"], name: "index_designs_on_name", unique: true
end
create_table "distributed_press_publishers", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "instance"
t.text "token_ciphertext", null: false
t.datetime "expires_at"
end
create_table "indexed_posts", id: :uuid, default: -> { "public.gen_random_uuid()" }, force: :cascade do |t|
t.bigint "site_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "locale", default: "simple"
t.string "layout", null: false
t.string "path", null: false
t.string "title", default: ""
t.jsonb "front_matter", default: "{}"
t.string "content", default: ""
t.tsvector "indexed_content"
t.integer "order", default: 0
t.string "dictionary"
t.uuid "post_id"
t.index ["front_matter"], name: "index_indexed_posts_on_front_matter", using: :gin
t.index ["indexed_content"], name: "index_indexed_posts_on_indexed_content", using: :gin
t.index ["layout"], name: "index_indexed_posts_on_layout"
t.index ["locale"], name: "index_indexed_posts_on_locale"
t.index ["site_id"], name: "index_indexed_posts_on_site_id"
end
create_table "licencias", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name"
t.text "description"
t.text "deed"
t.string "url"
t.string "icons"
t.string "short_description"
t.index ["name"], name: "index_licencias_on_name", unique: true
end
create_table "log_entries", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.bigint "site_id"
t.text "text"
t.boolean "sent", default: false
t.index ["site_id"], name: "index_log_entries_on_site_id"
end
create_table "maintenances", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.text "message"
t.datetime "estimated_from"
t.datetime "estimated_to"
t.boolean "are_we_back", default: false
end
create_table "mobility_string_translations", force: :cascade do |t|
t.string "locale", null: false
t.string "key", null: false
t.string "value"
t.string "translatable_type"
t.integer "translatable_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["translatable_id", "translatable_type", "key"], name: "index_mobility_string_translations_on_translatable_attribute"
t.index ["translatable_id", "translatable_type", "locale", "key"], name: "index_mobility_string_translations_on_keys", unique: true
t.index ["translatable_type", "key", "value", "locale"], name: "index_mobility_string_translations_on_query_keys"
end
create_table "mobility_text_translations", force: :cascade do |t|
t.string "locale", null: false
t.string "key", null: false
t.text "value"
t.string "translatable_type"
t.integer "translatable_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["translatable_id", "translatable_type", "key"], name: "index_mobility_text_translations_on_translatable_attribute"
t.index ["translatable_id", "translatable_type", "locale", "key"], name: "index_mobility_text_translations_on_keys", unique: true
end
create_table "privacy_policies", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "title"
t.text "description"
t.text "content"
end
create_table "que_jobs", comment: "7", force: :cascade do |t|
t.integer "priority", limit: 2, default: 100, null: false
t.datetime "run_at", default: -> { "now()" }, null: false
t.text "job_class", null: false
t.integer "error_count", default: 0, null: false
t.text "last_error_message"
t.text "queue", default: "default", null: false
t.text "last_error_backtrace"
t.datetime "finished_at"
t.datetime "expired_at"
t.jsonb "args", default: [], null: false
t.jsonb "data", default: {}, null: false
t.integer "job_schema_version", null: false
t.jsonb "kwargs", default: {}, null: false
t.index ["args"], name: "que_jobs_args_gin_idx", opclass: :jsonb_path_ops, using: :gin
t.index ["data"], name: "que_jobs_data_gin_idx", opclass: :jsonb_path_ops, using: :gin
t.index ["job_schema_version", "queue", "priority", "run_at", "id"], name: "que_poll_idx", where: "((finished_at IS NULL) AND (expired_at IS NULL))"
t.index ["kwargs"], name: "que_jobs_kwargs_gin_idx", opclass: :jsonb_path_ops, using: :gin
t.check_constraint "(char_length(last_error_message) <= 500) AND (char_length(last_error_backtrace) <= 10000)", name: "error_length"
t.check_constraint "(jsonb_typeof(data) = 'object'::text) AND ((NOT (data ? 'tags'::text)) OR ((jsonb_typeof((data -> 'tags'::text)) = 'array'::text) AND (jsonb_array_length((data -> 'tags'::text)) <= 5) AND que_validate_tags((data -> 'tags'::text))))", name: "valid_data"
t.check_constraint "char_length(queue) <= 100", name: "queue_length"
t.check_constraint "jsonb_typeof(args) = 'array'::text", name: "valid_args"
t.check_constraint nil, name: "job_class_length"
end
create_table "que_lockers", primary_key: "pid", id: :integer, default: nil, force: :cascade do |t|
t.integer "worker_count", null: false
t.integer "worker_priorities", null: false, array: true
t.integer "ruby_pid", null: false
t.text "ruby_hostname", null: false
t.text "queues", null: false, array: true
t.boolean "listening", null: false
t.integer "job_schema_version", default: 1
t.check_constraint "(array_ndims(queues) = 1) AND (array_length(queues, 1) IS NOT NULL)", name: "valid_queues"
t.check_constraint "(array_ndims(worker_priorities) = 1) AND (array_length(worker_priorities, 1) IS NOT NULL)", name: "valid_worker_priorities"
end
create_table "que_values", primary_key: "key", id: :text, force: :cascade do |t|
t.jsonb "value", default: {}, null: false
t.check_constraint "jsonb_typeof(value) = 'object'::text", name: "valid_value"
end
create_table "roles", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "site_id"
t.integer "usuarie_id"
t.string "rol"
t.boolean "temporal"
t.index ["site_id", "usuarie_id"], name: "index_roles_on_site_id_and_usuarie_id", unique: true
t.index ["site_id"], name: "index_roles_on_site_id"
t.index ["usuarie_id"], name: "index_roles_on_usuarie_id"
end
create_table "rollups", force: :cascade do |t|
t.string "name", null: false
t.string "interval", null: false
t.datetime "time", null: false
t.jsonb "dimensions", default: {}, null: false
t.float "value"
t.index ["name", "interval", "time", "dimensions"], name: "index_rollups_on_name_and_interval_and_time_and_dimensions", unique: true
end
create_table "sites", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name"
t.integer "design_id"
t.integer "licencia_id"
t.string "status", default: "waiting"
t.text "description"
t.string "title"
t.boolean "colaboracion_anonima", default: false
t.boolean "contact", default: false
t.string "private_key_ciphertext"
t.boolean "acepta_invitades", default: false
t.string "tienda_api_key_ciphertext", default: ""
t.string "tienda_url", default: ""
t.string "api_key_ciphertext"
t.string "slugify_mode", default: "default"
t.index ["design_id"], name: "index_sites_on_design_id"
t.index ["licencia_id"], name: "index_sites_on_licencia_id"
t.index ["name"], name: "index_sites_on_name", unique: true
end
create_table "stats", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.bigint "site_id"
t.string "name", null: false
t.index ["name"], name: "index_stats_on_name", using: :hash
t.index ["site_id"], name: "index_stats_on_site_id"
end
create_table "usuaries", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.string "unconfirmed_email"
t.integer "failed_attempts", default: 0, null: false
t.string "unlock_token"
t.datetime "locked_at"
t.string "invitation_token"
t.datetime "invitation_created_at"
t.datetime "invitation_sent_at"
t.datetime "invitation_accepted_at"
t.integer "invitation_limit"
t.string "invited_by_type"
t.integer "invited_by_id"
t.integer "invitations_count", default: 0
t.string "lang", default: "es"
t.datetime "privacy_policy_accepted_at"
t.datetime "terms_of_service_accepted_at"
t.datetime "code_of_conduct_accepted_at"
t.datetime "available_for_feedback_accepted_at"
t.index ["confirmation_token"], name: "index_usuaries_on_confirmation_token", unique: true
t.index ["email"], name: "index_usuaries_on_email", unique: true
t.index ["invitation_token"], name: "index_usuaries_on_invitation_token", unique: true
t.index ["invitations_count"], name: "index_usuaries_on_invitations_count"
t.index ["invited_by_id"], name: "index_usuaries_on_invited_by_id"
t.index ["invited_by_type", "invited_by_id"], name: "index_usuaries_on_invited_by_type_and_invited_by_id"
t.index ["reset_password_token"], name: "index_usuaries_on_reset_password_token", unique: true
t.index ["unlock_token"], name: "index_usuaries_on_unlock_token", unique: true
end
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
# no candidate create_trigger statement could be found, creating an adapter-specific one
execute(<<-SQL)
CREATE OR REPLACE FUNCTION public.que_job_notify()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
DECLARE
locker_pid integer;
sort_key json;
BEGIN
-- Don't do anything if the job is scheduled for a future time.
IF NEW.run_at IS NOT NULL AND NEW.run_at > now() THEN
RETURN null;
END IF;
-- Pick a locker to notify of the job's insertion, weighted by their number
-- of workers. Should bounce pseudorandomly between lockers on each
-- invocation, hence the md5-ordering, but still touch each one equally,
-- hence the modulo using the job_id.
SELECT pid
INTO locker_pid
FROM (
SELECT *, last_value(row_number) OVER () + 1 AS count
FROM (
SELECT *, row_number() OVER () - 1 AS row_number
FROM (
SELECT *
FROM public.que_lockers ql, generate_series(1, ql.worker_count) AS id
WHERE
listening AND
queues @> ARRAY[NEW.queue] AND
ql.job_schema_version = NEW.job_schema_version
ORDER BY md5(pid::text || id::text)
) t1
) t2
) t3
WHERE NEW.id % count = row_number;
IF locker_pid IS NOT NULL THEN
-- There's a size limit to what can be broadcast via LISTEN/NOTIFY, so
-- rather than throw errors when someone enqueues a big job, just
-- broadcast the most pertinent information, and let the locker query for
-- the record after it's taken the lock. The worker will have to hit the
-- DB in order to make sure the job is still visible anyway.
SELECT row_to_json(t)
INTO sort_key
FROM (
SELECT
'job_available' AS message_type,
NEW.queue AS queue,
NEW.priority AS priority,
NEW.id AS id,
-- Make sure we output timestamps as UTC ISO 8601
to_char(NEW.run_at AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS.US"Z"') AS run_at
) t;
PERFORM pg_notify('que_listener_' || locker_pid::text, sort_key::text);
END IF;
RETURN null;
END
$function$
SQL
# no candidate create_trigger statement could be found, creating an adapter-specific one
execute("CREATE TRIGGER que_job_notify AFTER INSERT ON \"que_jobs\" FOR EACH ROW WHEN (NOT COALESCE(current_setting('que.skip_notify'::text, true), ''::text) = 'true'::text) EXECUTE FUNCTION que_job_notify()")
# no candidate create_trigger statement could be found, creating an adapter-specific one
execute(<<-SQL)
CREATE OR REPLACE FUNCTION public.que_state_notify()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
DECLARE
row record;
message json;
previous_state text;
current_state text;
BEGIN
IF TG_OP = 'INSERT' THEN
previous_state := 'nonexistent';
current_state := public.que_determine_job_state(NEW);
row := NEW;
ELSIF TG_OP = 'DELETE' THEN
previous_state := public.que_determine_job_state(OLD);
current_state := 'nonexistent';
row := OLD;
ELSIF TG_OP = 'UPDATE' THEN
previous_state := public.que_determine_job_state(OLD);
current_state := public.que_determine_job_state(NEW);
-- If the state didn't change, short-circuit.
IF previous_state = current_state THEN
RETURN null;
END IF;
row := NEW;
ELSE
RAISE EXCEPTION 'Unrecognized TG_OP: %', TG_OP;
END IF;
SELECT row_to_json(t)
INTO message
FROM (
SELECT
'job_change' AS message_type,
row.id AS id,
row.queue AS queue,
coalesce(row.data->'tags', '[]'::jsonb) AS tags,
to_char(row.run_at AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS.US"Z"') AS run_at,
to_char(now() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS.US"Z"') AS time,
CASE row.job_class
WHEN 'ActiveJob::QueueAdapters::QueAdapter::JobWrapper' THEN
coalesce(
row.args->0->>'job_class',
'ActiveJob::QueueAdapters::QueAdapter::JobWrapper'
)
ELSE
row.job_class
END AS job_class,
previous_state AS previous_state,
current_state AS current_state
) t;
PERFORM pg_notify('que_state', message::text);
RETURN null;
END
$function$
SQL
# no candidate create_trigger statement could be found, creating an adapter-specific one
execute("CREATE TRIGGER que_state_notify AFTER INSERT OR DELETE OR UPDATE ON \"que_jobs\" FOR EACH ROW WHEN (NOT COALESCE(current_setting('que.skip_notify'::text, true), ''::text) = 'true'::text) EXECUTE FUNCTION que_state_notify()")
create_trigger("indexed_posts_before_insert_update_row_tr", :compatibility => 1).
on("indexed_posts").
before(:insert, :update) do
<<-SQL_ACTIONS
new.indexed_content := to_tsvector(('pg_catalog.' || new.dictionary)::regconfig, coalesce(new.title, '') || '
' || coalesce(new.content,''));
SQL_ACTIONS
end
create_trigger("access_logs_before_insert_row_tr", :compatibility => 1).
on("access_logs").
before(:insert) do
"new.created_at := to_timestamp(new.msec);"
end
end
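With config.active_record.schema_format = :sql (set in the application.rb hunk above), the db:schema:dump and db:schema:load tasks read and write db/structure.sql instead of db/schema.rb, so the Que functions, triggers, and check constraints above get captured verbatim by pg_dump rather than approximated with execute and create_trigger calls. As a minimal sketch, assuming the standard ActiveRecord tasks API of Rails 6.1+, loading the SQL structure programmatically amounts to:

# Sketch only: roughly what `bin/rails db:schema:load` does when schema_format is :sql.
# Could be run with `bin/rails runner` against a hypothetical script file.
ActiveRecord::Tasks::DatabaseTasks.load_schema(
  ActiveRecord::Base.connection_db_config,    # current database configuration
  :sql,                                       # matches config.active_record.schema_format
  Rails.root.join('db', 'structure.sql').to_s # the file added by this commit
)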

db/structure.sql (new file, 2267 additions)

File diff suppressed because it is too large.