feat: Sync supabase secrets to gitea

feat: Backup supabase
This commit is contained in:
2026-04-14 20:22:55 -04:00
parent 4e23df5a8e
commit b74528b6f1
3 changed files with 176 additions and 0 deletions

View File

@@ -33,6 +33,10 @@ Load `docs/context/architecture.md` when working on playbooks, EDA rulebooks, or
6. Do not bulk-read documents. Process one at a time: read, summarize to disk, release from context before reading next. For the detailed protocol, read `docs/context/processing-protocol.md`.
7. Sub-agent returns must be structured, not free-form prose. Use output contracts from `templates/claude-templates.md`.
## Ansible Conventions
- **Never embed vars in playbooks.** All variables go in the inventory at `/home/ptoal/Dev/inventories/bab-inventory` — in `host_vars/<host>/` or `group_vars/<group>/` as appropriate.
## Where Things Live
- `templates/claude-templates.md` — summary, handoff, decision, analysis, task, output contract templates (read on demand)

View File

@@ -0,0 +1,108 @@
---
# Play 1 of 2 in this file: dump the Supabase production Postgres database to
# a gzip-compressed file in a temporary directory on the controller. The
# second play copies the dump to bab1, enforces retention, and removes the
# temporary directory.
#
# Required inventory vars: vault_addr.
# Vault path read (kv mount): oys/prod/supabase — assumed to carry a
# `postgres_url` key holding a full libpq connection URL; TODO confirm
# against the Vault entry.
- name: Dump Supabase prod database to local temp file
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Read Supabase prod secrets from Vault
      ansible.builtin.set_fact:
        _supabase_prod: "{{ lookup('community.hashi_vault.hashi_vault',
          'secret=oys/prod/supabase url=' + vault_addr + ' engine_mount_point=kv') }}"
      no_log: true  # connection URL embeds credentials
    # On the 1st of the month the dump gets a YYYY-MM "-monthly" name so the
    # monthly retention rules in the next play apply to it; any other day gets
    # a full timestamp matched by the "regular" retention rules. %-d (a
    # glibc/CPython strftime extension, fine on Linux controllers) drops the
    # zero padding, so the string compare against '1' matches only day 1 and
    # not days 10-19.
    - name: Set backup filename
      ansible.builtin.set_fact:
        _backup_filename: >-
          {{ 'oysqn-prod-' + now(fmt='%Y-%m') + '-monthly.sql.gz'
          if now(fmt='%-d') == '1'
          else 'oysqn-prod-' + now(fmt='%Y%m%d-%H%M%S') + '.sql.gz' }}
    - name: Create local temporary directory
      ansible.builtin.tempfile:
        state: directory
        suffix: .backup
      register: _tmpdir
    # pipefail makes a pg_dump failure fail the task even though gzip itself
    # exits 0. NOTE(review): the connection URL (credentials included) is
    # passed as a pg_dump command-line argument and so is briefly visible in
    # `ps` on the controller; no_log only keeps it out of Ansible output. If
    # this task fails, the temp dir (and any partial dump) is left behind —
    # cleanup happens only at the end of the next play.
    - name: Dump and compress database
      ansible.builtin.shell:
        cmd: "set -o pipefail && pg_dump '{{ _supabase_prod.postgres_url }}' | gzip > '{{ _tmpdir.path }}/{{ _backup_filename }}'"
        executable: /bin/bash
      changed_when: true  # always produces a new dump file
      no_log: true
# Play 2 of 2: runs on the backup host. Pulls the dump produced by the play
# above (reached via hostvars['localhost']) onto bab1, then enforces two
# independent retention tracks — "regular" (timestamped names) and "monthly"
# — each pruned first by age, then by count. Finally removes the controller
# temp directory.
#
# Required inventory vars: backup_base_dir, backup_retain_regular_days,
# backup_retain_regular_count, backup_retain_monthly_days,
# backup_retain_monthly_count.
- name: Store backup on bab1 and enforce retention
  hosts: bab1.mgmt.toal.ca
  gather_facts: false
  tasks:
    - name: Ensure backup directory exists
      ansible.builtin.file:
        path: "{{ backup_base_dir }}"
        state: directory
        mode: '0750'
    # src is a controller-local path: ansible.builtin.copy without remote_src
    # copies controller -> managed host.
    - name: Copy backup file to bab1
      ansible.builtin.copy:
        src: "{{ hostvars['localhost']['_tmpdir']['path'] }}/{{ hostvars['localhost']['_backup_filename'] }}"
        dest: "{{ backup_base_dir }}/{{ hostvars['localhost']['_backup_filename'] }}"
        mode: '0640'
    # The patterns below are shell globs (find's default, not regexes). The
    # 8-digit date block means regular names (oysqn-prod-YYYYMMDD-HHMMSS) can
    # never match the monthly pattern (oysqn-prod-YYYY-MM-monthly) and vice
    # versa, so the two retention tracks never delete each other's files.
    - name: Find regular backup files older than retention period
      ansible.builtin.find:
        paths: "{{ backup_base_dir }}"
        patterns: "oysqn-prod-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9]*.sql.gz"
        age: "{{ backup_retain_regular_days }}d"
        age_stamp: mtime
      register: _regular_old
    - name: Delete regular backups beyond age limit
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: absent
      loop: "{{ _regular_old.files }}"
    # Re-scan after the age-based pruning so the count limit applies to what
    # actually remains (including the file copied above).
    - name: Find all regular backup files
      ansible.builtin.find:
        paths: "{{ backup_base_dir }}"
        patterns: "oysqn-prod-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9]*.sql.gz"
      register: _regular_all
    # Keeps the newest backup_retain_regular_count files: sort ascending by
    # mtime, then slice off the first max(len - count, 0) (i.e. oldest)
    # entries for deletion.
    - name: Delete oldest regular backups beyond count limit
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: absent
      loop: "{{ (_regular_all.files | sort(attribute='mtime'))[: [(_regular_all.files | length - backup_retain_regular_count), 0] | max | int] }}"
    # Monthly track: same age-then-count scheme with its own limits.
    - name: Find monthly backup files older than retention period
      ansible.builtin.find:
        paths: "{{ backup_base_dir }}"
        patterns: "oysqn-prod-[0-9][0-9][0-9][0-9]-[0-9][0-9]-monthly.sql.gz"
        age: "{{ backup_retain_monthly_days }}d"
        age_stamp: mtime
      register: _monthly_old
    - name: Delete monthly backups beyond age limit
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: absent
      loop: "{{ _monthly_old.files }}"
    - name: Find all monthly backup files
      ansible.builtin.find:
        paths: "{{ backup_base_dir }}"
        patterns: "oysqn-prod-[0-9][0-9][0-9][0-9]-[0-9][0-9]-monthly.sql.gz"
      register: _monthly_all
    - name: Delete oldest monthly backups beyond count limit
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: absent
      loop: "{{ (_monthly_all.files | sort(attribute='mtime'))[: [(_monthly_all.files | length - backup_retain_monthly_count), 0] | max | int] }}"
    # NOTE(review): this cleanup runs only if every task above succeeded — a
    # mid-play failure leaves the dump sitting in the controller temp dir.
    - name: Remove local temporary directory
      ansible.builtin.file:
        path: "{{ hostvars['localhost']['_tmpdir']['path'] }}"
        state: absent
      delegate_to: localhost

View File

@@ -0,0 +1,64 @@
---
# Publishes Supabase connection settings as Gitea Actions repository
# variables (ENV_FILE_DEV / ENV_FILE_PROD) so CI workflows can materialise a
# .env file without reading Vault themselves.
#
# Required inventory vars: vault_addr, gitea_base_url, gitea_owner,
# gitea_repo.
# Vault paths read (kv mount): oys/dev/supabase, oys/prod/supabase,
# oys/shared/infra/gitea_token. The Supabase secrets are assumed to expose
# `url` and `anon_key` keys, and the token secret a `value` key — TODO
# confirm against the Vault entries.
- name: Sync Supabase secrets to Gitea repo variables
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Read Supabase dev secrets from Vault
      ansible.builtin.set_fact:
        _supabase_dev: "{{ lookup('community.hashi_vault.hashi_vault',
          'secret=oys/dev/supabase url=' + vault_addr + ' engine_mount_point=kv') }}"
      no_log: true
    - name: Read Supabase prod secrets from Vault
      ansible.builtin.set_fact:
        _supabase_prod: "{{ lookup('community.hashi_vault.hashi_vault',
          'secret=oys/prod/supabase url=' + vault_addr + ' engine_mount_point=kv') }}"
      no_log: true
    - name: Read Gitea API token from Vault
      ansible.builtin.set_fact:
        _gitea_token: "{{ lookup('community.hashi_vault.hashi_vault',
          'secret=oys/shared/infra/gitea_token url=' + vault_addr + ' engine_mount_point=kv') }}"
      no_log: true
    # Literal block scalars (|) leave a trailing newline, which is harmless
    # in a .env payload.
    - name: Construct ENV_FILE_DEV content
      ansible.builtin.set_fact:
        _env_file_dev: |
          SUPABASE_URL={{ _supabase_dev.url }}
          SUPABASE_ANON_KEY={{ _supabase_dev.anon_key }}
      no_log: true
    - name: Construct ENV_FILE_PROD content
      ansible.builtin.set_fact:
        _env_file_prod: |
          SUPABASE_URL={{ _supabase_prod.url }}
          SUPABASE_ANON_KEY={{ _supabase_prod.anon_key }}
      no_log: true
    # Gitea's variable API separates update (PUT) from create (POST): a PUT
    # against a variable that does not exist yet answers 404. Tolerate the
    # 404 here and fall back to POST below so the play also succeeds on its
    # first run against a fresh repo. Success is 201 or 204 depending on the
    # Gitea version.
    - name: Update ENV_FILE_DEV Gitea variable
      ansible.builtin.uri:
        url: "{{ gitea_base_url }}/api/v1/repos/{{ gitea_owner }}/{{ gitea_repo }}/actions/variables/ENV_FILE_DEV"
        method: PUT
        headers:
          Authorization: "token {{ _gitea_token.value }}"
          Content-Type: application/json
        body_format: json
        body:
          value: "{{ _env_file_dev }}"
        status_code: [201, 204, 404]
      register: _dev_update
      no_log: true
    - name: Create ENV_FILE_DEV Gitea variable (first run)
      ansible.builtin.uri:
        url: "{{ gitea_base_url }}/api/v1/repos/{{ gitea_owner }}/{{ gitea_repo }}/actions/variables/ENV_FILE_DEV"
        method: POST
        headers:
          Authorization: "token {{ _gitea_token.value }}"
          Content-Type: application/json
        body_format: json
        body:
          value: "{{ _env_file_dev }}"
        status_code: [201, 204]
      when: _dev_update.status == 404
      no_log: true
    - name: Update ENV_FILE_PROD Gitea variable
      ansible.builtin.uri:
        url: "{{ gitea_base_url }}/api/v1/repos/{{ gitea_owner }}/{{ gitea_repo }}/actions/variables/ENV_FILE_PROD"
        method: PUT
        headers:
          Authorization: "token {{ _gitea_token.value }}"
          Content-Type: application/json
        body_format: json
        body:
          value: "{{ _env_file_prod }}"
        status_code: [201, 204, 404]
      register: _prod_update
      no_log: true
    - name: Create ENV_FILE_PROD Gitea variable (first run)
      ansible.builtin.uri:
        url: "{{ gitea_base_url }}/api/v1/repos/{{ gitea_owner }}/{{ gitea_repo }}/actions/variables/ENV_FILE_PROD"
        method: POST
        headers:
          Authorization: "token {{ _gitea_token.value }}"
          Content-Type: application/json
        body_format: json
        body:
          value: "{{ _env_file_prod }}"
        status_code: [201, 204]
      when: _prod_update.status == 404
      no_log: true