Compare commits

...

4 Commits

Author SHA1 Message Date
5c830443f3 docs: update plan for migration of backend 2026-04-12 12:54:10 -04:00
c789454810 docs: Update architecture for supabase
test: Add tests for auth workflow
2026-04-12 10:14:44 -04:00
355f3c5dfa feat: implement basic auth workflow 2026-04-11 21:36:50 -04:00
652ac1e2af refactor: Implemented basic app structure 2026-04-10 19:12:26 -04:00
27 changed files with 3414 additions and 79 deletions

View File

@@ -0,0 +1,8 @@
Write a session handoff file for the current session.
Steps:
1. Read `templates/claude-templates.md` and find the Session Handoff template (Template 4). Use the Light Handoff if this is a small project (under 5 sessions), Full Handoff otherwise.
2. Fill in every field based on what was accomplished in this session. Be specific — include exact file paths for every output, exact numbers discovered, and conditional logic established.
3. Write the handoff to `./docs/summaries/handoff-[today's date]-[topic].md`.
4. If a previous handoff file exists in `./docs/summaries/`, move it to `./docs/archive/handoffs/`.
5. Tell me the file path of the new handoff and summarize what it contains.

View File

@@ -0,0 +1,13 @@
Process an input document into a structured source summary.
Steps:
1. Read `templates/claude-templates.md` and find the Source Document Summary template (Template 1). Use the Light Source Summary if this is a small project (under 5 sessions), Full Source Summary otherwise.
2. Read the document at: $ARGUMENTS
3. Extract all information into the template format. Pay special attention to:
- EXACT numbers — do not round or paraphrase
- Requirements in IF/THEN/BUT/EXCEPT format
- Decisions with rationale and rejected alternatives
- Open questions marked as OPEN, ASSUMED, or MISSING
4. Write the summary to `./docs/summaries/source-[filename].md`.
5. Move the original document to `./docs/archive/`.
6. Tell me: what was extracted, what's unclear, and what needs follow-up.

View File

@@ -0,0 +1,13 @@
Report on the current project state.
Steps:
1. Read `./docs/summaries/00-project-brief.md` for project context.
2. Find and read the latest `handoff-*.md` file in `./docs/summaries/` for current state.
3. List all files in `./docs/summaries/` to understand what's been processed.
4. Report:
- **Project:** name and type from the project brief
- **Current phase:** based on the project phase tracker
- **Last session:** what was accomplished (from the latest handoff)
- **Next steps:** what the next session should do (from the latest handoff)
- **Open questions:** anything unresolved
- **Summary file count:** how many files in docs/summaries/ (warn if approaching 15)

1
.nuxtrc Normal file
View File

@@ -0,0 +1 @@
setups.@nuxt/test-utils="4.0.2"

1
.nvmrc Normal file
View File

@@ -0,0 +1 @@
22

View File

@@ -1,10 +1,7 @@
import { checkAuthRedirect } from '~/utils/auth'
export default defineNuxtRouteMiddleware((to) => {
const user = useSupabaseUser()
const publicRoutes = ['/login', '/signup', '/auth/callback']
if (publicRoutes.includes(to.path)) return
if (!user.value) {
return navigateTo('/login')
}
const redirect = checkAuthRedirect(user.value, to.path)
if (redirect) return navigateTo(redirect)
})

View File

@@ -1,13 +1,59 @@
<template>
<IonPage>
<IonContent class="ion-padding ion-text-center">
<p>Signing you in...</p>
<div class="callback-state">
<IonSpinner v-if="!errorMessage" name="crescent" class="callback-spinner" />
<IonIcon v-else :icon="alertCircleOutline" class="callback-error-icon" color="danger" />
<p>{{ errorMessage || 'Signing you in...' }}</p>
</div>
</IonContent>
</IonPage>
</template>
<script setup lang="ts">
import { IonPage, IonContent } from '@ionic/vue'
import { IonPage, IonContent, IonSpinner, IonIcon } from '@ionic/vue'
import { alertCircleOutline } from 'ionicons/icons'
definePageMeta({ layout: false })
const supabase = useSupabaseClient()
const user = useSupabaseUser()
const route = useRoute()
const errorMessage = ref('')
onMounted(async () => {
const code = route.query.code as string | undefined
if (code) {
const { error } = await supabase.auth.exchangeCodeForSession(code)
if (error) {
errorMessage.value = 'Sign-in failed. Please try again.'
return
}
}
})
// Navigate home once session is established (handles both hash-based and PKCE flows)
watch(user, (value) => {
if (value) navigateTo('/')
}, { immediate: true })
</script>
<style scoped>
.callback-state {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
height: 100%;
gap: 1rem;
}
.callback-spinner {
width: 2.5rem;
height: 2.5rem;
}
.callback-error-icon {
font-size: 2.5rem;
}
</style>

View File

@@ -1,22 +1,65 @@
<template>
<IonPage>
<IonHeader>
<IonToolbar color="primary">
<IonButtons slot="start">
<IonMenuButton />
</IonButtons>
<IonTitle>Home</IonTitle>
</IonToolbar>
</IonHeader>
<IonContent class="ion-padding">
<h2>Welcome to OYS Borrow a Boat</h2>
</IonContent>
<!-- Splash: unauthenticated -->
<template v-if="!user">
<IonContent class="splash-content">
<div class="splash-center">
<img src="/oysqn_logo.png" alt="OYS Borrow a Boat" class="splash-logo" />
<IonButton expand="block" router-link="/login" class="splash-btn">Log In</IonButton>
</div>
</IonContent>
</template>
<!-- Home: authenticated -->
<template v-else>
<IonHeader>
<IonToolbar color="primary">
<IonButtons slot="start">
<IonMenuButton />
</IonButtons>
<IonTitle>Home</IonTitle>
</IonToolbar>
</IonHeader>
<IonContent class="ion-padding">
<h2>Welcome to OYS Borrow a Boat</h2>
</IonContent>
</template>
</IonPage>
</template>
<script setup lang="ts">
import {
IonPage, IonHeader, IonToolbar, IonTitle, IonContent,
IonButtons, IonMenuButton,
IonButtons, IonMenuButton, IonButton,
} from '@ionic/vue'
const user = useSupabaseUser()
definePageMeta({ layout: false })
</script>
<style scoped>
.splash-content {
--background: var(--ion-color-light);
}
.splash-center {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
height: 100%;
padding: 2rem;
gap: 2rem;
}
.splash-logo {
max-width: 280px;
width: 100%;
}
.splash-btn {
width: 100%;
max-width: 280px;
}
</style>

View File

@@ -2,17 +2,114 @@
<IonPage>
<IonHeader>
<IonToolbar color="primary">
<IonButtons slot="start">
<IonBackButton default-href="/" />
</IonButtons>
<IonTitle>Sign In</IonTitle>
</IonToolbar>
</IonHeader>
<IonContent class="ion-padding">
<!-- TODO: Auth form -->
<div class="login-form">
<template v-if="!sent">
<p class="ion-text-center">Enter your email address and we'll send you a sign-in link.</p>
<IonList>
<IonItem>
<IonLabel position="stacked">Email address</IonLabel>
<IonInput
v-model="email"
type="email"
placeholder="you@example.com"
autocomplete="email"
inputmode="email"
@keyup.enter="send"
/>
</IonItem>
</IonList>
<IonButton
expand="block"
class="ion-margin-top"
:disabled="!email || loading"
@click="send"
>
<IonSpinner v-if="loading" name="crescent" slot="start" />
Send Sign-In Link
</IonButton>
<p v-if="error" class="error-text ion-text-center">{{ error }}</p>
</template>
<template v-else>
<div class="sent-state ion-text-center">
<IonIcon :icon="mailOutline" class="sent-icon" />
<h2>Check your email</h2>
<p>A sign-in link was sent to <strong>{{ email }}</strong>. Open it on this device to sign in.</p>
<IonButton fill="outline" expand="block" class="ion-margin-top" @click="reset">
Use a different email
</IonButton>
</div>
</template>
</div>
</IonContent>
</IonPage>
</template>
<script setup lang="ts">
import { IonPage, IonHeader, IonToolbar, IonTitle, IonContent } from '@ionic/vue'
import {
IonPage, IonHeader, IonToolbar, IonTitle, IonContent,
IonButtons, IonBackButton, IonButton, IonList, IonItem,
IonLabel, IonInput, IonSpinner, IonIcon,
} from '@ionic/vue'
import { mailOutline } from 'ionicons/icons'
import { useAuthStore } from '~/stores/auth'
definePageMeta({ layout: false })
const auth = useAuthStore()
const email = ref('')
const loading = ref(false)
const sent = ref(false)
const error = ref('')
async function send() {
if (!email.value || loading.value) return
loading.value = true
error.value = ''
try {
await auth.sendMagicLink(email.value.trim())
sent.value = true
} catch (e: unknown) {
error.value = e instanceof Error ? e.message : 'Failed to send link. Please try again.'
} finally {
loading.value = false
}
}
function reset() {
sent.value = false
email.value = ''
error.value = ''
}
</script>
<style scoped>
.login-form {
max-width: 400px;
margin: 2rem auto;
}
.sent-state {
margin-top: 4rem;
}
.sent-icon {
font-size: 4rem;
color: var(--ion-color-primary);
margin-bottom: 1rem;
}
.error-text {
color: var(--ion-color-danger);
font-size: 0.875rem;
margin-top: 0.5rem;
}
</style>

View File

@@ -1,7 +0,0 @@
// Nuxt plugin: install Ionic's Vue integration application-wide.
// Forces Material Design ("md") mode on every platform so the UI
// renders consistently regardless of the host OS.
import { IonicVue } from '@ionic/vue'

export default defineNuxtPlugin((nuxt) => {
  nuxt.vueApp.use(IonicVue, { mode: 'md' })
})

14
app/utils/auth.ts Normal file
View File

@@ -0,0 +1,14 @@
/** Routes reachable without an authenticated session. */
export const PUBLIC_ROUTES = ['/', '/login', '/signup', '/auth/callback'] as const

// Widened, declared view of PUBLIC_ROUTES so `.includes` accepts any string
// (a declaration instead of a type assertion at the call site).
const publicRoutePaths: readonly string[] = PUBLIC_ROUTES

/**
 * Pure auth decision logic — no Nuxt/Supabase dependencies.
 *
 * @param userValue - The current user (only `id` is required), or null when signed out.
 * @param path - The route path being navigated to.
 * @returns The path to redirect to, or null if no redirect is needed.
 */
export function checkAuthRedirect(
  userValue: { id: string } | null,
  path: string,
): string | null {
  // Public routes are always allowed through, signed in or not.
  if (publicRoutePaths.includes(path)) return null
  // Unauthenticated users on a protected route land on the splash page.
  if (!userValue) return '/'
  return null
}

View File

@@ -0,0 +1,103 @@
# Session Handoff: Local Dev Setup & Ionic Module Migration
**Date:** 2026-03-26
**Session Duration:** ~1.5 hours
**Session Focus:** Initialize local Supabase DB, get app loading in browser, fix Ionic/Nuxt router integration using the official @nuxtjs/ionic module
**Context Usage at Handoff:** Medium-high
## What Was Accomplished
1. **Node upgraded to 22** — added `engines: { node: ">=22" }` to `package.json`, created `.nvmrc` with value `22`
2. **Local Supabase initialized** — schema applied via migration; DB running at `postgresql://postgres:postgres@127.0.0.1:54322/postgres`
3. **`.env` switched to local Supabase** — URL `http://127.0.0.1:54321`, publishable key `sb_publishable_ACJWlzQHlZjBrEguHvfOxg_3BJgxAaH`
4. **Schema migration created** — `supabase/migrations/20260325000000_initial_schema.sql` (reordered from `schema.sql`: all tables first, then RLS policies, then trigger — original order caused `members` not found error during `boats` policy creation)
5. **`yarn dev` running** — app responds HTTP 200 at `http://localhost:3000`
6. **Migrated to `@nuxtjs/ionic`** — replaced manual IonicVue plugin + navManager/viewStacks shims (250 lines) with the official Nuxt Ionic module
## Exact State of Work in Progress
- App loads and serves HTTP 200; browser rendering not fully verified (IonRouterOutlet may still need testing)
- No pages beyond skeleton `index.vue`, `login.vue`, `auth/callback.vue` — all page content is TODO
- Auth flow not implemented (`login.vue` is still a skeleton)
## Decisions Made This Session
- USE `@nuxtjs/ionic` module INSTEAD OF manual `@ionic/vue` plugin BECAUSE it is the idiomatic Nuxt+Ionic integration; provides `@ionic/vue-router` correctly (which supplies `navManager` and `viewStacks` that `IonRouterOutlet` requires) — STATUS: confirmed
- USE local Supabase via CLI INSTEAD OF remote project for development BECAUSE user decision — STATUS: confirmed
- DOCKER_HOST must be set to `unix:///run/user/1000/podman/podman.sock` for all `npx supabase` commands on this machine — STATUS: confirmed
- USE `supabase db reset` to apply schema changes locally — STATUS: confirmed
- `mode: 'md'` moved from plugin to `ionic.config` in `nuxt.config.ts` — STATUS: confirmed
## Key Numbers Generated or Discovered This Session
- Local Supabase API port: 54321
- Local Supabase DB port: 54322
- Local Supabase Studio port: 54323
- Local Supabase Mailpit port: 54324
- Migration file timestamp: 20260325000000
- 6 tables in schema (unchanged from prior session)
- `@nuxtjs/ionic` version installed: 1.0.2
## Files Created or Modified
| File Path | Action | Description |
|-----------|--------|-------------|
| `.nvmrc` | Created | Pins Node to version 22 |
| `package.json` | Modified | Added `engines: { node: ">=22" }`, added `@nuxtjs/ionic`, `@ionic/vue-router` |
| `supabase/migrations/20260325000000_initial_schema.sql` | Created | Initial schema migration (reordered from schema.sql) |
| `.env` | Modified | Switched from remote Supabase to local (URL + publishable key) |
| `nuxt.config.ts` | Modified | Added `@nuxtjs/ionic` module, `ionic: { css: { utilities: true }, config: { mode: 'md' } }`, removed manual Ionic CSS imports and CDN link, removed vite optimizeDeps for @ionic/vue |
| `app/plugins/ionic.client.ts` | Deleted | Replaced by @nuxtjs/ionic module |
## What the NEXT Session Should Do
1. **First**: Verify app renders correctly in browser (check for Vue/Ionic console errors, confirm IonRouterOutlet works)
2. **Then**: Implement `app/pages/login.vue` — OTP flow: email input → `sendOtp()` → token input → `verifyOtp()` → redirect to `/`
3. **Then**: Implement `app/pages/auth/callback.vue` — handle magic link redirect (Supabase sets session from URL hash)
4. **Then**: Test auth flow end-to-end: send OTP via Mailpit (http://127.0.0.1:54324), verify, confirm member row created in `members` table
5. **Then**: Implement `app/pages/index.vue` — home page shell showing boat list or welcome state
6. **Then**: Run `npx supabase gen types typescript --local > app/types/supabase.ts` to replace placeholder types and remove `as any` casts in stores
## Open Questions Requiring User Input
- [ ] Should `login.vue` support magic link (email link) in addition to OTP, or OTP-only? — impacts login page UI
- [ ] Scheduling refactor design still deferred — should it be designed before or after auth + boat pages?
## Assumptions That Need Validation
- ASSUMED: `@nuxtjs/ionic` module correctly wires `@ionic/vue-router` so `IonRouterOutlet` has `navManager` and `viewStacks` — validate by loading app in browser and checking for no inject warnings
- ASSUMED: `handle_new_user()` trigger fires on magic link / OTP first sign-in — validate by signing in a test user and checking `members` table
- ASSUMED: `useIonRouter()` composable (from `@nuxtjs/ionic` auto-imports) should be used instead of `useRouter()` — validate against module docs; update `app/stores/auth.ts` if needed
## What NOT to Re-Read
- `docs/archive/handoffs/handoff-2026-03-25-initial-setup.md` — superseded
- `docs/archive/handoffs/handoff-2026-03-25-project-scaffold.md` — superseded
## Files to Load Next Session
- `app/pages/login.vue` — primary implementation target
- `app/stores/auth.ts` — has `sendOtp()`, `verifyOtp()`, `sendMagicLink()` — needed for login page
- `app/pages/auth/callback.vue` — second implementation target
- `app/types/supabase.ts` — after regenerating with `--local` flag
## Dev Environment Reference
```
# Start local Supabase (already running, but if needed):
DOCKER_HOST=unix:///run/user/1000/podman/podman.sock npx supabase start
# Check status / get keys:
DOCKER_HOST=unix:///run/user/1000/podman/podman.sock npx supabase status
# Apply schema changes:
DOCKER_HOST=unix:///run/user/1000/podman/podman.sock npx supabase db reset
# Start app:
yarn dev
# Mailpit (view OTP emails):
http://127.0.0.1:54324
# Supabase Studio:
http://127.0.0.1:54323
```

View File

@@ -0,0 +1,192 @@
# SDLC Architecture — oysqn.app
**Date:** 2026-04-12
**Status:** Decided — no open items
## Project Topology
| Repo | Purpose | Status |
|------|---------|--------|
| `oysqn.app` | Nuxt PWA + Supabase schema/migrations + all tests | Active |
| `bab-backend-ansible` | Infra provisioning, deployment orchestration, day2 ops | Needs rewrite (Appwrite → Supabase) |
## Lifecycle Phases
1. **Local Dev** — `yarn dev` + `npx supabase start` (Podman)
2. **Dev server** — `https://bab.toal.ca` (nginx on `bab1.mgmt.toal.ca`, webroot `/usr/share/nginx/html/`); backend = supabase.com
3. **Production** — static site on AWS S3; backend = supabase.com
## Backend Hosting
**supabase.com** (free tier initially; may self-host if free plan limits are exceeded).
- supabase.com provides direct Postgres access (connection string) on all tiers including free.
- Backups: `pg_dump` via Postgres connection string → compressed → stored to `bab1.mgmt.toal.ca` via SSH.
- Migrations: `supabase db push` against remote project (Supabase CLI).
- Rollback strategy: pre-migration `pg_dump` backup + rollback SQL scripts (see Down-Migration Convention).
## Supabase Projects
Two separate supabase.com projects — isolated credentials and migration state:
| Project | Purpose |
|---------|---------|
| `oysqn-dev` | Development + staging |
| `oysqn-prod` | Production |
Migrations promoted dev → prod only after E2E validation on dev.
## Down-Migration Convention
Supabase CLI only runs forward migrations. Rollback scripts are separate files executed by AAP on failure.
```
supabase/
migrations/
20260325000000_initial_schema.sql ← forward (applied by supabase db push)
20260412120000_add_boats_table.sql
rollback/
20260412120000_add_boats_table.sql ← reverse SQL, same filename, separate dir
```
Rules:
- Every forward migration **must** have a corresponding rollback file before the PR merges
- Rollback files are plain SQL executed by AAP via `psql` on rollback
- If a migration is irreversible (e.g., data-destroying DROP), document this explicitly at the top of the rollback file — AAP alerts and halts rather than executing
## Secrets Management
**All secrets in HashiCorp Vault:** `http://nas.lan.toal.ca:8200` — KV path prefix: `kv/oys/`
Format: `kv/oys/(dev|prod|shared)/(supabase|app|infra)/<secretname>`
| Secret | Vault path | Consumers |
|--------|-----------|-----------|
| Supabase dev API URL | `kv/oys/dev/supabase/url` | Gitea Actions (ENV_FILE), AAP |
| Supabase dev anon key | `kv/oys/dev/supabase/anon_key` | Gitea Actions (ENV_FILE), AAP |
| Supabase dev service role key | `kv/oys/dev/supabase/service_role_key` | AAP (migrations) |
| Supabase prod API URL | `kv/oys/prod/supabase/url` | Gitea Actions (ENV_FILE), AAP |
| Supabase prod anon key | `kv/oys/prod/supabase/anon_key` | Gitea Actions (ENV_FILE), AAP |
| Supabase prod service role key | `kv/oys/prod/supabase/service_role_key` | AAP (migrations, pg_dump) |
| Supabase prod Postgres conn string | `kv/oys/prod/supabase/postgres_url` | AAP (pg_dump) |
| AWS access key ID | `kv/oys/prod/app/aws_access_key_id` | AAP (S3 deploy) |
| AWS secret access key | `kv/oys/prod/app/aws_secret_access_key` | AAP (S3 deploy) |
| AWS S3 bucket name | `kv/oys/prod/app/aws_s3_bucket` | AAP (S3 deploy) |
| SSH private key (bab1) | `kv/oys/shared/infra/ssh_private_key` | AAP (backup, nginx deploy) |
| Gitea API token | `kv/oys/shared/infra/gitea_token` | AAP (fetch artifacts, sync secrets) |
**Local dev:** Secrets in `.env` (git-ignored). Do not put real values in `.env.example`.
**AAP:** Vault lookup plugin as a credential type.
**Gitea Actions:** Variable `ENV_FILE` (per branch) populated by AAP sync playbook (see below).
**Gitea URL:** `https://gitea.toal.ca/`
**Gitea artifact download token:** `kv/oys/bab_gitea` (pre-existing secret; deviates from naming convention — use as-is).
### Gitea Actions Secret Injection
Follows the same `ENV_FILE` pattern as `bab-app`. AAP runs a `sync-gitea-secrets` playbook:
- **Trigger:** Scheduled daily + on-demand job template
- **Action:** Reads `url` + `anon_key` from Vault, constructs `.env` content, updates Gitea repo variable via API (`PUT /api/v1/repos/{owner}/{repo}/actions/variables/ENV_FILE_DEV` and `ENV_FILE_PROD`)
- **In workflow:** `echo "${{ vars.ENV_FILE_DEV }}" > .env` (dev branch) / `ENV_FILE_PROD` (main branch)
## CI/CD Toolchain
- **SCM:** Gitea
- **CI:** Gitea Actions — unit tests + build + semantic-release → Gitea Release artifact
- **CD + ops:** Ansible + EDA — triggered by Gitea webhook; backup, migrate, deploy, smoke test, rollback
- **Branch strategy:** `dev` → dev server; `main` → production (manual approval gate in AAP)
### Pipeline Architecture
```
Gitea push (dev or main)
Gitea Actions (.gitea/workflows/build.yaml)
├── yarn test (unit tests — no external deps)
├── echo $ENV_FILE_DEV > .env (or ENV_FILE_PROD for main)
├── yarn semantic-release (bumps version, builds tarball, publishes Gitea Release)
│ └── prepareCmd: yarn generate → tar release-<version>.tar.gz
│ └── publishCmd: attaches tarball to Gitea Release, sets VERSION output
└── webhook → EDA (artifact_url, branch, version)
EDA rulebook receives webhook
AAP workflow template
├── pre-deploy: pg_dump → bab1.mgmt.toal.ca (pre-migration snapshot)
├── migrate: supabase db push → if fails, run rollback SQL + abort
├── deploy: fetch artifact → S3 sync (prod) or nginx swap (dev)
├── post-deploy: yarn test:e2e BASE_URL=<deployed-url>
├── on failure: psql rollback script, redeploy previous artifact, notify
└── on success: notify
```
### Artifact Pattern (Matches bab-app)
- `semantic-release` + `@saithodev/semantic-release-gitea`
- Tarball: `release-<version>.tar.gz` of `.output/public/`
- Attached to Gitea Release
- Webhook payload: `{ "artifact_url": "...", "version": "...", "branch": "..." }`
## Backup Policy
**Scope:** Production only. Dev database is ephemeral — no backups.
**Location:** `bab1.mgmt.toal.ca:/var/backups/oysqn/` (confirm path before first production backup)
| Type | Retention | Max count |
|------|-----------|-----------|
| Regular (daily + pre-migration) | 90 days | 30 |
| Monthly | 12 months | 12 |
Monthly backups taken on the 1st of each month. AAP rotation playbook enforces limits after each backup run.
Filename convention:
- Regular: `oysqn-prod-<YYYYMMDD-HHMMSS>.sql.gz`
- Monthly: `oysqn-prod-<YYYY-MM>-monthly.sql.gz`
## Test Strategy
### Test Tiers
| Tier | Tool | Runs in | Requires |
|------|------|---------|---------|
| Unit | Vitest | Gitea Actions + local | Nothing |
| Integration | Vitest (node) | Local only | Local Supabase + `SUPABASE_SERVICE_ROLE_KEY` |
| E2E | Playwright | Local + AAP post-deploy | Running app + Supabase |
### Unit Test Scope
- **Test:** pure business logic, auth middleware, Pinia store actions, utility functions
- **Do NOT unit test:** Vue components that primarily compose Ionic/PrimeVue — E2E covers these
- **Reason:** Mocking Nuxt auto-imports (`#imports`) creates brittle tests that test mocks, not code
### Integration Test Scope
- Supabase RLS policy correctness (one suite per role)
- Auth flows and session creation
- Run locally: `SUPABASE_SERVICE_ROLE_KEY=<key> yarn test:integration`
### E2E Test Strategy
- **Lives in:** `oysqn.app/tests/e2e/` — versioned with app code, tool: Playwright
- **Parameterized by:** `BASE_URL` env var
- **Local:** `BASE_URL=http://localhost:3000 yarn test:e2e`
- **Post-deploy:** AAP calls `yarn test:e2e` with deployed URL
- **Not in Gitea CI** — runner has no Docker/Podman for local Supabase
## bab-backend-ansible Rewrite Scope
New responsibilities (all Appwrite playbooks retired):
1. **Infra provisioning** — dev server nginx setup, cert management, monitoring
2. **Supabase migrations** — `supabase db push` against supabase.com; rollback on failure
3. **Backup** — prod only; scheduled daily + pre-migration `pg_dump` → `bab1.mgmt.toal.ca`; rotation enforcing retention policy
4. **Frontend deployment** — S3 sync (prod), nginx artifact swap (dev)
5. **Day2 ops** — cert renewal, log rotation, health checks
6. **Secret sync** — `sync-gitea-secrets` playbook populates Gitea `ENV_FILE_DEV` / `ENV_FILE_PROD` variables from Vault
7. **EDA rulebooks** — Gitea push webhook → trigger AAP workflow template
## Assumptions
- `main` branch → production requires manual approval gate in AAP before deploy
- Gitea Actions runner is `ubuntu-latest` (same as bab-app)
- Backup path on bab1.mgmt.toal.ca: `/var/backups/oysqn/` (confirm before first production backup)

View File

@@ -0,0 +1,78 @@
# Session Handoff: Splash Page & Magic Link Login
**Date:** 2026-04-12
**Session Duration:** ~30 minutes
**Session Focus:** Implement unauthenticated splash page with logo and login button; implement magic link login page
**Context Usage at Handoff:** Low
## What Was Accomplished
1. **Splash page implemented** — `app/pages/index.vue` — unauthenticated users see centered logo + "Log In" button; authenticated users see home content with header/menu
2. **Login page implemented** — `app/pages/login.vue` — email input, sends magic link via `auth.sendMagicLink()`, shows confirmation state with mail icon after send
3. **Auth middleware updated** — `app/middleware/auth.ts` — added `/` to public routes; unauthenticated users redirected to `/` (not `/login`)
4. **Supabase redirect config updated** — `nuxt.config.ts` — `login` redirect changed from `/login` to `/`; `/` added to `exclude` list
## Exact State of Work in Progress
- Splash + login pages coded but not yet tested end-to-end against local Supabase
- `auth/callback.vue` still a skeleton — magic link redirect will land there; not yet implemented
## Decisions Made This Session
- USE `/` as the unauthenticated landing route INSTEAD OF `/login` BECAUSE the splash/logo page IS the unauthenticated entry point; `/login` is a detail page reached from it — STATUS: confirmed
- MAGIC LINK ONLY on login page (not OTP) BECAUSE user stated "sends a magic link" — STATUS: confirmed; OTP flow (`sendOtp` + `verifyOtp`) exists in auth store but unused by login page
## Key Numbers Generated or Discovered This Session
- Logo file: `public/oysqn_logo.png` (confirmed present)
- Max logo display width: 280px (CSS)
- Max login form width: 400px (CSS)
## Files Created or Modified
| File Path | Action | Description |
|-----------|--------|-------------|
| `app/pages/index.vue` | Modified | Splash (unauthenticated) + home (authenticated) in single page, toggled by `useSupabaseUser()` |
| `app/pages/login.vue` | Modified | Email input → `sendMagicLink()` → confirmation state; uses `IonBackButton` to return to splash |
| `app/middleware/auth.ts` | Modified | Added `/` to public routes; redirects unauthenticated to `/` |
| `nuxt.config.ts` | Modified | `supabase.redirectOptions.login` = `/`; added `/` to `exclude` |
## What the NEXT Session Should Do
1. **First**: Test the auth flow end-to-end: load app → confirm splash shows logo + Login button → tap Login → enter email → check Mailpit (http://127.0.0.1:54324) for magic link → click link → confirm redirect to `/auth/callback` → confirm redirect to `/` (home state)
2. **Then**: Implement `app/pages/auth/callback.vue` — handle magic link redirect (Supabase sets session from URL hash; page should show a spinner, then navigate to `/`)
3. **Then**: Verify `useAuthStore` is auto-imported by Pinia Nuxt module (currently explicitly imported in `login.vue` with `import { useAuthStore } from '~/stores/auth'` — check if explicit import is still needed or if auto-import covers it)
4. **Then**: Run `npx supabase gen types typescript --local > app/types/supabase.ts` to replace placeholder types
5. **Then**: Implement `app/pages/index.vue` home content (authenticated state) — boat list or welcome state
## Open Questions Requiring User Input
- [ ] Should the login page also support OTP (enter token from email) as a fallback, or magic link only? — `sendOtp` + `verifyOtp` exist in auth store but unused — impacts login page UI
- [ ] Should `auth/callback.vue` redirect to a specific page after login (e.g., boats list) or always to `/`? — impacts callback implementation
## Assumptions That Need Validation
- ASSUMED: `@nuxtjs/ionic` + `@pinia/nuxt` auto-imports `useAuthStore` — currently explicitly imported; validate by removing explicit import and testing
- ASSUMED: `sendMagicLink()` in auth store correctly sets `emailRedirectTo` to `/auth/callback` — validate by clicking the email link and checking where it lands
- ASSUMED: `IonRouterOutlet` handles `router-link` on `IonButton` correctly — validate by tapping Login button in browser
## Files to Load Next Session
- `app/pages/auth/callback.vue` — primary implementation target
- `app/pages/index.vue` — may need home content (authenticated state) built out
- `app/stores/auth.ts` — reference for `sendMagicLink` signature and redirect URL
## Dev Environment Reference
```
# Start local Supabase (if not running):
DOCKER_HOST=unix:///run/user/1000/podman/podman.sock npx supabase start
# Start app:
yarn dev
# Mailpit (view magic link emails):
http://127.0.0.1:54324
# Supabase Studio:
http://127.0.0.1:54323
```

View File

@@ -0,0 +1,214 @@
# Plan: bab-backend-ansible Rewrite
**Date:** 2026-04-12
**Status:** DRAFT — awaiting confirmation before execution
**Target repo:** `/home/ptoal/Dev/Projects/bab-backend-ansible`
**Architecture reference:** `docs/context/sdlc-architecture.md`
---
## What Gets Retired
These playbooks are Appwrite-specific and have no equivalent in the new architecture. Delete them.
| File | Reason |
|------|--------|
| `playbooks/install_appwrite.yml` | Appwrite self-hosted, replaced by supabase.com |
| `playbooks/bootstrap_appwrite.yml` | Appwrite self-hosted |
| `playbooks/upgrade_appwrite.yml` | Appwrite self-hosted |
| `playbooks/backup_appwrite.yml` | Appwrite Docker volumes + MariaDB; replaced by pg_dump |
| `playbooks/provision_database.yml` | Appwrite collection/attribute schema; replaced by Supabase migrations |
| `playbooks/provision_users.yml` | Appwrite user provisioning; replaced by Supabase Auth admin API |
| `playbooks/load_data.yml` | Appwrite seed data |
| `playbooks/read_database.yml` | Appwrite diagnostic |
| `playbooks/tasks/patch_appwrite_compose.yml` | Appwrite-specific |
| `playbooks/tasks/upgrade_appwrite_step.yml` | Appwrite-specific |
| `playbooks/templates/appwrite*.j2` | Appwrite config templates |
| `appwrite.json` | Appwrite project definition |
---
## What Gets Kept / Adapted
| File | Action | Notes |
|------|--------|-------|
| `playbooks/install_nginx.yml` | **Keep** | Dev server nginx still needed |
| `playbooks/configure_act_runner.yml` | **Keep** | Gitea runner still on bab1 |
| `playbooks/install_node_exporter.yml` | **Keep** | Monitoring unchanged |
| `playbooks/clean_logs.yml` | **Keep** | Day2 ops unchanged |
| `update_certificates.yml` | **Keep** | TLS cert renewal unchanged |
| `playbooks/deploy_application.yml` | **Adapt** | See below |
| `rulebooks/gitea_webhook.yml` | **Adapt** | See below |
| `rulebooks/alertmanager_listener.yml` | **Keep** | Alerting unchanged |
| `requirements.yml` | **Update** | Remove Appwrite collections; add `community.postgresql` |
### `deploy_application.yml` adaptation
Split into two playbooks:
- `deploy_dev.yml` — nginx artifact swap on bab1 (keep existing logic, update paths/vars)
- `deploy_prod.yml` — S3 sync: fetch artifact tarball → extract → `aws s3 sync` to `{{ s3_bucket }}`
### `rulebooks/gitea_webhook.yml` adaptation
Add branch-based routing: `dev` branch → `oysqn-deploy-dev` job template; `main` branch → `oysqn-deploy-prod` job template (AAP manual approval gate in the workflow).
Updated payload contract: `{ artifact_url, version, branch }` (already matches bab-app pattern).
---
## New Playbooks
### 1. `playbooks/migrate_supabase.yml`
Runs `supabase db push` against the target environment. On failure, executes the matching rollback SQL from `supabase/rollback/` via `psql`, then aborts.
**Vars:** `supabase_project_ref`, `supabase_db_password` (from Vault)
**Steps:**
1. Pre-migration `pg_dump` snapshot → `bab1.mgmt.toal.ca:/var/backups/oysqn/` (pre-migration label)
2. `supabase db push --project-ref {{ supabase_project_ref }}` (via `supabase` CLI on control node or EE)
3. On failure: identify failing migration filename → run `supabase/rollback/<filename>.sql` via psql → fail with message
4. On irreversible migration (rollback file contains `IRREVERSIBLE` marker): halt, alert, do not run rollback SQL
### 2. `playbooks/backup_supabase_prod.yml`
Performs `pg_dump` of production Supabase DB → compressed → stored on bab1 via SSH. Enforces retention policy.
**Vars:** `supabase_postgres_url` (from Vault at `kv/oys/prod/supabase/postgres_url`)
**Steps:**
1. Determine backup type: monthly (1st of month → `oysqn-prod-YYYY-MM-monthly.sql.gz`) or regular (`oysqn-prod-YYYYMMDD-HHMMSS.sql.gz`)
2. `pg_dump "{{ supabase_postgres_url }}" | gzip` → SSH copy to `bab1.mgmt.toal.ca:/var/backups/oysqn/`
3. Rotate: delete regular backups older than 90 days or count > 30; delete monthly backups older than 12 months or count > 12
**Note:** `pg_dump` runs from the AAP EE or control node (not bab1) — postgres_url is the direct Supabase.com connection string.
### 3. `playbooks/sync_gitea_secrets.yml`
Reads `url` + `anon_key` from Vault, constructs `.env` content, updates Gitea repo variables via API.
**Steps:**
1. Vault lookup: `kv/oys/dev/supabase/{url,anon_key}` and `kv/oys/prod/supabase/{url,anon_key}`
2. Construct `ENV_FILE_DEV` and `ENV_FILE_PROD` content (multiline env file format)
3. `PUT /api/v1/repos/{{ gitea_owner }}/{{ gitea_repo }}/actions/variables/ENV_FILE_DEV` (Gitea API, token from `kv/oys/shared/infra/gitea_token`)
4. Same for `ENV_FILE_PROD`
**Trigger:** AAP schedule (daily) + on-demand job template
### 4. `playbooks/deploy_dev.yml`
Fetch artifact tarball → extract → nginx swap on bab1.
**Vars:** `artifact_url`, `version`
**Steps:**
1. Download artifact from Gitea Release URL (auth header: `kv/oys/shared/infra/gitea_token`)
2. Extract to tempdir
3. Rsync/copy to nginx webroot (e.g. `/usr/share/nginx/html/oysqn/`)
4. Cleanup tempdir
### 5. `playbooks/deploy_prod.yml`
Fetch artifact tarball → extract → `aws s3 sync`.
**Vars:** `artifact_url`, `version`
**Steps:**
1. Download artifact (Gitea token auth)
2. Extract to tempdir
3. `aws s3 sync <tempdir>/ s3://{{ s3_bucket }}/ --delete` (AWS creds from Vault `kv/oys/prod/app/`)
4. Cleanup tempdir
---
## EDA Rulebook (updated)
`rulebooks/gitea_webhook.yml` — routes by branch:
```yaml
rules:
- name: Deploy to dev
condition:
all:
- event.payload.data.artifact_url is defined
- event.payload.data.branch == "dev"
action:
run_job_template:
name: oysqn-deploy-dev
organization: OYS
job_args:
extra_vars:
artifact_url: "{{ event.payload.data.artifact_url }}"
version: "{{ event.payload.data.version }}"
- name: Deploy to prod (approval gate in AAP workflow)
condition:
all:
- event.payload.data.artifact_url is defined
- event.payload.data.branch == "main"
action:
run_job_template:
name: oysqn-deploy-prod
organization: OYS
job_args:
extra_vars:
artifact_url: "{{ event.payload.data.artifact_url }}"
version: "{{ event.payload.data.version }}"
```
---
## AAP Workflow Templates
### `oysqn-deploy-dev`
```
pre-migration backup (backup_supabase_prod — note: this playbook is prod-specific (prod Vault path); parameterize the environment or add a dev variant before wiring it into the dev workflow) [SKIP if no migrations]
→ migrate (migrate_supabase — dev project)
→ deploy (deploy_dev)
→ E2E smoke test (yarn test:e2e BASE_URL=https://dev.oysqn.app)
→ on failure: rollback migration (handled in migrate_supabase), redeploy previous artifact, notify
→ on success: notify
```
### `oysqn-deploy-prod`
```
[manual approval gate]
→ pre-migration backup (backup_supabase_prod)
→ migrate (migrate_supabase — prod project)
→ deploy (deploy_prod)
→ E2E smoke test (yarn test:e2e BASE_URL=https://oysqn.app)
→ on failure: rollback migration, redeploy previous S3 artifact, notify
→ on success: notify
```
---
## Implementation Sequence
1. **Rename/archive existing Appwrite playbooks** — move to `playbooks/archive/appwrite/`; do not delete until new playbooks are tested
2. **Update `requirements.yml`** — add `community.postgresql`, remove Appwrite-specific collections
3. **Write `sync_gitea_secrets.yml`** — lowest risk, standalone, no deploy dependency; test in isolation
4. **Write `backup_supabase_prod.yml`** — test against dev Supabase project first (with a throwaway postgres URL)
5. **Write `migrate_supabase.yml`** — needs `supabase` CLI in EE or on control node; verify CLI availability first
6. **Adapt `deploy_dev.yml`** from existing `deploy_application.yml`
7. **Write `deploy_prod.yml`** (new — S3)
8. **Update EDA rulebook** — branch routing
9. **Configure AAP** — create job templates, workflow templates, approval gate, schedule for backup + secret sync
10. **Decommission Appwrite** — after prod cutover confirmed
---
## Open Questions
- [x] **Supabase CLI in EE**: Not present — added to `ee-demo` via `append_final` build step. `SUPABASE_VERSION` build arg required. Verify asset URL against GitHub releases before first build.
- [x] **pg_dump location**: Not present — added `postgresql [platform:rpm]` to `ee-demo` system deps. Runs from AAP EE control node against Supabase.com postgres_url.
- [x] **EE image**: `amazon.aws` collection added to `ee-demo` `requirements.yml`; `boto3`/`botocore` added to `requirements.txt`. S3 sync via `amazon.aws.s3_sync` module.
- [x] **Dev server URL**: `https://bab.toal.ca` — E2E `BASE_URL` for dev workflow.
- [x] **nginx webroot path for dev**: `/usr/share/nginx/html/` on `bab1.mgmt.toal.ca` — confirmed.
- [x] **Gitea artifact auth**: Token at `kv/oys/bab_gitea` (note: deviates from `kv/oys/(dev|prod|shared)/...` convention — existing secret, use as-is). Gitea base URL: `https://gitea.toal.ca/`. Pass as `Authorization: token <value>` header in `get_url`.
- [ ] **dev postgres_url**: Architecture doc has no `postgres_url` in Vault for dev (`kv/oys/dev/supabase/`). Migration playbook needs it to run rollback SQL via psql. Add `kv/oys/dev/supabase/postgres_url` to Vault before first migration run.
---
## Files Created/Modified Summary
| Action | Path |
|--------|------|
| Retire (move to archive) | `playbooks/install_appwrite.yml` and 9 others (see above) |
| Keep | `install_nginx.yml`, `configure_act_runner.yml`, `install_node_exporter.yml`, `clean_logs.yml`, `update_certificates.yml` |
| Adapt | `deploy_application.yml` → split into `deploy_dev.yml` + `deploy_prod.yml` |
| Adapt | `rulebooks/gitea_webhook.yml` |
| Update | `requirements.yml` |
| New | `migrate_supabase.yml` |
| New | `backup_supabase_prod.yml` |
| New | `sync_gitea_secrets.yml` |
| New | `deploy_dev.yml` |
| New | `deploy_prod.yml` |

5
ionic.config.json Normal file
View File

@@ -0,0 +1,5 @@
{
"name": "oysqn.app",
"integrations": {},
"type": "vue"
}

View File

@@ -26,12 +26,18 @@ export default defineNuxtConfig({
ssr: false,
modules: [
'@nuxtjs/ionic',
'@primevue/nuxt-module',
'@pinia/nuxt',
'@vite-pwa/nuxt',
'@nuxtjs/supabase',
],
ionic: {
css: { utilities: true },
config: { mode: 'md' },
},
primevue: {
options: {
theme: {
@@ -46,28 +52,18 @@ export default defineNuxtConfig({
supabase: {
redirectOptions: {
login: '/login',
login: '/',
callback: '/auth/callback',
exclude: ['/login', '/signup', '/auth/callback'],
exclude: ['/', '/login', '/signup', '/auth/callback'],
},
},
app: {
head: {
meta: [{ name: 'theme-color', content: '#027be3' }],
link: [
{ rel: 'stylesheet', href: 'https://unpkg.com/@ionic/core/css/ionic.bundle.css' },
],
},
},
css: [
'@ionic/vue/css/core.css',
'@ionic/vue/css/normalize.css',
'@ionic/vue/css/structure.css',
'@ionic/vue/css/typography.css',
],
pwa: {
manifest: {
name: 'OYS Borrow a Boat',
@@ -93,12 +89,6 @@ export default defineNuxtConfig({
},
},
vite: {
optimizeDeps: {
include: ['@ionic/vue'],
},
},
runtimeConfig: {
public: {
supabaseUrl: '',

View File

@@ -10,10 +10,16 @@
"postinstall": "nuxt prepare",
"typecheck": "nuxt typecheck",
"test": "vitest run",
"test:watch": "vitest"
"test:watch": "vitest",
"test:integration": "vitest run --config vitest.integration.config.ts"
},
"engines": {
"node": ">=22"
},
"dependencies": {
"@ionic/vue": "^8.5.0",
"@ionic/vue-router": "8.8.2",
"@nuxtjs/ionic": "^1.0.2",
"@nuxtjs/supabase": "^1.5.0",
"@pinia/nuxt": "^0.11.3",
"@primevue/nuxt-module": "^4.5.4",
@@ -26,8 +32,12 @@
"vue-router": "^5.0.3"
},
"devDependencies": {
"@nuxt/test-utils": "^4.0.2",
"@vite-pwa/nuxt": "^1.1.1",
"@vue/test-utils": "^2.4.6",
"happy-dom": "^20.8.9",
"sass-embedded": "^1.98.0",
"supabase": "^2.84.4",
"vitest": "^4.1.0",
"vue-tsc": "^2.0.0"
}

8
supabase/.gitignore vendored Normal file
View File

@@ -0,0 +1,8 @@
# Supabase
.branches
.temp
# dotenvx
.env.keys
.env.local
.env.*.local

396
supabase/config.toml Normal file
View File

@@ -0,0 +1,396 @@
# For detailed configuration reference documentation, visit:
# https://supabase.com/docs/guides/local-development/cli/config
# A string used to distinguish different Supabase projects on the same host. Defaults to the
# working directory name when running `supabase init`.
project_id = "oysqn.app"
[api]
enabled = true
# Port to use for the API URL.
port = 54321
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
# endpoints. `public` and `graphql_public` schemas are included by default.
schemas = ["public", "graphql_public"]
# Extra schemas to add to the search_path of every request.
extra_search_path = ["public", "extensions"]
# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size
# for accidental or malicious requests.
max_rows = 1000
[api.tls]
# Enable HTTPS endpoints locally using a self-signed certificate.
enabled = false
# Paths to self-signed certificate pair.
# cert_path = "../certs/my-cert.pem"
# key_path = "../certs/my-key.pem"
[db]
# Port to use for the local database URL.
port = 54322
# Port used by db diff command to initialize the shadow database.
shadow_port = 54320
# Maximum amount of time to wait for health check when starting the local database.
health_timeout = "2m"
# The database major version to use. This has to be the same as your remote database's. Run `SHOW
# server_version;` on the remote database to check.
major_version = 17
[db.pooler]
enabled = false
# Port to use for the local connection pooler.
port = 54329
# Specifies when a server connection can be reused by other clients.
# Configure one of the supported pooler modes: `transaction`, `session`.
pool_mode = "transaction"
# How many server connections to allow per user/database pair.
default_pool_size = 20
# Maximum number of client connections allowed.
max_client_conn = 100
# [db.vault]
# secret_key = "env(SECRET_VALUE)"
[db.migrations]
# If disabled, migrations will be skipped during a db push or reset.
enabled = true
# Specifies an ordered list of schema files that describe your database.
# Supports glob patterns relative to supabase directory: "./schemas/*.sql"
schema_paths = []
[db.seed]
# If enabled, seeds the database after migrations during a db reset.
enabled = true
# Specifies an ordered list of seed files to load during db reset.
# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
sql_paths = ["./seed.sql"]
[db.network_restrictions]
# Enable management of network restrictions.
enabled = false
# List of IPv4 CIDR blocks allowed to connect to the database.
# Defaults to allow all IPv4 connections. Set empty array to block all IPs.
allowed_cidrs = ["0.0.0.0/0"]
# List of IPv6 CIDR blocks allowed to connect to the database.
# Defaults to allow all IPv6 connections. Set empty array to block all IPs.
allowed_cidrs_v6 = ["::/0"]
# Uncomment to reject non-secure connections to the database.
# [db.ssl_enforcement]
# enabled = true
[realtime]
enabled = true
# Bind realtime via either IPv4 or IPv6. (default: IPv4)
# ip_version = "IPv6"
# The maximum length in bytes of HTTP request headers. (default: 4096)
# max_header_length = 4096
[studio]
enabled = true
# Port to use for Supabase Studio.
port = 54323
# External URL of the API server that frontend connects to.
api_url = "http://127.0.0.1"
# OpenAI API Key to use for Supabase AI in the Supabase Studio.
openai_api_key = "env(OPENAI_API_KEY)"
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
# are monitored, and you can view the emails that would have been sent from the web interface.
[inbucket]
enabled = true
# Port to use for the email testing server web interface.
port = 54324
# Uncomment to expose additional ports for testing user applications that send emails.
# smtp_port = 54325
# pop3_port = 54326
# admin_email = "admin@email.com"
# sender_name = "Admin"
[storage]
enabled = true
# The maximum file size allowed (e.g. "5MB", "500KB").
file_size_limit = "50MiB"
# Uncomment to configure local storage buckets
# [storage.buckets.images]
# public = false
# file_size_limit = "50MiB"
# allowed_mime_types = ["image/png", "image/jpeg"]
# objects_path = "./images"
# Allow connections via S3 compatible clients
[storage.s3_protocol]
enabled = true
# Image transformation API is available to Supabase Pro plan.
# [storage.image_transformation]
# enabled = true
# Store analytical data in S3 for running ETL jobs over Iceberg Catalog
# This feature is only available on the hosted platform.
[storage.analytics]
enabled = false
max_namespaces = 5
max_tables = 10
max_catalogs = 2
# Analytics Buckets is available to Supabase Pro plan.
# [storage.analytics.buckets.my-warehouse]
# Store vector embeddings in S3 for large and durable datasets
# This feature is only available on the hosted platform.
[storage.vector]
enabled = false
max_buckets = 10
max_indexes = 5
# Vector Buckets is available to Supabase Pro plan.
# [storage.vector.buckets.documents-openai]
[auth]
enabled = true
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
# in emails.
site_url = "http://127.0.0.1:3000"
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
additional_redirect_urls = ["https://127.0.0.1:3000"]
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
jwt_expiry = 3600
# JWT issuer URL. If not set, defaults to the local API URL (http://127.0.0.1:<port>/auth/v1).
# jwt_issuer = ""
# Path to JWT signing key. DO NOT commit your signing keys file to git.
# signing_keys_path = "./signing_keys.json"
# If disabled, the refresh token will never expire.
enable_refresh_token_rotation = true
# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
# Requires enable_refresh_token_rotation = true.
refresh_token_reuse_interval = 10
# Allow/disallow new user signups to your project.
enable_signup = true
# Allow/disallow anonymous sign-ins to your project.
enable_anonymous_sign_ins = false
# Allow/disallow testing manual linking of accounts
enable_manual_linking = false
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
minimum_password_length = 6
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
password_requirements = ""
[auth.rate_limit]
# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled.
email_sent = 2
# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled.
sms_sent = 30
# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true.
anonymous_users = 30
# Number of sessions that can be refreshed in a 5 minute interval per IP address.
token_refresh = 150
# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users).
sign_in_sign_ups = 30
# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address.
token_verifications = 30
# Number of Web3 logins that can be made in a 5 minute interval per IP address.
web3 = 30
# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
# [auth.captcha]
# enabled = true
# provider = "hcaptcha"
# secret = ""
[auth.email]
# Allow/disallow new user signups via email to your project.
enable_signup = true
# If enabled, a user will be required to confirm any email change on both the old, and new email
# addresses. If disabled, only the new email is required to confirm.
double_confirm_changes = true
# If enabled, users need to confirm their email address before signing in.
enable_confirmations = false
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
secure_password_change = false
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
max_frequency = "1s"
# Number of characters used in the email OTP.
otp_length = 6
# Number of seconds before the email OTP expires (defaults to 1 hour).
otp_expiry = 3600
# Use a production-ready SMTP server
# [auth.email.smtp]
# enabled = true
# host = "smtp.sendgrid.net"
# port = 587
# user = "apikey"
# pass = "env(SENDGRID_API_KEY)"
# admin_email = "admin@email.com"
# sender_name = "Admin"
# Uncomment to customize email template
# [auth.email.template.invite]
# subject = "You have been invited"
# content_path = "./supabase/templates/invite.html"
# Uncomment to customize notification email template
# [auth.email.notification.password_changed]
# enabled = true
# subject = "Your password has been changed"
# content_path = "./templates/password_changed_notification.html"
[auth.sms]
# Allow/disallow new user signups via SMS to your project.
enable_signup = false
# If enabled, users need to confirm their phone number before signing in.
enable_confirmations = false
# Template for sending OTP to users
template = "Your code is {{ .Code }}"
# Controls the minimum amount of time that must pass before sending another sms otp.
max_frequency = "5s"
# Use pre-defined map of phone number to OTP for testing.
# [auth.sms.test_otp]
# 4152127777 = "123456"
# Configure logged in session timeouts.
# [auth.sessions]
# Force log out after the specified duration.
# timebox = "24h"
# Force log out if the user has been inactive longer than the specified duration.
# inactivity_timeout = "8h"
# This hook runs before a new user is created and allows developers to reject the request based on the incoming user object.
# [auth.hook.before_user_created]
# enabled = true
# uri = "pg-functions://postgres/auth/before-user-created-hook"
# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
# [auth.hook.custom_access_token]
# enabled = true
# uri = "pg-functions://<database>/<schema>/<hook_name>"
# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
[auth.sms.twilio]
enabled = false
account_sid = ""
message_service_sid = ""
# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
# Multi-factor-authentication is available to Supabase Pro plan.
[auth.mfa]
# Control how many MFA factors can be enrolled at once per user.
max_enrolled_factors = 10
# Control MFA via App Authenticator (TOTP)
[auth.mfa.totp]
enroll_enabled = false
verify_enabled = false
# Configure MFA via Phone Messaging
[auth.mfa.phone]
enroll_enabled = false
verify_enabled = false
otp_length = 6
template = "Your code is {{ .Code }}"
max_frequency = "5s"
# Configure MFA via WebAuthn
# [auth.mfa.web_authn]
# enroll_enabled = true
# verify_enabled = true
# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
# `twitter`, `x`, `slack`, `spotify`, `workos`, `zoom`.
[auth.external.apple]
enabled = false
client_id = ""
# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
# Overrides the default auth redirectUrl.
redirect_uri = ""
# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
# or any other third-party OIDC providers.
url = ""
# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
skip_nonce_check = false
# If enabled, it will allow the user to successfully authenticate when the provider does not return an email address.
email_optional = false
# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard.
# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting.
[auth.web3.solana]
enabled = false
# Use Firebase Auth as a third-party provider alongside Supabase Auth.
[auth.third_party.firebase]
enabled = false
# project_id = "my-firebase-project"
# Use Auth0 as a third-party provider alongside Supabase Auth.
[auth.third_party.auth0]
enabled = false
# tenant = "my-auth0-tenant"
# tenant_region = "us"
# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
[auth.third_party.aws_cognito]
enabled = false
# user_pool_id = "my-user-pool-id"
# user_pool_region = "us-east-1"
# Use Clerk as a third-party provider alongside Supabase Auth.
[auth.third_party.clerk]
enabled = false
# Obtain from https://clerk.com/setup/supabase
# domain = "example.clerk.accounts.dev"
# OAuth server configuration
[auth.oauth_server]
# Enable OAuth server functionality
enabled = false
# Path for OAuth consent flow UI
authorization_url_path = "/oauth/consent"
# Allow dynamic client registration
allow_dynamic_registration = false
[edge_runtime]
enabled = true
# Supported request policies: `oneshot`, `per_worker`.
# `per_worker` (default) — enables hot reload during local development.
# `oneshot` — fallback mode if hot reload causes issues (e.g. in large repos or with symlinks).
policy = "per_worker"
# Port to attach the Chrome inspector for debugging edge functions.
inspector_port = 8083
# The Deno major version to use.
deno_version = 2
# [edge_runtime.secrets]
# secret_key = "env(SECRET_VALUE)"
[analytics]
enabled = true
port = 54327
# Configure one of the supported backends: `postgres`, `bigquery`.
backend = "postgres"
# Experimental features may be deprecated any time
[experimental]
# Configures Postgres storage engine to use OrioleDB (S3)
orioledb_version = ""
# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
s3_host = "env(S3_HOST)"
# Configures S3 bucket region, eg. us-east-1
s3_region = "env(S3_REGION)"
# Configures AWS_ACCESS_KEY_ID for S3 bucket
s3_access_key = "env(S3_ACCESS_KEY)"
# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
s3_secret_key = "env(S3_SECRET_KEY)"
# [experimental.pgdelta]
# When enabled, pg-delta becomes the active engine for supported schema flows.
# enabled = false
# Directory under `supabase/` where declarative files are written.
# declarative_schema_path = "./declarative"
# JSON string passed through to pg-delta SQL formatting.
# format_options = "{\"keywordCase\":\"upper\",\"indent\":2,\"maxWidth\":80,\"commaStyle\":\"trailing\"}"

View File

@@ -0,0 +1,202 @@
-- OYS Borrow a Boat — Supabase Schema
-- ============================================================
-- TABLES
-- ============================================================
-- Club fleet. `required_certs` holds certification codes a member must
-- hold to book the boat (matched against members.certifications).
create table public.boats (
  id uuid primary key default gen_random_uuid(),
  name text not null,
  display_name text,
  class text,
  year integer,
  img_src text,
  icon_src text,
  booking_available boolean not null default true,
  required_certs text[] not null default '{}',
  max_passengers integer not null default 6,
  defects jsonb not null default '[]',
  -- defects shape: [{ type: string, severity: string, description: string, detail?: string }]
  created_at timestamptz not null default now()
);
-- Club member profile, 1:1 with auth.users (unique(user_id) below).
-- A row is auto-created on first sign-in by the on_auth_user_created trigger.
create table public.members (
  id uuid primary key default gen_random_uuid(),
  user_id uuid not null references auth.users(id) on delete cascade,
  first_name text not null default '',
  last_name text not null default '',
  email text not null,
  slack_id text,
  certifications text[] not null default '{}',
  -- cert codes match boats.required_certs values (e.g. 'j27', 'capri25')
  role text not null default 'member'
    check (role in ('member', 'skipper', 'admin', 'boatswain', 'volunteer', 'instructor')),
  created_at timestamptz not null default now(),
  unique(user_id)
);
-- Reusable named sets of daily booking windows (admin-managed).
create table public.interval_templates (
  id uuid primary key default gen_random_uuid(),
  name text not null,
  time_tuples jsonb not null default '[]',
  -- shape: [[startHHMM, endHHMM], ...] e.g. [["08:00","12:00"],["13:00","17:00"]]
  created_at timestamptz not null default now()
);
-- Concrete time slots on a boat. user_id is nullable with ON DELETE SET NULL
-- — presumably the member who claimed/blocked the slot; confirm against callers.
-- NOTE(review): no CHECK enforcing end_time > start_time.
create table public.intervals (
  id uuid primary key default gen_random_uuid(),
  boat_id uuid not null references public.boats(id) on delete cascade,
  start_time timestamptz not null,
  end_time timestamptz not null,
  user_id uuid references auth.users(id) on delete set null,
  created_at timestamptz not null default now()
);
create index intervals_boat_id_idx on public.intervals(boat_id);
-- Composite index supports overlap/range queries on (start_time, end_time).
create index intervals_time_range_idx on public.intervals(start_time, end_time);
-- Bookings. member_ids / guest_ids are plain text[] — no FK integrity;
-- NOTE(review): confirm whether these should reference members.id.
-- NOTE(review): no CHECK enforcing end_time > start_time, and overlapping
-- reservations for the same boat are not prevented at the DB level.
create table public.reservations (
  id uuid primary key default gen_random_uuid(),
  boat_id uuid not null references public.boats(id) on delete cascade,
  user_id uuid not null references auth.users(id) on delete cascade,
  start_time timestamptz not null,
  end_time timestamptz not null,
  status text not null default 'pending'
    check (status in ('pending', 'tentative', 'confirmed')),
  reason text not null default '',
  comment text not null default '',
  member_ids text[] not null default '{}',
  guest_ids text[] not null default '{}',
  created_at timestamptz not null default now()
);
create index reservations_boat_id_idx on public.reservations(boat_id);
create index reservations_user_id_idx on public.reservations(user_id);
create index reservations_time_range_idx on public.reservations(start_time, end_time);
-- Static club documentation (manuals, checklists) served in-app.
create table public.reference_docs (
  id uuid primary key default gen_random_uuid(),
  title text not null,
  category text not null,
  tags text[] not null default '{}',
  subtitle text,
  content text not null,
  created_at timestamptz not null default now()
);
-- ============================================================
-- RLS
-- ============================================================
alter table public.boats enable row level security;
alter table public.members enable row level security;
alter table public.interval_templates enable row level security;
alter table public.intervals enable row level security;
alter table public.reservations enable row level security;
alter table public.reference_docs enable row level security;

-- Role-check helper used by every admin-ish policy below.
-- SECURITY DEFINER so the lookup bypasses RLS on public.members: a policy
-- on members that selects from members re-triggers members' own policies
-- and Postgres aborts with "infinite recursion detected in policy for
-- relation members"; policies on OTHER tables that subquery members hit
-- the same recursion through members' SELECT policies. Routing all role
-- checks through this function breaks the cycle.
-- search_path is pinned to '' (all references schema-qualified) per the
-- Supabase linter's "Function Search Path Mutable" guidance.
create or replace function public.has_any_role(allowed text[])
returns boolean
language sql
security definer
stable
set search_path = ''
as $$
  select exists (
    select 1 from public.members
    where user_id = auth.uid() and role = any(allowed)
  );
$$;

-- boats
create policy "Authenticated users can read boats" on public.boats
  for select using (auth.role() = 'authenticated');
create policy "Admins can manage boats" on public.boats
  for all using (public.has_any_role(array['admin', 'boatswain']));
-- members
create policy "Users can read own member record" on public.members
  for select using (user_id = auth.uid());
create policy "Admins can read all members" on public.members
  for select using (public.has_any_role(array['admin', 'boatswain', 'instructor']));
-- NOTE(review): FOR UPDATE with no WITH CHECK reuses the USING expression,
-- so a user cannot hand the row to someone else — but nothing stops them
-- updating their own `role` or `certifications` columns (self-escalation
-- to admin). Restrict those columns via column-level privileges or a
-- BEFORE UPDATE trigger.
create policy "Users can update own member record" on public.members
  for update using (user_id = auth.uid());
create policy "Admins can manage all members" on public.members
  for all using (public.has_any_role(array['admin']));
-- interval_templates
create policy "Authenticated users can read interval templates" on public.interval_templates
  for select using (auth.role() = 'authenticated');
create policy "Admins can manage interval templates" on public.interval_templates
  for all using (public.has_any_role(array['admin', 'boatswain']));
-- intervals
create policy "Authenticated users can read intervals" on public.intervals
  for select using (auth.role() = 'authenticated');
create policy "Admins can manage intervals" on public.intervals
  for all using (public.has_any_role(array['admin', 'boatswain']));
-- reservations
create policy "Users can read own reservations" on public.reservations
  for select using (user_id = auth.uid());
create policy "Admins can read all reservations" on public.reservations
  for select using (public.has_any_role(array['admin', 'boatswain']));
-- NOTE(review): despite the name, this policy exposes EVERY column of every
-- reservation (reason, comment, guest lists) to any authenticated user, and
-- makes the two SELECT policies above redundant (policies are OR'd). RLS
-- cannot hide columns — if "non-private" means a column subset, expose it
-- through a view or a limited RPC instead.
create policy "Authenticated users can read non-private reservation slots" on public.reservations
  for select using (auth.role() = 'authenticated');
create policy "Users can create own reservations" on public.reservations
  for insert with check (user_id = auth.uid());
create policy "Users can update own reservations" on public.reservations
  for update using (user_id = auth.uid());
create policy "Admins can manage all reservations" on public.reservations
  for all using (public.has_any_role(array['admin', 'boatswain']));
-- reference_docs
create policy "Authenticated users can read reference docs" on public.reference_docs
  for select using (auth.role() = 'authenticated');
create policy "Admins can manage reference docs" on public.reference_docs
  for all using (public.has_any_role(array['admin']));
-- ============================================================
-- TRIGGER: create member record on first sign-in
-- ============================================================
-- Auto-provisions a public.members row whenever Supabase Auth inserts a
-- new auth.users row (e.g. first magic-link sign-in). SECURITY DEFINER is
-- required because the auth service's insert does not run with privileges
-- on public.members.
-- search_path is pinned to '' (references are schema-qualified) so objects
-- earlier in a caller's search_path cannot be substituted — fixes the
-- Supabase linter's "Function Search Path Mutable" finding for
-- SECURITY DEFINER functions.
create or replace function public.handle_new_user()
returns trigger
language plpgsql
security definer
set search_path = ''
as $$
begin
  insert into public.members (user_id, email)
  values (new.id, new.email)
  -- Idempotent: a re-fired trigger for an existing user is a no-op.
  on conflict (user_id) do nothing;
  return new;
end;
$$;
-- `execute function` replaces the deprecated `execute procedure` spelling
-- (PostgreSQL 11+; config.toml targets major_version 17).
create trigger on_auth_user_created
  after insert on auth.users
  for each row execute function public.handle_new_user();

View File

@@ -0,0 +1,126 @@
/**
* Integration tests for auth session creation via magic link flow.
*
* Requires local Supabase to be running:
* DOCKER_HOST=unix:///run/user/1000/podman/podman.sock npx supabase start
*
* Run with:
* yarn test:integration
*
* Uses Supabase admin API (service role key) to:
* 1. Create a temporary test user
* 2. Generate a magic link token
* 3. Exchange the token for a session
* 4. Verify the session is valid
* 5. Clean up the test user
*/
import { describe, it, expect, beforeAll, afterAll } from 'vitest'
import { createClient } from '@supabase/supabase-js'
// Local Supabase defaults (from `npx supabase status`)
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'http://localhost:54321'
// NOTE(review): the original read SUPABASE_KEY despite the constant's name;
// accept both spellings so existing environments keep working.
const SUPABASE_ANON_KEY =
  process.env.SUPABASE_ANON_KEY ?? process.env.SUPABASE_KEY ?? ''
const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY ?? ''

// Unique per run so repeated runs cannot collide on email uniqueness.
const TEST_EMAIL = `test-auth-${Date.now()}@oysqn.test`

let adminClient: ReturnType<typeof createClient>
let anonClient: ReturnType<typeof createClient>
let testUserId: string | undefined

beforeAll(() => {
  if (!SUPABASE_SERVICE_ROLE_KEY) {
    throw new Error(
      'SUPABASE_SERVICE_ROLE_KEY is required for integration tests.\n' +
      'Run `npx supabase status` to get the service_role key and set it as an env var.'
    )
  }
  // Fail fast with a clear message instead of opaque 401s mid-test.
  if (!SUPABASE_ANON_KEY) {
    throw new Error(
      'SUPABASE_ANON_KEY (or SUPABASE_KEY) is required for integration tests.\n' +
      'Run `npx supabase status` to get the anon key and set it as an env var.'
    )
  }
  // Admin client (service role) bypasses RLS; used only for user lifecycle.
  adminClient = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, {
    auth: { autoRefreshToken: false, persistSession: false },
  })
  // Anon client plays the role of the browser exchanging the OTP token.
  anonClient = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
    auth: { autoRefreshToken: false, persistSession: false },
  })
})

afterAll(async () => {
  // Remove the throwaway user so repeated runs don't accumulate accounts.
  if (testUserId) {
    await adminClient.auth.admin.deleteUser(testUserId)
  }
})
describe('magic link login — session creation', () => {
  it('creates a confirmed test user via admin API', async () => {
    // email_confirm skips the confirmation email round-trip entirely.
    const result = await adminClient.auth.admin.createUser({
      email: TEST_EMAIL,
      email_confirm: true,
    })
    expect(result.error).toBeNull()
    expect(result.data.user).toBeDefined()
    expect(result.data.user!.email).toBe(TEST_EMAIL)
    // Remember the id so afterAll can clean the user up.
    testUserId = result.data.user!.id
  })

  it('generates a magic link token for the test user', async () => {
    const { data, error } = await adminClient.auth.admin.generateLink({
      type: 'magiclink',
      email: TEST_EMAIL,
    })
    expect(error).toBeNull()
    expect(data.properties?.hashed_token).toBeTruthy()
  })

  it('exchanging the OTP token creates a valid session in Supabase', async () => {
    // Tokens are single-use, so mint a fresh link just for this test.
    const fresh = await adminClient.auth.admin.generateLink({
      type: 'magiclink',
      email: TEST_EMAIL,
    })
    expect(fresh.error).toBeNull()
    const otp = fresh.data.properties?.hashed_token
    expect(otp).toBeTruthy()

    // Mirrors what happens when the user clicks the magic link and
    // /auth/callback calls supabase.auth.verifyOtp (hash-based) or
    // supabase.auth.exchangeCodeForSession (PKCE).
    const exchange = await anonClient.auth.verifyOtp({
      email: TEST_EMAIL,
      token: otp!,
      type: 'magiclink',
    })
    expect(exchange.error).toBeNull()
    expect(exchange.data.session).not.toBeNull()
    expect(exchange.data.session!.access_token).toBeTruthy()
    expect(exchange.data.session!.user.email).toBe(TEST_EMAIL)
  })

  it('session allows access to authenticated Supabase queries', async () => {
    const { data: link } = await adminClient.auth.admin.generateLink({
      type: 'magiclink',
      email: TEST_EMAIL,
    })
    const { data: verified } = await anonClient.auth.verifyOtp({
      email: TEST_EMAIL,
      token: link.properties!.hashed_token!,
      type: 'magiclink',
    })

    // Build a client that sends the session's JWT on every request.
    const bearer = `Bearer ${verified.session!.access_token}`
    const authedClient = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
      auth: { autoRefreshToken: false, persistSession: false },
      global: {
        headers: { Authorization: bearer },
      },
    })

    // The token must resolve back to the user we logged in as.
    const lookup = await authedClient.auth.getUser()
    expect(lookup.error).toBeNull()
    expect(lookup.data.user).not.toBeNull()
    expect(lookup.data.user!.email).toBe(TEST_EMAIL)
  })
})

View File

@@ -0,0 +1,24 @@
// @vitest-environment node
import { describe, it, expect } from 'vitest'
import { checkAuthRedirect } from '~/utils/auth'
describe('checkAuthRedirect', () => {
  // Minimal stand-in for an authenticated session user.
  const user = { id: 'user-123' }

  it.each(['/', '/login', '/signup', '/auth/callback'])(
    'returns null for unauthenticated user on public route: %s',
    (route) => {
      expect(checkAuthRedirect(null, route)).toBeNull()
    }
  )

  it('returns "/" for unauthenticated user on protected route', () => {
    const target = checkAuthRedirect(null, '/boats')
    expect(target).toBe('/')
  })

  it('returns null for authenticated user on protected route', () => {
    expect(checkAuthRedirect(user, '/boats')).toBeNull()
  })

  it('returns null for authenticated user on public route', () => {
    expect(checkAuthRedirect(user, '/')).toBeNull()
  })
})

17
vitest.config.ts Normal file
View File

@@ -0,0 +1,17 @@
import { defineVitestConfig } from '@nuxt/test-utils/config'

// Vitest config for UNIT tests only — integration tests use a separate
// plain-node config that includes tests/integration instead.
export default defineVitestConfig({
  test: {
    // Run inside the Nuxt test environment so auto-imports and Nuxt
    // runtime globals are available to the tests.
    environment: 'nuxt',
    environmentOptions: {
      nuxt: {
        // Browser APIs the Nuxt test environment does not provide.
        mock: {
          intersectionObserver: true,
          indexedDb: true,
        },
      },
    },
    include: ['tests/unit/**/*.test.ts'],
    // Defensive: integration tests need a running Supabase instance.
    exclude: ['tests/integration/**'],
  },
})

View File

@@ -0,0 +1,14 @@
import { defineConfig } from 'vitest/config'
export default defineConfig({
test: {
environment: 'node',
include: ['tests/integration/**/*.test.ts'],
testTimeout: 30000,
},
resolve: {
alias: {
'~': new URL('./app', import.meta.url).pathname,
},
},
})

1790
yarn.lock

File diff suppressed because it is too large Load Diff