From 38ea5c7da069cba4064be7feb0b98c7cc69e688b Mon Sep 17 00:00:00 2001
From: karim hassan
Date: Mon, 25 Aug 2025 12:39:54 +0000
Subject: [PATCH] First operational version of the admin routes

---
 README_API_COMMONJS_FIX.txt              |  22 ++++
 README_MATCHES.txt                       |  40 ++++++
 README_MATCHES_MIGRATION.txt             |  27 ++++
 README_MATCHES_TEAMS.txt                 |  35 ++++++
 apply_add_scores_columns.sh              |   6 +
 apply_matches_migration.sh               |  10 ++
 apply_matches_sql.sh                     |  13 ++
 apply_seed_teams.sh                      |  73 +++++++++++
 backend/package.json                     |  15 +--
 backend/sql/add_scores_columns.sql       |   4 +
 backend/sql/matches.sql                  |  13 ++
 backend/sql/migrate_matches_add_cols.sql |  31 +++++
 backend/sql/seed_matches.sql             |   6 +
 backend/src/db.js                        |   4 -
 backend/src/index.js                     |  17 ++-
 backend/src/routes/matches.js            | 150 ++++++++++++++++++++---
 backend/src/routes/participants.js       |   2 -
 backend/src/routes/tournaments.js        |  12 +-
 create_match_with_teams.sh               | 102 +++++++++++++++
 diag_502.sh                              |  57 +++++++++
 fix_and_seed_teams.sh                    | 106 ++++++++++++++++
 21 files changed, 692 insertions(+), 53 deletions(-)
 create mode 100644 README_API_COMMONJS_FIX.txt
 create mode 100644 README_MATCHES.txt
 create mode 100644 README_MATCHES_MIGRATION.txt
 create mode 100644 README_MATCHES_TEAMS.txt
 create mode 100755 apply_add_scores_columns.sh
 create mode 100755 apply_matches_migration.sh
 create mode 100755 apply_matches_sql.sh
 create mode 100755 apply_seed_teams.sh
 create mode 100644 backend/sql/add_scores_columns.sql
 create mode 100644 backend/sql/matches.sql
 create mode 100644 backend/sql/migrate_matches_add_cols.sql
 create mode 100644 backend/sql/seed_matches.sql
 create mode 100755 create_match_with_teams.sh
 create mode 100755 diag_502.sh
 create mode 100755 fix_and_seed_teams.sh

diff --git a/README_API_COMMONJS_FIX.txt b/README_API_COMMONJS_FIX.txt
new file mode 100644
index 0000000..dd44eb5
--- /dev/null
+++ b/README_API_COMMONJS_FIX.txt
@@ -0,0 +1,22 @@
+Super Sunday — API patch: CommonJS fix + routes
+===============================================
+
+Problem: the API crashed with "require is not defined" because package.json
+contained "type": "module".
+
+This patch:
+- Forces CommonJS (package.json without "type": "module")
+- Provides a simple Express index + routes (tournaments, participants)
+- The API listens on port 4000 (PORT env var)
+
+Installation
+------------
+unzip -o supersunday_api_commonjs_fix_patch.zip -d .
+docker compose build --no-cache api
+docker compose up -d api
+docker compose logs --since=2m api
+
+Tests
+-----
+curl -i http://localhost/api/health
+curl -s http://localhost/api/tournaments
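Note (editor's illustration, not part of the patch): the failure described above comes from Node treating src/index.js as an ES module when package.json declares "type": "module"; in an ES module `require` is not defined. A minimal sketch of the two module styles, assuming only that express is installed:

    // ESM (what "type": "module" forces): `require` does not exist here.
    //   import express from 'express';
    // CommonJS (what this patch standardizes on):
    const express = require('express');
    const app = express();
    app.get('/api/health', (req, res) => res.json({ ok: true }));
    app.listen(process.env.PORT || 4000);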
diff --git a/README_MATCHES.txt b/README_MATCHES.txt
new file mode 100644
index 0000000..c417539
--- /dev/null
+++ b/README_MATCHES.txt
@@ -0,0 +1,40 @@
+Super Sunday — Matches routes patch (CRUD + score)
+=================================================
+
+API added:
+- POST /api/matches                → create a match
+- GET /api/matches?tid=1           → list (optionally filtered by tournament)
+- GET /api/tournaments/:id/matches → convenience alias
+- POST /api/matches/:id/score      → update score_a/score_b/finished
+- PUT /api/matches/:id             → generic update (player_a_id, court, start_time, etc.)
+- DELETE /api/matches/:id          → delete a match
+
+Files
+--------
+- backend/src/routes/matches.js
+- backend/src/index.js (mounting + alias; optional if you already have an index)
+- backend/sql/matches.sql (matches table, if needed)
+- backend/sql/seed_matches.sql (demo seed)
+- apply_matches_sql.sh (applies SQL + seed)
+
+Installation
+------------
+1) Unzip at the project root:
+   unzip -o supersunday_matches_routes_patch.zip -d .
+
+2) (Optional) create the matches table + seed:
+   ./apply_matches_sql.sh
+
+3) Rebuild + restart the API:
+   docker compose build --no-cache api
+   docker compose up -d api
+   docker compose logs --since=2m api
+
+Tests
+-----
+curl -s http://localhost/api/health
+curl -s http://localhost/api/matches
+curl -s -X POST http://localhost/api/matches -H 'Content-Type: application/json' -d '{"tournament_id":1,"court":"Court 1"}'
+curl -s http://localhost/api/tournaments/1/matches
+curl -s -X POST http://localhost/api/matches/1/score -H 'Content-Type: application/json' -d '{"score_a":6,"score_b":4,"finished":true}'
+curl -s -X DELETE http://localhost/api/matches/1
diff --git a/README_MATCHES_MIGRATION.txt b/README_MATCHES_MIGRATION.txt
new file mode 100644
index 0000000..fef802f
--- /dev/null
+++ b/README_MATCHES_MIGRATION.txt
@@ -0,0 +1,27 @@
+Super Sunday — SQL migration for the matches table
+=================================================
+
+Problem encountered:
+- The API expects the columns `player_a_id`, `player_b_id`, `finished`, `score_a`, `score_b`, etc.
+- Your current `matches` table does not contain them, hence the 42703 errors (undefined column).
+
+This patch adds the missing columns **without breaking the existing schema** and sets default values.
+
+Files:
+- backend/sql/migrate_matches_add_cols.sql
+- apply_matches_migration.sh
+
+Usage:
+1) Unzip at the project root:
+   unzip -o supersunday_matches_migration_patch.zip -d .
+
+2) Apply the migration:
+   ./apply_matches_migration.sh
+
+3) Check the table structure:
+   docker compose exec db psql -U postgres -d supersunday -c '\d+ matches'
+
+4) Re-test the routes:
+   curl -s http://localhost/api/matches
+   curl -s -X POST http://localhost/api/matches -H 'Content-Type: application/json' -d '{"tournament_id":1,"court":"Court 1"}'
+   curl -s -X POST http://localhost/api/matches/1/score -H 'Content-Type: application/json' -d '{"score_a":6,"score_b":4,"finished":true}'
diff --git a/README_MATCHES_TEAMS.txt b/README_MATCHES_TEAMS.txt
new file mode 100644
index 0000000..dfc79c8
--- /dev/null
+++ b/README_MATCHES_TEAMS.txt
@@ -0,0 +1,35 @@
+Super Sunday — matches patch (teams) + optional score columns
+=======================================================================
+
+This patch aligns the API with your current schema:
+- `matches` uses **team_a_id / team_b_id** (not player_*)
+- The default status is 'scheduled'
+- Scoring updates `score_a`, `score_b`, `finished` if the columns exist.
+  Otherwise the API returns 409 'missing_columns' and you can add the columns with the provided migration.
+
+Files
+--------
+- backend/src/routes/matches.js (replaces the old one)
+- backend/sql/add_scores_columns.sql (adds score_a / score_b if missing)
+- apply_add_scores_columns.sh (applies the migration)
+
+Installation
+------------
+1) Unzip at the project root:
+   unzip -o supersunday_matches_team_columns_patch.zip -d .
+
+2) (Optional, required for scoring) add the score columns:
+   ./apply_add_scores_columns.sh
+
+3) Rebuild + restart the API:
+   docker compose build --no-cache api
+   docker compose up -d api
+   docker compose logs --since=2m api
+
+Tests
+-----
+# Create a match (adjust tournament_id and team_* to your data)
+curl -s -X POST http://localhost/api/matches -H "Content-Type: application/json" --data-raw '{"tournament_id":1,"team_a_id":1,"team_b_id":2,"court":"Court 1"}'
+
+# Set a score (once the score_* columns have been added):
+curl -s -X POST http://localhost/api/matches/1/score -H "Content-Type: application/json" --data-raw '{"score_a":6,"score_b":4,"finished":true}'
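Note (editor's illustration, not part of the patch): the curl tests above can also be driven from Node. The sketch below assumes Node 18+ (global fetch) and the same http://localhost proxy used in the tests; it updates a score and surfaces the 409 'missing_columns' case described in README_MATCHES_TEAMS.txt.

    // score_match.js: illustrative sketch only
    async function scoreMatch(id, scoreA, scoreB, finished = false) {
      const res = await fetch(`http://localhost/api/matches/${id}/score`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ score_a: scoreA, score_b: scoreB, finished }),
      });
      const body = await res.json();
      if (res.status === 409 && body.error === 'missing_columns') {
        // score columns not migrated yet: run ./apply_add_scores_columns.sh first
        throw new Error(body.detail);
      }
      if (!res.ok) throw new Error(`HTTP ${res.status}: ${body.error || 'unknown_error'}`);
      return body; // the updated match row
    }

    scoreMatch(1, 6, 4, true).then(console.log).catch(console.error);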
diff --git a/apply_add_scores_columns.sh b/apply_add_scores_columns.sh
new file mode 100755
index 0000000..ec0cb2b
--- /dev/null
+++ b/apply_add_scores_columns.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+set -euo pipefail
+echo "📦 Adding the score_a/score_b columns if they are missing…"
+docker compose cp backend/sql/add_scores_columns.sql db:/tmp/add_scores_columns.sql
+docker compose exec db sh -lc 'psql -U postgres -d supersunday -f /tmp/add_scores_columns.sql'
+echo "✅ OK."
diff --git a/apply_matches_migration.sh b/apply_matches_migration.sh
new file mode 100755
index 0000000..4d29d52
--- /dev/null
+++ b/apply_matches_migration.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+echo "📦 Copying the SQL migration script…"
+docker compose cp backend/sql/migrate_matches_add_cols.sql db:/tmp/migrate_matches_add_cols.sql
+
+echo "🗃 Applying the migration to the 'supersunday' database…"
+docker compose exec db sh -lc 'psql -U postgres -d supersunday -f /tmp/migrate_matches_add_cols.sql'
+
+echo "✅ Migration complete."
diff --git a/apply_matches_sql.sh b/apply_matches_sql.sh
new file mode 100755
index 0000000..371aa4b
--- /dev/null
+++ b/apply_matches_sql.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+set -euo pipefail
+echo "📦 Copying SQL…"
+docker compose cp backend/sql/matches.sql db:/tmp/matches.sql
+docker compose cp backend/sql/seed_matches.sql db:/tmp/seed_matches.sql || true
+
+echo "🗃 Applying matches.sql…"
+docker compose exec db sh -lc 'psql -U postgres -d supersunday -f /tmp/matches.sql'
+
+echo "🌱 (Optional) demo seed…"
+docker compose exec db sh -lc 'psql -U postgres -d supersunday -f /tmp/seed_matches.sql' || true
+
+echo "✅ Done."
diff --git a/apply_seed_teams.sh b/apply_seed_teams.sh
new file mode 100755
index 0000000..bb57264
--- /dev/null
+++ b/apply_seed_teams.sh
@@ -0,0 +1,73 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+say(){ printf "\033[1;36m%s\033[0m\n" "$*"; }
+err(){ printf "\033[1;31m%s\033[0m\n" "$*" >&2; }
+
+DB_SVC="${DB_SVC:-db}"
+DB_NAME="${DB_NAME:-supersunday}"
+DB_USER="${DB_USER:-postgres}"
+WAIT_SECS="${WAIT_SECS:-60}"
+
+say "▶ Checking/starting the DB service: ${DB_SVC}"
+# Start it if it is not already up
+docker compose up -d "${DB_SVC}"
+
+# Poll until Postgres responds
+say "▶ Waiting for Postgres to respond (pg_isready) — timeout ${WAIT_SECS}s"
+start_ts=$(date +%s)
+while true; do
+  if docker compose exec -T "${DB_SVC}" sh -lc 'command -v pg_isready >/dev/null 2>&1 && pg_isready -U '"${DB_USER}"' -d '"${DB_NAME}"' -h 127.0.0.1 -p 5432 >/dev/null 2>&1'; then
+    break
+  fi
+  now=$(date +%s)
+  if [ $(( now - start_ts )) -ge "${WAIT_SECS}" ]; then
+    err "❌ Postgres is not responding after ${WAIT_SECS}s. Check the logs: docker compose logs --since=2m ${DB_SVC}"
+    exit 1
+  fi
+  sleep 2
+done
+say "✅ Postgres ready."
+
+say "▶ Injecting the seed SQL (Alpha & Beta)"
+docker compose exec -T "${DB_SVC}" sh -lc "cat > /tmp/seed_two_teams.sql" <<'SQL'
+-- Create the teams table if it does not exist (minimal compatible schema)
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM information_schema.tables
+    WHERE table_name='teams'
+  ) THEN
+    CREATE TABLE teams (
+      id SERIAL PRIMARY KEY,
+      name TEXT NOT NULL,
+      tournament_id INT NULL REFERENCES tournaments(id) ON DELETE CASCADE
+    );
+  END IF;
+END $$;
+
+-- Insert Team Alpha / Team Beta. If the tournament_id column exists,
+-- attach them to tournament #1 by default.
+DO $$
+BEGIN
+  IF EXISTS (
+    SELECT 1 FROM information_schema.columns
+    WHERE table_name='teams' AND column_name='tournament_id'
+  ) THEN
+    INSERT INTO teams (name, tournament_id)
+    VALUES ('Team Alpha', 1), ('Team Beta', 1)
+    ON CONFLICT DO NOTHING;
+  ELSE
+    INSERT INTO teams (name)
+    VALUES ('Team Alpha'), ('Team Beta')
+    ON CONFLICT DO NOTHING;
+  END IF;
+END $$;
+SQL
+
+docker compose exec -T "${DB_SVC}" sh -lc "psql -U ${DB_USER} -d ${DB_NAME} -f /tmp/seed_two_teams.sql"
+
+say "▶ Teams preview:"
+docker compose exec -T "${DB_SVC}" sh -lc "psql -U ${DB_USER} -d ${DB_NAME} -c \"SELECT id, name, tournament_id FROM teams ORDER BY id LIMIT 20;\""
+
+say '✅ Seed complete.'
\ No newline at end of file
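Note (editor's illustration, not part of the patch): apply_seed_teams.sh polls pg_isready before injecting SQL. The same wait can be done from Node; the sketch below assumes the pg package and the usual PG* environment variables (PGHOST, PGUSER, PGPASSWORD, PGDATABASE, PGPORT), which this patch does not set for you.

    // wait_for_postgres.js: illustrative sketch only
    const { Pool } = require('pg');

    async function waitForPostgres(timeoutMs = 60000) {
      const pool = new Pool(); // reads the PG* environment variables
      const deadline = Date.now() + timeoutMs;
      for (;;) {
        try {
          await pool.query('SELECT 1'); // same role as pg_isready: is the server accepting queries?
          await pool.end();
          return;
        } catch (e) {
          if (Date.now() > deadline) throw new Error(`Postgres not responding after ${timeoutMs} ms`);
          await new Promise((r) => setTimeout(r, 2000)); // retry every 2 s, like the shell loop
        }
      }
    }

    waitForPostgres().then(() => console.log('Postgres ready')).catch(console.error);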
diff --git a/backend/package.json b/backend/package.json
index ed2d3a8..255985f 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -1,18 +1,13 @@
 {
-  "name": "supersunday-backend",
+  "name": "supersunday-api",
   "version": "1.0.0",
-  "type": "module",
+  "description": "Super Sunday API (CommonJS)",
   "main": "src/index.js",
   "scripts": {
-    "start": "node src/index.js",
-    "dev": "node --watch src/index.js"
+    "start": "node src/index.js"
   },
   "dependencies": {
-    "cors": "^2.8.5",
-    "dotenv": "^16.6.1",
     "express": "^4.21.2",
-    "helmet": "^7.2.0",
-    "jsonwebtoken": "^9.0.2",
-    "pg": "^8.11.5"
+    "pg": "^8.12.0"
   }
-}
+}
\ No newline at end of file
diff --git a/backend/sql/add_scores_columns.sql b/backend/sql/add_scores_columns.sql
new file mode 100644
index 0000000..632cc6e
--- /dev/null
+++ b/backend/sql/add_scores_columns.sql
@@ -0,0 +1,4 @@
+-- Adds score_a and score_b if not present
+ALTER TABLE IF EXISTS matches
+  ADD COLUMN IF NOT EXISTS score_a INT NOT NULL DEFAULT 0,
+  ADD COLUMN IF NOT EXISTS score_b INT NOT NULL DEFAULT 0;
diff --git a/backend/sql/matches.sql b/backend/sql/matches.sql
new file mode 100644
index 0000000..f4693d3
--- /dev/null
+++ b/backend/sql/matches.sql
@@ -0,0 +1,13 @@
+-- Ensure the basic matches table exists
+CREATE TABLE IF NOT EXISTS matches (
+  id SERIAL PRIMARY KEY,
+  tournament_id INT NOT NULL REFERENCES tournaments(id) ON DELETE CASCADE,
+  player_a_id INT REFERENCES participants(id),
+  player_b_id INT REFERENCES participants(id),
+  court TEXT,
+  start_time TIMESTAMP NULL,
+  score_a INT DEFAULT 0,
+  score_b INT DEFAULT 0,
+  finished BOOLEAN DEFAULT FALSE,
+  created_at TIMESTAMP DEFAULT now()
+);
diff --git a/backend/sql/migrate_matches_add_cols.sql b/backend/sql/migrate_matches_add_cols.sql
new file mode 100644
index 0000000..7818350
--- /dev/null
+++ b/backend/sql/migrate_matches_add_cols.sql
@@ -0,0 +1,31 @@
+-- Migrate an existing matches table to the expected columns
+
+-- Add columns if they don't exist
+ALTER TABLE IF EXISTS matches
+  ADD COLUMN IF NOT EXISTS player_a_id INT NULL,
+  ADD COLUMN IF NOT EXISTS player_b_id INT NULL,
+  ADD COLUMN IF NOT EXISTS court TEXT NULL,
+  ADD COLUMN IF NOT EXISTS start_time TIMESTAMP NULL,
+  ADD COLUMN IF NOT EXISTS score_a INT NOT NULL DEFAULT 0,
+  ADD COLUMN IF NOT EXISTS score_b INT NOT NULL DEFAULT 0,
+  ADD COLUMN IF NOT EXISTS finished BOOLEAN NOT NULL DEFAULT FALSE;
+
+-- Optionally add FKs if the participants table exists (ADD CONSTRAINT has no IF NOT EXISTS form; duplicates are caught below)
+DO $$
+BEGIN
+  IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='participants') THEN
+    BEGIN
+      ALTER TABLE matches
+        ADD CONSTRAINT matches_player_a_fk FOREIGN KEY (player_a_id) REFERENCES participants(id) ON DELETE SET NULL,
+        ADD CONSTRAINT matches_player_b_fk FOREIGN KEY (player_b_id) REFERENCES participants(id) ON DELETE SET NULL;
+    EXCEPTION WHEN duplicate_object THEN
+      -- ignore if the constraints already exist
+      NULL;
+    END;
+  END IF;
+END $$;
+
+-- Ensure the defaults are respected on existing rows
+UPDATE matches SET finished = COALESCE(finished, FALSE);
+UPDATE matches SET score_a = COALESCE(score_a, 0);
+UPDATE matches SET score_b = COALESCE(score_b, 0);
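Note (editor's illustration, not part of the patch): to check from Node which of the columns the migration above adds are already present, a small information_schema query is enough. Same assumption as above: the pg package and the PG* environment variables.

    // check_matches_columns.js: illustrative sketch only
    const { Pool } = require('pg');
    const expected = ['player_a_id', 'player_b_id', 'court', 'start_time', 'score_a', 'score_b', 'finished'];

    (async () => {
      const pool = new Pool();
      const { rows } = await pool.query(
        `SELECT column_name FROM information_schema.columns WHERE table_name = 'matches'`
      );
      const present = new Set(rows.map((r) => r.column_name));
      for (const col of expected) {
        console.log(`${col}: ${present.has(col) ? 'present' : 'MISSING'}`);
      }
      await pool.end();
    })().catch(console.error);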
diff --git a/backend/sql/seed_matches.sql b/backend/sql/seed_matches.sql
new file mode 100644
index 0000000..b9bdc6c
--- /dev/null
+++ b/backend/sql/seed_matches.sql
@@ -0,0 +1,6 @@
+-- Example seeds (adjust IDs to your existing rows)
+INSERT INTO matches (tournament_id, player_a_id, player_b_id, court, start_time)
+VALUES
+  (1, NULL, NULL, 'Court 1', now() + interval '1 hour'),
+  (1, NULL, NULL, 'Court 2', now() + interval '2 hour')
+ON CONFLICT DO NOTHING;
diff --git a/backend/src/db.js b/backend/src/db.js
index 59d94e7..aaf7bd9 100644
--- a/backend/src/db.js
+++ b/backend/src/db.js
@@ -1,7 +1,3 @@
-/**
- * Minimal PG pool using env vars:
- * PGHOST, PGUSER, PGPASSWORD, PGDATABASE, PGPORT
- */
 const { Pool } = require('pg');
 
 const pool = new Pool({
diff --git a/backend/src/index.js b/backend/src/index.js
index db40f14..fc0ff90 100644
--- a/backend/src/index.js
+++ b/backend/src/index.js
@@ -1,28 +1,25 @@
 const express = require('express');
 const app = express();
+
 app.use(express.json());
 
-// Health
 app.get('/api/health', (req, res) => res.json({ ok: true, ts: Date.now() }));
 
-// Routes
 app.use('/api/tournaments', require('./routes/tournaments'));
 app.use('/api/participants', require('./routes/participants'));
+app.use('/api/matches', require('./routes/matches'));
 
-// "by tournament" nicer path
+// Convenience alias paths:
 app.get('/api/tournaments/:id/participants', (req, res, next) => {
   req.url = '/by-tournament/' + req.params.id;
   return require('./routes/participants')(req, res, next);
 });
+app.get('/api/tournaments/:id/matches', (req, res, next) => {
+  req.url = '/by-tournament/' + req.params.id;
+  return require('./routes/matches')(req, res, next);
+});
 
-// 404 for unknown API paths
 app.use('/api', (req, res) => res.status(404).json({ error: 'not_found' }));
 
 const PORT = Number(process.env.PORT || 4000);
 app.listen(PORT, () => console.log(`API listening on :${PORT}`));
-
-app.use('/api/participants', require('./routes/participants'));
-app.get('/api/tournaments/:id/participants', (req,res,next)=>{
-  req.url = '/by-tournament/' + req.params.id;
-  return require('./routes/participants')(req,res,next);
-});
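Note on the alias routes in src/index.js above: an Express Router is itself a middleware function, so calling require('./routes/matches')(req, res, next) after rewriting req.url re-dispatches the request into that router's /by-tournament/:id handler (and require() caching makes the repeated require cheap). An equivalent formulation that keeps the router in a variable, shown only as an illustration:

    // inside src/index.js (sketch, not what the patch ships)
    const matchesRouter = require('./routes/matches');
    app.use('/api/matches', matchesRouter);
    app.get('/api/tournaments/:id/matches', (req, res, next) => {
      req.url = '/by-tournament/' + req.params.id; // forward into the router
      return matchesRouter(req, res, next);
    });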
diff --git a/backend/src/routes/matches.js b/backend/src/routes/matches.js
index 599db23..0f2d144 100644
--- a/backend/src/routes/matches.js
+++ b/backend/src/routes/matches.js
@@ -1,20 +1,138 @@
-import { Router } from 'express';
-import pool from '../db.js';
-import { requireAuth } from '../middleware/auth.js';
+const express = require('express');
+const router = express.Router();
+const { pool } = require('../db');
 
-const router = Router();
+/**
+ * Expected schema (as observed in your database):
+ * - matches: id, tournament_id, team_a_id, team_b_id, court?, start_time?, status text default 'scheduled', finished bool
+ * - (optional) score_a, score_b → if they are missing, they can be added with the provided migration
+ */
 
-// Score a match (admin)
-router.post('/:id/score', requireAuth, async (req,res)=>{
-  const mid = Number(req.params.id);
-  const { score_a, score_b, done } = req.body || {};
-  if (!mid) return res.status(400).json({ error:'bad_match_id' });
-  const { rows } = await pool.query(
-    'update matches set score_a=$2, score_b=$3, done=coalesce($4, done) where id=$1 returning *',
-    [mid, score_a ?? 0, score_b ?? 0, done]
-  );
-  if (!rows[0]) return res.status(404).json({ error:'match_not_found' });
-  res.json(rows[0]);
+// Create a match (teams)
+router.post('/', async (req, res) => {
+  const { tournament_id, team_a_id, team_b_id, court, start_time, status } = req.body || {};
+  const tid = Number(tournament_id);
+  if (!tid) return res.status(400).json({ error: 'missing_tournament_id' });
+  try {
+    const { rows } = await pool.query(
+      `INSERT INTO matches (tournament_id, team_a_id, team_b_id, court, start_time, status)
+       VALUES ($1, $2, $3, $4, $5, COALESCE($6,'scheduled'))
+       RETURNING *`,
+      [tid, team_a_id || null, team_b_id || null, court || null, start_time || null, status || null]
+    );
+    res.status(201).json(rows[0]);
+  } catch (e) {
+    console.error('POST /matches error:', e);
+    res.status(500).json({ error: 'server_error', detail: e.message });
+  }
 });
 
-export default router;
+// List matches (optionally filtered by tournament)
+router.get('/', async (req, res) => {
+  const tid = Number(req.query.tid || 0);
+  try {
+    const { rows } = await pool.query(
+      tid
+        ? `SELECT * FROM matches WHERE tournament_id = $1 ORDER BY id DESC`
+        : `SELECT * FROM matches ORDER BY id DESC`,
+      tid ? [tid] : []
+    );
+    res.json(rows);
+  } catch (e) {
+    console.error('GET /matches', e);
+    res.status(500).json({ error: 'server_error' });
+  }
+});
+
+// Alias: GET /api/tournaments/:id/matches
+router.get('/by-tournament/:id', async (req, res) => {
+  const id = Number(req.params.id);
+  if (!id) return res.status(400).json({ error: 'bad_tournament_id' });
+  try {
+    const { rows } = await pool.query(`SELECT * FROM matches WHERE tournament_id = $1 ORDER BY id DESC`, [id]);
+    res.json(rows);
+  } catch (e) {
+    console.error('GET /matches/by-tournament/:id', e);
+    res.status(500).json({ error: 'server_error' });
+  }
+});
+
+// Score a match
+// If score_a/score_b do not exist yet, reply with a clear message (and point to the provided migration)
+router.post('/:id/score', async (req, res) => {
+  const id = Number(req.params.id);
+  if (!id) return res.status(400).json({ error: 'bad_id' });
+  const { score_a, score_b, finished } = req.body || {};
+
+  // Build the SET clause dynamically
+  const sets = [];
+  const vals = [];
+  let i = 1;
+  if (typeof score_a === 'number') { sets.push(`score_a = $${i++}`); vals.push(score_a); }
+  if (typeof score_b === 'number') { sets.push(`score_b = $${i++}`); vals.push(score_b); }
+  if (typeof finished === 'boolean') { sets.push(`finished = $${i++}`); vals.push(finished); }
+
+  if (sets.length === 0) return res.status(400).json({ error: 'no_fields' });
+  vals.push(id);
+
+  try {
+    const q = `UPDATE matches SET ${sets.join(', ')} WHERE id = $${i} RETURNING *`;
+    const { rows } = await pool.query(q, vals);
+    if (rows.length === 0) return res.status(404).json({ error: 'not_found' });
+    res.json(rows[0]);
+  } catch (e) {
+    // Column does not exist → suggest the migration
+    if (e && e.code === '42703') {
+      return res.status(409).json({
+        error: 'missing_columns',
+        detail: 'The score_a/score_b columns are missing. Run the provided SQL migration to add them.'
+      });
+    }
+    console.error('POST /matches/:id/score', e);
+    res.status(500).json({ error: 'server_error' });
+  }
+});
+
+// Generic update (teams, court, start_time, status, finished)
+router.put('/:id', async (req, res) => {
+  const id = Number(req.params.id);
+  if (!id) return res.status(400).json({ error: 'bad_id' });
+
+  const allowed = ['team_a_id','team_b_id','court','start_time','status','finished','tournament_id'];
+  const sets = [];
+  const vals = [];
+  let i = 1;
+  for (const k of allowed) {
+    if (Object.prototype.hasOwnProperty.call(req.body || {}, k)) {
+      sets.push(`${k} = $${i++}`);
+      vals.push(req.body[k]);
+    }
+  }
+  if (sets.length === 0) return res.status(400).json({ error: 'no_fields' });
+  vals.push(id);
+
+  try {
+    const { rows } = await pool.query(`UPDATE matches SET ${sets.join(', ')} WHERE id = $${i} RETURNING *`, vals);
+    if (rows.length === 0) return res.status(404).json({ error: 'not_found' });
+    res.json(rows[0]);
+  } catch (e) {
+    console.error('PUT /matches/:id', e);
+    res.status(500).json({ error: 'server_error' });
+  }
+});
+
+// Delete a match
+router.delete('/:id', async (req, res) => {
+  const id = Number(req.params.id);
+  if (!id) return res.status(400).json({ error: 'bad_id' });
+  try {
+    const result = await pool.query(`DELETE FROM matches WHERE id = $1`, [id]);
+    if (result.rowCount === 0) return res.status(404).json({ error: 'not_found' });
+    res.json({ ok: true, id });
+  } catch (e) {
+    console.error('DELETE /matches/:id', e);
+    res.status(500).json({ error: 'server_error' });
+  }
+});
+
+module.exports = router;
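Note on the two update handlers above (POST /:id/score and PUT /:id): they build the UPDATE statement dynamically; each provided field appends "column = $n" to a SET list and pushes its value, and the match id is always the last parameter. For a payload of {"score_a":6,"finished":true} on match 1, the route ends up executing the equivalent of the following (illustration only):

    const sets = ['score_a = $1', 'finished = $2'];
    const vals = [6, true, 1]; // the match id is appended last
    const q = `UPDATE matches SET ${sets.join(', ')} WHERE id = $3 RETURNING *`;
    // → UPDATE matches SET score_a = $1, finished = $2 WHERE id = $3 RETURNING *
    // pool.query(q, vals) returns the updated row; zero rows → 404 not_found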
diff --git a/backend/src/routes/participants.js b/backend/src/routes/participants.js
index b892209..58e4e60 100644
--- a/backend/src/routes/participants.js
+++ b/backend/src/routes/participants.js
@@ -2,7 +2,6 @@ const express = require('express');
 const router = express.Router();
 const { pool } = require('../db');
 
-// GET /api/tournaments/:id/participants
 router.get('/by-tournament/:id', async (req, res) => {
   const id = Number(req.params.id);
   if (!id) return res.status(400).json({ error: 'bad_tournament_id' });
@@ -18,7 +17,6 @@ router.get('/by-tournament/:id', async (req, res) => {
   }
 });
 
-// POST /api/participants { tournament_id, full_name }
 router.post('/', async (req, res) => {
   const { tournament_id, full_name } = req.body || {};
   const tid = Number(tournament_id);
diff --git a/backend/src/routes/tournaments.js b/backend/src/routes/tournaments.js
index c63433b..2213639 100644
--- a/backend/src/routes/tournaments.js
+++ b/backend/src/routes/tournaments.js
@@ -2,7 +2,6 @@ const express = require('express');
 const router = express.Router();
 const { pool } = require('../db');
 
-// GET /api/tournaments -> liste
 router.get('/', async (req, res) => {
   try {
     const { rows } = await pool.query(
@@ -15,7 +14,6 @@ router.get('/', async (req, res) => {
   }
 });
 
-// POST /api/tournaments -> créer
 router.post('/', async (req, res) => {
   const { name, location, start_date, end_date } = req.body || {};
   if (!name || !start_date || !end_date) {
@@ -35,7 +33,6 @@ router.post('/', async (req, res) => {
   }
 });
 
-// DELETE /api/tournaments/:id -> supprimer (avec nettoyage dépendances si pas de CASCADE)
 router.delete('/:id', async (req, res) => {
   const id = Number(req.params.id);
   if (!id) return res.status(400).json({ error: 'bad_id' });
@@ -43,18 +40,11 @@ router.delete('/:id', async (req, res) => {
   const client = await pool.connect();
   try {
     await client.query('BEGIN');
-
-    // Si votre schéma n'a PAS de ON DELETE CASCADE, on nettoie à la main :
-    // matches → participants → (tournament row)
     try { await client.query('DELETE FROM matches WHERE tournament_id = $1', [id]); } catch {}
     try { await client.query('DELETE FROM participants WHERE tournament_id = $1', [id]); } catch {}
-
     const result = await client.query('DELETE FROM tournaments WHERE id = $1', [id]);
     await client.query('COMMIT');
-
-    if (result.rowCount === 0) {
-      return res.status(404).json({ error: 'not_found' });
-    }
+    if (result.rowCount === 0) return res.status(404).json({ error: 'not_found' });
     res.json({ ok: true, id });
   } catch (e) {
     await client.query('ROLLBACK');
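Note (editor's illustration, not part of the patch): the DELETE handler above removes dependent matches and participants inside a single transaction before deleting the tournament row, which is useful when the schema has no ON DELETE CASCADE. Seen from a client (Node 18+ fetch assumed):

    (async () => {
      const res = await fetch('http://localhost/api/tournaments/1', { method: 'DELETE' });
      const body = await res.json();
      // 200 → { ok: true, id: 1 } (the tournament and its matches/participants are gone)
      // 404 → { error: 'not_found' } when no tournament has that id
      console.log(res.status, body);
    })().catch(console.error);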
diff --git a/create_match_with_teams.sh b/create_match_with_teams.sh
new file mode 100755
index 0000000..75cfb51
--- /dev/null
+++ b/create_match_with_teams.sh
@@ -0,0 +1,102 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Usage: ./create_match_with_teams.sh [TOURNAMENT_ID] [TEAM_A_NAME] [TEAM_B_NAME]
+TID="${1:-1}"
+TEAM_A="${2:-Team Alpha}"
+TEAM_B="${3:-Team Beta}"
+
+DB_SVC="${DB_SVC:-db}"
+DB_NAME="${DB_NAME:-supersunday}"
+DB_USER="${DB_USER:-postgres}"
+
+say(){ printf "\033[1;36m%s\033[0m\n" "$*"; }
+err(){ printf "\033[1;31m%s\033[0m\n" "$*" >&2; }
+
+say "▶ Checking that the DB is running"
+docker compose up -d "${DB_SVC}" >/dev/null
+
+say "▶ Making sure teams has (id, name, tournament_id)"
+docker compose exec -T "${DB_SVC}" sh -lc "cat > /tmp/ensure_teams_shape.sql" <<'SQL'
+DO $$
+BEGIN
+  IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='teams') THEN
+    CREATE TABLE teams (id SERIAL PRIMARY KEY);
+  END IF;
+
+  IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='teams' AND column_name='name') THEN
+    ALTER TABLE teams ADD COLUMN name TEXT;
+    UPDATE teams SET name = COALESCE(name, 'Team '||id);
+  END IF;
+
+  IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='teams' AND column_name='tournament_id') THEN
+    ALTER TABLE teams ADD COLUMN tournament_id INT NULL;
+  END IF;
+END $$;
+SQL
+docker compose exec -T "${DB_SVC}" sh -lc "psql -U ${DB_USER} -d ${DB_NAME} -f /tmp/ensure_teams_shape.sql"
+
+say "▶ Upserting teams '${TEAM_A}' and '${TEAM_B}' for tournament ${TID}"
+docker compose exec -T "${DB_SVC}" sh -lc "cat > /tmp/upsert_teams.sql" <<'SQL'
+\set team_a :'TEAM_A'
+\set team_b :'TEAM_B'
+\set tid :TID
+
+WITH
+sel_a AS (
+  SELECT id FROM teams
+  WHERE name = :'team_a' AND (tournament_id = :tid OR tournament_id IS NULL)
+  ORDER BY id LIMIT 1
+),
+ins_a AS (
+  INSERT INTO teams (name, tournament_id)
+  SELECT :'team_a', :tid
+  WHERE NOT EXISTS (SELECT 1 FROM sel_a)
+  RETURNING id
+),
+a AS (
+  SELECT COALESCE((SELECT id FROM sel_a),(SELECT id FROM ins_a)) AS id
+),
+sel_b AS (
+  SELECT id FROM teams
+  WHERE name = :'team_b' AND (tournament_id = :tid OR tournament_id IS NULL)
+  ORDER BY id LIMIT 1
+),
+ins_b AS (
+  INSERT INTO teams (name, tournament_id)
+  SELECT :'team_b', :tid
+  WHERE NOT EXISTS (SELECT 1 FROM sel_b)
+  RETURNING id
+),
+b AS (
+  SELECT COALESCE((SELECT id FROM sel_b),(SELECT id FROM ins_b)) AS id
+)
+SELECT a.id AS team_a_id, b.id AS team_b_id FROM a, b;
+SQL
+
+IDS=$(
+  docker compose exec -T "${DB_SVC}" sh -lc \
+    "psql -U ${DB_USER} -d ${DB_NAME} -t -A -F',' \
+      -v TEAM_A=\"${TEAM_A}\" -v TEAM_B=\"${TEAM_B}\" -v TID=${TID} \
+      -f /tmp/upsert_teams.sql"
+)
+
+TEAM_A_ID="${IDS%%,*}"
+TEAM_B_ID="${IDS##*,}"
+
+if [[ -z "${TEAM_A_ID}" || -z "${TEAM_B_ID}" ]]; then
+  err "❌ Could not determine the team IDs. Result: '${IDS}'"
+  exit 1
+fi
+say "✅ Teams: ${TEAM_A} -> ${TEAM_A_ID}, ${TEAM_B} -> ${TEAM_B_ID}"
+
+say "▶ Creating the match via the API"
+JSON_PAYLOAD=$(printf '{"tournament_id":%s,"team_a_id":%s,"team_b_id":%s,"court":"Court 1"}' "$TID" "$TEAM_A_ID" "$TEAM_B_ID")
+curl -s -X POST http://localhost/api/matches \
+  -H "Content-Type: application/json" \
+  --data-raw "${JSON_PAYLOAD}"
+echo
+
+say "▶ Matches for tournament ${TID}"
+curl -s "http://localhost/api/tournaments/${TID}/matches"
+echo
\ No newline at end of file
diff --git a/diag_502.sh b/diag_502.sh
new file mode 100755
index 0000000..fbcbaa2
--- /dev/null
+++ b/diag_502.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+say(){ printf "\033[1;36m%s\033[0m\n" "$*"; }
+err(){ printf "\033[1;31m%s\033[0m\n" "$*" >&2; }
+
+say "▶ docker compose ps"
+docker compose ps || { err "❌ Wrong directory?"; exit 1; }
+
+say "▶ Host → Nginx test"
+if curl -fsS http://localhost/api/health >/dev/null 2>&1; then
+  say "✅ Host OK: http://localhost/api/health"
+  exit 0
+else
+  err "⚠️ Host returns 502 (or another error). Continuing…"
+fi
+
+say "▶ Is the API container up?"
+if ! docker compose ps | awk 'NR>1{print $1,$4}' | grep -q 'api .*Up'; then
+  err "❌ api service is not UP → starting it…"
+  docker compose up -d api
+fi
+
+say "▶ Testing the API from inside the api container"
+docker compose exec api sh -lc 'apk add --no-cache curl >/dev/null 2>&1 || true; curl -i http://localhost:4000/api/health || true'
+
+say "▶ Testing from Nginx to api:4000"
+if docker compose exec web sh -lc 'wget -q -O- http://api:4000/api/health >/dev/null 2>&1'; then
+  say "✅ Nginx reaches api:4000"
+  TARGET="api:4000"
+else
+  err "⚠️ api:4000 unreachable. Trying supersunday_api:4000…"
+  if docker compose exec web sh -lc 'wget -q -O- http://supersunday_api:4000/api/health >/dev/null 2>&1'; then
+    say "✅ Nginx reaches supersunday_api:4000"
+    TARGET="supersunday_api:4000"
+  else
+    err "❌ Nginx cannot reach the API over the docker network."
+    say "web (Nginx) logs:"; docker compose logs --since=2m web || true
+    say "api logs:"; docker compose logs --since=2m api || true
+    exit 1
+  fi
+fi
+
+say "▶ Checking/forcing nginx.conf → upstream ${TARGET}"
+# Replace the upstream block with the right target
+if [ -f nginx/nginx.conf ]; then
+  # BSD sed (macOS) syntax: -i '' edits in place without a backup file
+  sed -i '' -e "s#upstream api_upstream {[^}]*}#upstream api_upstream { server ${TARGET}; keepalive 16; }#g" nginx/nginx.conf
+  docker compose restart web
+else
+  err "❌ nginx/nginx.conf not found"
+  exit 1
+fi
+
+say "▶ Re-testing host → Nginx"
+curl -i http://localhost/api/health || { err "❌ Still failing"; exit 1; }
+say "✅ OK via Nginx"
\ No newline at end of file
diff --git a/fix_and_seed_teams.sh b/fix_and_seed_teams.sh
new file mode 100755
index 0000000..5cb8c2e
--- /dev/null
+++ b/fix_and_seed_teams.sh
@@ -0,0 +1,106 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+say(){ printf "\033[1;36m%s\033[0m\n" "$*"; }
+err(){ printf "\033[1;31m%s\033[0m\n" "$*" >&2; }
+
+DB_SVC="${DB_SVC:-db}"
+DB_NAME="${DB_NAME:-supersunday}"
+DB_USER="${DB_USER:-postgres}"
+WAIT_SECS="${WAIT_SECS:-60}"
+
+say "▶ Starting/validating the DB service: ${DB_SVC}"
+docker compose up -d "${DB_SVC}" >/dev/null
+
+say "▶ Waiting for Postgres to respond (pg_isready)"
+start=$(date +%s)
+while true; do
+  if docker compose exec -T "${DB_SVC}" sh -lc 'command -v pg_isready >/dev/null 2>&1 && pg_isready -U '"${DB_USER}"' -d '"${DB_NAME}"' -h 127.0.0.1 -p 5432 >/dev/null 2>&1'; then
+    break
+  fi
+  [ $(( $(date +%s) - start )) -ge "${WAIT_SECS}" ] && { err "❌ Postgres is not responding"; exit 1; }
+  sleep 2
+done
+say "✅ Postgres ready."
+
+say "▶ Migrating the teams table (add name/tournament_id if missing, map from existing columns)"
+docker compose exec -T "${DB_SVC}" sh -lc "cat > /tmp/fix_teams.sql" <<'SQL'
+-- Create the table if it is missing (minimal structure)
+DO $$
+BEGIN
+  IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='teams') THEN
+    CREATE TABLE teams (
+      id SERIAL PRIMARY KEY
+      -- name and tournament_id are added in the next blocks
+    );
+  END IF;
+END $$;
+
+-- Add the name column if missing
+DO $$
+DECLARE
+  src_col text;
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM information_schema.columns
+    WHERE table_name='teams' AND column_name='name'
+  ) THEN
+    ALTER TABLE teams ADD COLUMN name TEXT;
+    -- Look for a plausible column to populate name from
+    SELECT c.column_name INTO src_col
+    FROM information_schema.columns c
+    WHERE c.table_name='teams'
+      AND c.column_name IN ('team_name','title','label','nom','libelle')
+    ORDER BY c.column_name
+    LIMIT 1;
+
+    IF src_col IS NOT NULL THEN
+      EXECUTE format('UPDATE teams SET name = %I WHERE name IS NULL', src_col);
+    END IF;
+
+    -- Make sure name is not NULL on existing rows
+    UPDATE teams SET name = COALESCE(name, 'Team '||id);
+  END IF;
+END $$;
+
+-- Add tournament_id if missing (referencing tournaments(id) when possible)
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM information_schema.columns
+    WHERE table_name='teams' AND column_name='tournament_id'
+  ) THEN
+    ALTER TABLE teams ADD COLUMN tournament_id INT NULL;
+    -- Add an FK if the tournaments table exists
+    -- (ADD CONSTRAINT has no IF NOT EXISTS form; duplicates are caught by the exception below)
+    IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='tournaments') THEN
+      BEGIN
+        ALTER TABLE teams
+          ADD CONSTRAINT teams_tournament_fk
+          FOREIGN KEY (tournament_id) REFERENCES tournaments(id) ON DELETE CASCADE;
+      EXCEPTION WHEN duplicate_object THEN
+        -- ignore
+        NULL;
+      END;
+    END IF;
+  END IF;
+END $$;
+
+-- Insert Alpha/Beta only if they do not already exist by name
+WITH have_alpha AS (
+  SELECT 1 FROM teams WHERE name ILIKE 'team alpha' LIMIT 1
+), have_beta AS (
+  SELECT 1 FROM teams WHERE name ILIKE 'team beta' LIMIT 1
+)
+INSERT INTO teams (name, tournament_id)
+SELECT v.name, v.tid
+FROM (VALUES ('Team Alpha', 1), ('Team Beta', 1)) AS v(name, tid)
+WHERE (v.name='Team Alpha' AND NOT EXISTS (SELECT 1 FROM have_alpha))
+   OR (v.name='Team Beta' AND NOT EXISTS (SELECT 1 FROM have_beta));
+SQL
+
+docker compose exec -T "${DB_SVC}" sh -lc "psql -U ${DB_USER} -d ${DB_NAME} -f /tmp/fix_teams.sql"
+
+say "▶ teams preview (id, name, tournament_id):"
+docker compose exec -T "${DB_SVC}" sh -lc "psql -U ${DB_USER} -d ${DB_NAME} -c \"SELECT id, name, tournament_id FROM teams ORDER BY id LIMIT 20;\""
+
+say "✅ teams migration/seed complete."
\ No newline at end of file