diff --git a/.gitignore b/.gitignore index 5ceb386..d75edea 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ venv +__pycache__ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 90d56a8..4d77501 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,9 +3,10 @@ FROM python:3.11-slim # Set working directory WORKDIR /app -# Install system dependencies for psycopg2 +# Install system dependencies for mysql-connector-python and tkinter RUN apt-get update && apt-get install -y \ - gcc libpq-dev && \ + gcc libmariadb-dev pkg-config mariadb-client \ + python3-tk xvfb && \ rm -rf /var/lib/apt/lists/* # Install Python dependencies @@ -14,6 +15,3 @@ RUN pip install --no-cache-dir -r requirements.txt # Copy source code COPY . . - -CMD ["python", "app.py"] - diff --git a/Makefile b/Makefile index 21c821d..134445c 100644 --- a/Makefile +++ b/Makefile @@ -1,34 +1,61 @@ -PROJECT_NAME=postgres_service +PROJECT_NAME=mysql_service COMPOSE=docker-compose -p $(PROJECT_NAME) -## up: Start the postgres and app containers in the background +# Check if Docker is running +.PHONY: check-docker +check-docker: + @docker info >nul 2>&1 || (echo. && echo ERROR: Docker is not running. Please start Docker Desktop and try again. && echo. && exit /b 1) + +## up: Start the mysql, api (port 5000), and api-test (port 5001) containers and seed default locations .PHONY: up -up: +up: check-docker + $(COMPOSE) up -d + @echo "Waiting for services to be ready..." + @timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true + @$(COMPOSE) exec api python src/scripts/seed_locations.py + +## up-empty: Start the mysql, api (port 5000), and api-test (port 5001) containers without seeding data +.PHONY: up-empty +up-empty: check-docker $(COMPOSE) up -d ## down: Stop and remove containers (keeps volumes) .PHONY: down -down: +down: check-docker $(COMPOSE) down +## status: Show status of all services +.PHONY: status +status: check-docker + $(COMPOSE) ps + ## logs: Follow logs from all services .PHONY: logs -logs: +logs: check-docker $(COMPOSE) logs -f -## psql: Open a psql shell into the postgres container -.PHONY: psql -psql: - $(COMPOSE) exec db psql -U postgres -d mydb +## mysql: Open a mysql shell into the mysql container +.PHONY: mysql +mysql: check-docker + $(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb + +## test: Open GUI test runner (starts services if not running, including test API on port 5001) +.PHONY: test +test: check-docker + @echo "Ensuring services are running..." + @$(COMPOSE) up -d + @echo "Waiting for services to be ready..." + @timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true + @python src/scripts/test_gui.py ## build: Build or rebuild services .PHONY: build -build: +build: check-docker $(COMPOSE) build ## clean: Remove containers, networks, volumes, and orphans .PHONY: clean -clean: +clean: check-docker $(COMPOSE) down -v --remove-orphans docker volume prune -f diff --git a/README.md b/README.md index 0914a52..31a528d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# PostgreSQL Service Setup +# Mil-SQL API Service -This project provides a simple Dockerized PostgreSQL database and a Python service for interacting with it. +This project provides a Dockerized MySQL database with a Flask REST API for managing locations and supplies inventory. 
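+
+For example, once the services are running (`make up`), the API can be exercised from Python with `requests` (an illustrative sketch; assumes the default port mapping on `localhost:5000`):
+
+```python
+import requests
+
+BASE = "http://localhost:5000"
+
+print(requests.get(f"{BASE}/health").json())         # {'status': 'healthy'}
+print(requests.get(f"{BASE}/api/locations").json())  # list of location objects
+```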
--- @@ -8,6 +8,7 @@ This project provides a simple Dockerized PostgreSQL database and a Python servi - Docker (>= 20.10) - Docker Compose (v2 recommended) - Make (optional, for convenience) +- Python 3.11+ (for local test GUI) --- @@ -17,3 +18,172 @@ This project provides a simple Dockerized PostgreSQL database and a Python servi ```bash make up ``` + +This will: +- Start a MySQL 8.0 database container +- Start a Flask API container on port 5000 (production database: `mydb`) +- Start a Flask API container on port 5001 (test database: `mydb_test`) +- Automatically initialize the database schema for both APIs (idempotent - safe to run multiple times) + +### 2. Run tests +```bash +make test +``` + +Opens a GUI test runner that allows you to: +- Validate table creation +- Test locations and supplies data insertion +- Interactively move supplies between storage locations + +--- + +## API Endpoints + +The Flask API runs on two ports: +- **Production API**: `http://localhost:5000` (database: `mydb`) +- **Test API**: `http://localhost:5001` (database: `mydb_test`) + +Both APIs provide the same endpoints: + +### Locations +- `GET /api/locations` - Get all locations +- `GET /api/locations/<name>` - Get a specific location +- `POST /api/locations` - Create a new location +- `PUT /api/locations/<name>` - Update a location +- `DELETE /api/locations/<name>` - Delete a location + +### Supplies +- `GET /api/supplies` - Get all supplies (optional `?location=<name>` filter) +- `GET /api/supplies/<id>` - Get a specific supply +- `POST /api/supplies` - Create/update a supply (adds to existing if same name+location) +- `PUT /api/supplies/<id>` - Update supply amount or last_order_date +- `DELETE /api/supplies/<id>` - Delete a supply +- `POST /api/supplies/move` - Move supplies between locations + +### Health Check +- `GET /health` - API health status + +--- + +## Database Initialization + +The Flask API (`src/api/app.py`) handles database schema initialization automatically on startup. It: + +1. **Creates tables** in the correct dependency order (idempotent) +2. **Only creates tables that don't exist** - safe to run multiple times +3. **Never drops existing data** - production-safe +4. **Handles dependencies** - automatically sorts tables by foreign key relationships + +### Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `DATABASE_URL` | `mysql://mysqluser:mysqlpassword@db:3306/mydb` | MySQL connection string | +| `DB_HOST` | `db` | Database hostname | +| `DB_PORT` | `3306` | Database port | +| `DB_USER` | `mysqluser` | Database username | +| `DB_PASSWORD` | `mysqlpassword` | Database password | +| `DB_NAME` | `mydb` | Database name | +| `PORT` | `5000` | Flask API port | +| `MYSQL_ROOT_PASSWORD` | `rootpassword` | MySQL root password | + +--- + +## Testing + +### Test API + +A separate test API instance runs on port 5001 and connects to the `mydb_test` database.
This allows test scripts to: +- Use the API endpoints without affecting production data +- Test API functionality in isolation +- Access the test database through the same API interface as production + +Test scripts can use the `TEST_API_URL` constant from `helpers.py`: +```python +from helpers import TEST_API_URL +import requests + +# Use test API (http://localhost:5001/api) +response = requests.get(f"{TEST_API_URL}/locations") +``` + +### Test Scripts + +- **`test_tables.py`** - Validates table creation in a test database +- **`test_locations_supplies.py`** - Tests locations and supplies with interactive GUI +- **`test_gui.py`** - GUI test runner that discovers and runs all test scripts + +### Running Tests + +```bash +make test +``` + +This opens a GUI window where you can: +- Run individual tests +- Run all tests +- View test results in real-time +- See table contents in interactive viewers + +**Note**: The test API (`api-test` service) starts automatically with `make up` or `make test` and connects to the `mydb_test` database. + +--- + +## Database Schema + +The database includes the following tables: +- `teams` - Team definitions +- `locations` - Storage locations (with coordinates for frontend positioning) +- `members` - Team members +- `weekly_reports` - Member progress reports +- `supplies` - Inventory items (with unique constraint on name+location) +- `orders` - Purchase orders +- `applicants` - Applicant information + +### Key Features + +- **Supplies normalization**: Each supply can exist in multiple locations with different amounts +- **Unique constraint**: `(name, location)` ensures no duplicate supply entries per location +- **Foreign keys**: Supplies reference locations, maintaining referential integrity + +See `src/sql/` for table definitions. 
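+
+As an illustrative sketch of how the `(name, location)` constraint behaves through the API (the supply name below is example data; assumes a seeded test instance on port 5001):
+
+```python
+import requests
+
+BASE = "http://localhost:5001/api"
+
+# Posting the same name+location twice adds to the existing amount
+# rather than creating a duplicate row.
+requests.post(f"{BASE}/supplies", json={"name": "M3 screws", "amount": 100, "location": "Drawer T1"})
+requests.post(f"{BASE}/supplies", json={"name": "M3 screws", "amount": 50, "location": "Drawer T1"})
+
+# Moving merges into the destination row (or creates it) and deletes the
+# source row when its amount reaches zero.
+requests.post(f"{BASE}/supplies/move", json={
+    "name": "M3 screws",
+    "from_location": "Drawer T1",
+    "to_location": "Workbench",
+    "amount": 25,
+})
+```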
+ +--- + +## Available Commands + +| Command | Description | +|---------|-------------| +| `make up` | Start all services | +| `make down` | Stop all services | +| `make build` | Build or rebuild services | +| `make test` | Run GUI test runner | +| `make mysql` | Open MySQL shell | +| `make logs` | View service logs | +| `make status` | Show service status | +| `make clean` | Remove containers, volumes, and networks | + +--- + +## Project Structure + +``` +mil-sql/ +├── src/ +│ ├── api/ # Flask API application +│ │ ├── app.py # Main Flask app +│ │ ├── db.py # Database connection pool +│ │ ├── models/ # Data models +│ │ └── routes/ # API route handlers +│ ├── scripts/ # Utility and test scripts +│ │ ├── helpers.py # Shared database helpers +│ │ ├── test_gui.py # GUI test runner +│ │ ├── test_tables.py +│ │ └── test_locations_supplies.py +│ └── sql/ # SQL table definitions +│ └── */table_*.sql +├── docker-compose.yaml # Service definitions +├── Dockerfile # Python API container +├── requirements.txt # Python dependencies +└── Makefile # Convenience commands +``` \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index 4907c24..86bfdd7 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,31 +1,64 @@ -version: "3.9" - services: db: - image: postgres:16 - container_name: postgres_db + image: mysql:8.0 + container_name: mysql_db restart: unless-stopped environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: mydb + MYSQL_ROOT_PASSWORD: rootpassword + MYSQL_DATABASE: mydb + MYSQL_USER: mysqluser + MYSQL_PASSWORD: mysqlpassword + ports: + - "3306:3306" + volumes: + - db_data:/var/lib/mysql + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "root", "-prootpassword"] + interval: 10s + timeout: 5s + retries: 5 + + api: + build: . + container_name: mysql_api + depends_on: + db: + condition: service_healthy + environment: + DATABASE_URL: mysql://mysqluser:mysqlpassword@db:3306/mydb + DB_HOST: db + DB_PORT: 3306 + DB_USER: mysqluser + DB_PASSWORD: mysqlpassword + DB_NAME: mydb + MYSQL_ROOT_PASSWORD: rootpassword + PORT: 5000 ports: - - "5432:5432" + - "5000:5000" volumes: - - db_data:/var/lib/postgresql/data + - .:/app + command: ["python", "-m", "src.api.app"] - app: + api-test: build: . - container_name: postgres_app + container_name: mysql_api_test depends_on: - - db + db: + condition: service_healthy environment: - DATABASE_URL: postgres://postgres:postgres@db:5432/mydb + DATABASE_URL: mysql://mysqluser:mysqlpassword@db:3306/mydb_test + DB_HOST: db + DB_PORT: 3306 + DB_USER: mysqluser + DB_PASSWORD: mysqlpassword + DB_NAME: mydb_test + MYSQL_ROOT_PASSWORD: rootpassword + PORT: 5001 + ports: + - "5001:5001" volumes: - .:/app - - ./src/sql:/docker-entrypoint-initb.d - command: ["python", "src/scripts/app.py"] + command: ["python", "-m", "src.api.app"] volumes: db_data: - diff --git a/requirements.txt b/requirements.txt index b67da01..c032353 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,7 @@ -psycopg2-binary==2.9.9 +mysql-connector-python==8.2.0 sqlalchemy==2.0.30 alembic==1.13.2 +flask==3.0.0 +flask-cors==4.0.0 +requests==2.31.0 diff --git a/src/api/__init__.py b/src/api/__init__.py new file mode 100644 index 0000000..4b449e7 --- /dev/null +++ b/src/api/__init__.py @@ -0,0 +1,2 @@ +# API package + diff --git a/src/api/app.py b/src/api/app.py new file mode 100644 index 0000000..517c3f5 --- /dev/null +++ b/src/api/app.py @@ -0,0 +1,100 @@ +""" +Flask API application for mil-sql. 
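+
+On import this module registers the locations and supplies blueprints and
+runs initialize_schema() so that missing tables are created before the app
+starts serving requests.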
+""" +import os +import sys +from pathlib import Path + +# Add src to path for imports (must be before other imports) +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from flask import Flask +from flask_cors import CORS +from src.api.routes.locations import locations_bp +from src.api.routes.supplies import supplies_bp + +# Import helpers for schema initialization +from src.scripts.helpers import ( + get_sql_base_path, + discover_table_files, + topological_sort_tables, + table_exists, + execute_sql_file +) +from src.api.db import get_db + +app = Flask(__name__) +CORS(app) # Enable CORS for milventory frontend + +# Register blueprints +app.register_blueprint(locations_bp, url_prefix='/api/locations') +app.register_blueprint(supplies_bp, url_prefix='/api/supplies') + + +def initialize_schema(): + """Initialize database schema if tables are missing.""" + try: + print("🔍 Checking database schema...") + conn = get_db() + cur = conn.cursor() + + # Get SQL base path + SQL_BASE_PATH = get_sql_base_path(__file__) + + # Discover all table files + table_files = discover_table_files(SQL_BASE_PATH) + + if not table_files: + print("⚠ No table_*.sql files found, skipping initialization") + cur.close() + conn.close() + return + + # Sort tables by dependency order + sorted_tables = topological_sort_tables(table_files) + + # Check which tables are missing + missing_tables = [] + for table_name, sql_file in sorted_tables: + if not table_exists(cur, table_name): + missing_tables.append((table_name, sql_file)) + + if not missing_tables: + print("✓ All tables exist, schema is up to date") + cur.close() + conn.close() + return + + # Create missing tables + print(f"📋 Creating {len(missing_tables)} missing table(s)...") + success_count = 0 + for table_name, sql_file in missing_tables: + description = f"{table_name} table" + if execute_sql_file(cur, sql_file, description): + success_count += 1 + + conn.commit() + print(f"✓ Schema initialization complete ({success_count}/{len(missing_tables)} tables created)") + + cur.close() + conn.close() + + except Exception as e: + print(f"⚠ Schema initialization warning: {e}") + print(" API will continue, but some endpoints may not work until tables are created") + + +# Initialize schema on startup +initialize_schema() + + +@app.route('/health', methods=['GET']) +def health_check(): + """Health check endpoint.""" + return {'status': 'healthy'}, 200 + + +if __name__ == '__main__': + port = int(os.getenv('PORT', 5000)) + app.run(host='0.0.0.0', port=port, debug=True) + diff --git a/src/api/db.py b/src/api/db.py new file mode 100644 index 0000000..deb998d --- /dev/null +++ b/src/api/db.py @@ -0,0 +1,41 @@ +""" +Database connection pool for the API. +""" +import mysql.connector +from mysql.connector import pooling +import os + +# Database configuration from environment variables +config = { + 'host': os.getenv('DB_HOST', 'db'), + 'port': int(os.getenv('DB_PORT', 3306)), + 'user': os.getenv('DB_USER', 'mysqluser'), + 'password': os.getenv('DB_PASSWORD', 'mysqlpassword'), + 'database': os.getenv('DB_NAME', 'mydb'), + 'pool_name': 'mil_sql_pool', + 'pool_size': int(os.getenv('DB_POOL_SIZE', 10)), # Increased default to 10, configurable via env + 'pool_reset_session': True +} + +# Create connection pool +try: + connection_pool = pooling.MySQLConnectionPool(**config) +except Exception as e: + print(f"Error creating connection pool: {e}") + connection_pool = None + + +def get_db(): + """ + Get a database connection from the pool. 
+ + Returns: + mysql.connector.connection.MySQLConnection: Database connection + + Raises: + Exception: If connection pool is not initialized + """ + if connection_pool is None: + raise Exception("Database connection pool not initialized") + return connection_pool.get_connection() + diff --git a/src/api/models/__init__.py b/src/api/models/__init__.py new file mode 100644 index 0000000..4efde13 --- /dev/null +++ b/src/api/models/__init__.py @@ -0,0 +1,2 @@ +# Models package + diff --git a/src/api/models/location.py b/src/api/models/location.py new file mode 100644 index 0000000..bb37a6f --- /dev/null +++ b/src/api/models/location.py @@ -0,0 +1,73 @@ +""" +Location model for API responses. +""" +from dataclasses import dataclass +from typing import Optional, Dict, Any + + +@dataclass +class Location: + """Location model representing a storage location.""" + name: str + x: int + y: int + width: int + height: int + type: str + + @classmethod + def from_db_row(cls, row: tuple) -> 'Location': + """ + Create Location from database row. + + Args: + row: Tuple from database query (name, x, y, width, height, type) + + Returns: + Location instance + """ + return cls( + name=row[0], + x=row[1], + y=row[2], + width=row[3], + height=row[4], + type=row[5] + ) + + def to_dict(self) -> Dict[str, Any]: + """ + Convert Location to dictionary for JSON serialization. + + Returns: + Dictionary representation of the location + """ + return { + 'name': self.name, + 'x': self.x, + 'y': self.y, + 'width': self.width, + 'height': self.height, + 'type': self.type + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> 'Location': + """ + Create Location from dictionary. + + Args: + data: Dictionary with location data + + Returns: + Location instance + """ + return cls( + name=data['name'], + x=int(data['x']), + y=int(data['y']), + width=int(data['width']), + height=int(data['height']), + type=data['type'] + ) + diff --git a/src/api/models/supply.py b/src/api/models/supply.py new file mode 100644 index 0000000..7d40dcd --- /dev/null +++ b/src/api/models/supply.py @@ -0,0 +1,59 @@ +""" +Supply data model. +""" +from dataclasses import dataclass +from typing import Optional +from datetime import date + + +@dataclass +class Supply: + """Supply inventory entry model.""" + id: Optional[int] = None + name: str = "" + amount: int = 0 + last_order_date: Optional[date] = None + location: str = "" + + @classmethod + def from_db_row(cls, row): + """Create Supply from database row tuple. + + Args: + row: Tuple from database query (id, name, amount, last_order_date, location) + """ + return cls( + id=row[0], + name=row[1], + amount=row[2], + last_order_date=row[3], + location=row[4] + ) + + @classmethod + def from_dict(cls, data: dict): + """Create Supply from dictionary. 
+ + Args: + data: Dictionary with supply fields + """ + return cls( + id=data.get('id'), + name=data.get('name', ''), + amount=data.get('amount', 0), + last_order_date=data.get('last_order_date'), + location=data.get('location', '') + ) + + def to_dict(self): + """Convert Supply to dictionary.""" + result = { + 'id': self.id, + 'name': self.name, + 'amount': self.amount, + 'location': self.location + } + if self.last_order_date: + result['last_order_date'] = self.last_order_date.isoformat() if hasattr(self.last_order_date, 'isoformat') else str(self.last_order_date) + return result + diff --git a/src/api/routes/__init__.py b/src/api/routes/__init__.py new file mode 100644 index 0000000..fb42c31 --- /dev/null +++ b/src/api/routes/__init__.py @@ -0,0 +1,2 @@ +# Routes package + diff --git a/src/api/routes/locations.py b/src/api/routes/locations.py new file mode 100644 index 0000000..aba9424 --- /dev/null +++ b/src/api/routes/locations.py @@ -0,0 +1,220 @@ +""" +Location API routes. +""" +import sys +from pathlib import Path + +# Add src to path for imports (must be before other imports) +sys.path.insert(0, str(Path(__file__).parent.parent.parent)) + +from flask import Blueprint, request, jsonify +import mysql.connector +from src.api.db import get_db +from src.api.models.location import Location + +locations_bp = Blueprint('locations', __name__) + + +@locations_bp.route('', methods=['GET']) +def get_locations(): + """ + GET /api/locations + Get all locations. + + Returns: + JSON array of all locations + """ + try: + conn = get_db() + cur = conn.cursor() + cur.execute("SELECT name, x, y, width, height, type FROM locations ORDER BY name") + rows = cur.fetchall() + locations = [Location.from_db_row(row).to_dict() for row in rows] + cur.close() + conn.close() + return jsonify(locations), 200 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@locations_bp.route('/<name>', methods=['GET']) +def get_location(name): + """ + GET /api/locations/<name> + Get a specific location by name. + + Args: + name: Location name + + Returns: + JSON object of the location or 404 if not found + """ + try: + conn = get_db() + cur = conn.cursor() + cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,)) + row = cur.fetchone() + cur.close() + conn.close() + + if row: + location = Location.from_db_row(row).to_dict() + return jsonify(location), 200 + else: + return jsonify({'error': 'Location not found'}), 404 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@locations_bp.route('', methods=['POST']) +def create_location(): + """ + POST /api/locations + Create a new location.
+ + Request body: + { + "name": "string", + "x": int, + "y": int, + "width": int, + "height": int, + "type": "string" + } + + Returns: + JSON object of the created location + """ + try: + data = request.json + if not data: + return jsonify({'error': 'Request body is required'}), 400 + + # Validate required fields + required_fields = ['name', 'x', 'y', 'width', 'height', 'type'] + for field in required_fields: + if field not in data: + return jsonify({'error': f'Missing required field: {field}'}), 400 + + location = Location.from_dict(data) + + conn = get_db() + cur = conn.cursor() + cur.execute( + "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)", + (location.name, location.x, location.y, location.width, location.height, location.type) + ) + conn.commit() + cur.close() + conn.close() + + return jsonify(location.to_dict()), 201 + except mysql.connector.IntegrityError as e: + if 'Duplicate entry' in str(e): + return jsonify({'error': 'Location with this name already exists'}), 409 + return jsonify({'error': str(e)}), 400 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@locations_bp.route('/<name>', methods=['PUT']) +def update_location(name): + """ + PUT /api/locations/<name> + Update an existing location. + + Args: + name: Location name to update + + Request body: + { + "x": int, + "y": int, + "width": int, + "height": int, + "type": "string" + } + Note: name cannot be updated via PUT + + Returns: + JSON object of the updated location + """ + try: + data = request.json + if not data: + return jsonify({'error': 'Request body is required'}), 400 + + # Validate fields (name is not updatable via PUT) + updatable_fields = ['x', 'y', 'width', 'height', 'type'] + update_data = {k: v for k, v in data.items() if k in updatable_fields} + + if not update_data: + return jsonify({'error': 'No valid fields to update'}), 400 + + conn = get_db() + cur = conn.cursor() + + # Check if location exists + cur.execute("SELECT name FROM locations WHERE name = %s", (name,)) + if not cur.fetchone(): + cur.close() + conn.close() + return jsonify({'error': 'Location not found'}), 404 + + # Build update query dynamically + set_clauses = [] + values = [] + for field, value in update_data.items(): + set_clauses.append(f"{field} = %s") + values.append(value) + values.append(name) + + query = f"UPDATE locations SET {', '.join(set_clauses)} WHERE name = %s" + cur.execute(query, values) + conn.commit() + + # Fetch updated location + cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,)) + row = cur.fetchone() + location = Location.from_db_row(row).to_dict() + + cur.close() + conn.close() + + return jsonify(location), 200 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@locations_bp.route('/<name>', methods=['DELETE']) +def delete_location(name): + """ + DELETE /api/locations/<name> + Delete a location.
+ + Args: + name: Location name to delete + + Returns: + 204 No Content on success, 404 if not found + """ + try: + conn = get_db() + cur = conn.cursor() + + # Check if location exists + cur.execute("SELECT name FROM locations WHERE name = %s", (name,)) + if not cur.fetchone(): + cur.close() + conn.close() + return jsonify({'error': 'Location not found'}), 404 + + cur.execute("DELETE FROM locations WHERE name = %s", (name,)) + conn.commit() + cur.close() + conn.close() + + return '', 204 + except Exception as e: + return jsonify({'error': str(e)}), 500 + diff --git a/src/api/routes/supplies.py b/src/api/routes/supplies.py new file mode 100644 index 0000000..d320d67 --- /dev/null +++ b/src/api/routes/supplies.py @@ -0,0 +1,395 @@ +""" +Supply API routes. +""" +import sys +from pathlib import Path + +# Add src to path for imports (must be before other imports) +sys.path.insert(0, str(Path(__file__).parent.parent.parent)) + +from flask import Blueprint, request, jsonify +import mysql.connector +from src.api.db import get_db +from src.api.models.supply import Supply + +supplies_bp = Blueprint('supplies', __name__) + + +@supplies_bp.route('', methods=['GET']) +def get_supplies(): + """ + GET /api/supplies + Get all supplies, optionally filtered by location. + + Query parameters: + location: Optional location name to filter by + + Returns: + JSON array of all supplies + """ + try: + location_filter = request.args.get('location') + + conn = get_db() + cur = conn.cursor() + + if location_filter: + cur.execute( + "SELECT id, name, amount, last_order_date, location FROM supplies WHERE location = %s ORDER BY name", + (location_filter,) + ) + else: + cur.execute("SELECT id, name, amount, last_order_date, location FROM supplies ORDER BY name, location") + + rows = cur.fetchall() + supplies = [Supply.from_db_row(row).to_dict() for row in rows] + cur.close() + conn.close() + return jsonify(supplies), 200 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@supplies_bp.route('/<int:supply_id>', methods=['GET']) +def get_supply(supply_id): + """ + GET /api/supplies/<id> + Get a specific supply by ID. + + Args: + supply_id: Supply ID + + Returns: + JSON object of the supply or 404 if not found + """ + try: + conn = get_db() + cur = conn.cursor() + cur.execute( + "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s", + (supply_id,) + ) + row = cur.fetchone() + cur.close() + conn.close() + + if row: + supply = Supply.from_db_row(row).to_dict() + return jsonify(supply), 200 + else: + return jsonify({'error': 'Supply not found'}), 404 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@supplies_bp.route('', methods=['POST']) +def create_supply(): + """ + POST /api/supplies + Create a new supply entry.
+ + Request body: + { + "name": "string", + "amount": int, + "last_order_date": "YYYY-MM-DD" (optional), + "location": "string" + } + + Returns: + JSON object of the created supply + """ + try: + data = request.json + if not data: + return jsonify({'error': 'Request body is required'}), 400 + + # Validate required fields + required_fields = ['name', 'amount', 'location'] + for field in required_fields: + if field not in data: + return jsonify({'error': f'Missing required field: {field}'}), 400 + + if data['amount'] < 0: + return jsonify({'error': 'Amount cannot be negative'}), 400 + + conn = get_db() + cur = conn.cursor() + + # Check if supply already exists at this location + cur.execute( + "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", + (data['name'], data['location']) + ) + existing = cur.fetchone() + + if existing: + # Update existing entry by adding to amount + new_amount = existing[1] + data['amount'] + cur.execute( + "UPDATE supplies SET amount = %s WHERE id = %s", + (new_amount, existing[0]) + ) + supply_id = existing[0] + else: + # Create new entry + cur.execute( + "INSERT INTO supplies (name, amount, last_order_date, location) VALUES (%s, %s, %s, %s)", + (data['name'], data['amount'], data.get('last_order_date'), data['location']) + ) + supply_id = cur.lastrowid + + conn.commit() + + # Fetch the created/updated supply + cur.execute( + "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s", + (supply_id,) + ) + row = cur.fetchone() + supply = Supply.from_db_row(row).to_dict() + + cur.close() + conn.close() + + return jsonify(supply), 201 + except mysql.connector.IntegrityError as e: + if 'foreign key constraint' in str(e).lower(): + return jsonify({'error': 'Location does not exist'}), 400 + return jsonify({'error': str(e)}), 400 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@supplies_bp.route('/<int:supply_id>', methods=['PUT']) +def update_supply(supply_id): + """ + PUT /api/supplies/<id> + Update an existing supply entry.
+ + Args: + supply_id: Supply ID to update + + Request body: + { + "amount": int (optional), + "last_order_date": "YYYY-MM-DD" (optional) + } + Note: name and location cannot be updated via PUT (use move endpoint) + + Returns: + JSON object of the updated supply + """ + try: + data = request.json + if not data: + return jsonify({'error': 'Request body is required'}), 400 + + # Validate fields (name and location are not updatable via PUT) + updatable_fields = ['amount', 'last_order_date'] + update_data = {k: v for k, v in data.items() if k in updatable_fields} + + if not update_data: + return jsonify({'error': 'No valid fields to update'}), 400 + + if 'amount' in update_data and update_data['amount'] < 0: + return jsonify({'error': 'Amount cannot be negative'}), 400 + + conn = get_db() + cur = conn.cursor() + + # Check if supply exists + cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,)) + if not cur.fetchone(): + cur.close() + conn.close() + return jsonify({'error': 'Supply not found'}), 404 + + # Build update query dynamically + set_clauses = [] + values = [] + for field, value in update_data.items(): + set_clauses.append(f"{field} = %s") + values.append(value) + values.append(supply_id) + + query = f"UPDATE supplies SET {', '.join(set_clauses)} WHERE id = %s" + cur.execute(query, values) + conn.commit() + + # Fetch updated supply + cur.execute( + "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s", + (supply_id,) + ) + row = cur.fetchone() + supply = Supply.from_db_row(row).to_dict() + + cur.close() + conn.close() + + return jsonify(supply), 200 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@supplies_bp.route('/<int:supply_id>', methods=['DELETE']) +def delete_supply(supply_id): + """ + DELETE /api/supplies/<id> + Delete a supply entry. + + Args: + supply_id: Supply ID to delete + + Returns: + 204 No Content on success, 404 if not found + """ + try: + conn = get_db() + cur = conn.cursor() + + # Check if supply exists + cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,)) + if not cur.fetchone(): + cur.close() + conn.close() + return jsonify({'error': 'Supply not found'}), 404 + + cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) + conn.commit() + cur.close() + conn.close() + + return '', 204 + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@supplies_bp.route('/move', methods=['POST']) +def move_supplies(): + """ + POST /api/supplies/move + Move supplies from one location to another.
+ + Request body: + { + "name": "string", # Supply name + "from_location": "string", # Source location + "to_location": "string", # Destination location + "amount": int # Amount to move (optional, defaults to all) + } + + Returns: + JSON object with move results: + { + "moved": int, # Amount actually moved + "from_remaining": int, # Remaining at source + "to_total": int # Total at destination after move + } + """ + try: + data = request.json + if not data: + return jsonify({'error': 'Request body is required'}), 400 + + # Validate required fields + required_fields = ['name', 'from_location', 'to_location'] + for field in required_fields: + if field not in data: + return jsonify({'error': f'Missing required field: {field}'}), 400 + + if data['from_location'] == data['to_location']: + return jsonify({'error': 'Source and destination locations must be different'}), 400 + + amount_to_move = data.get('amount') # None means move all + + if amount_to_move is not None and amount_to_move <= 0: + return jsonify({'error': 'Amount to move must be positive'}), 400 + + conn = get_db() + cur = conn.cursor() + + # Get source supply entry + cur.execute( + "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", + (data['name'], data['from_location']) + ) + source_entry = cur.fetchone() + + if not source_entry: + cur.close() + conn.close() + return jsonify({'error': f'Supply "{data["name"]}" not found at location "{data["from_location"]}"'}), 404 + + source_id, source_amount = source_entry + + # Determine how much to move + if amount_to_move is None: + amount_to_move = source_amount # Move all + elif amount_to_move > source_amount: + cur.close() + conn.close() + return jsonify({ + 'error': f'Cannot move {amount_to_move} units. Only {source_amount} available at source location' + }), 400 + + # Get destination supply entry (if exists) + cur.execute( + "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", + (data['name'], data['to_location']) + ) + dest_entry = cur.fetchone() + + # Calculate new amounts + new_source_amount = source_amount - amount_to_move + if dest_entry: + # Destination exists - add to it + dest_id, dest_amount = dest_entry + new_dest_amount = dest_amount + amount_to_move + + # Update destination + cur.execute( + "UPDATE supplies SET amount = %s WHERE id = %s", + (new_dest_amount, dest_id) + ) + else: + # Destination doesn't exist - create new entry + cur.execute( + "INSERT INTO supplies (name, amount, location) VALUES (%s, %s, %s)", + (data['name'], amount_to_move, data['to_location']) + ) + new_dest_amount = amount_to_move + + # Update or delete source + if new_source_amount > 0: + # Update source with remaining amount + cur.execute( + "UPDATE supplies SET amount = %s WHERE id = %s", + (new_source_amount, source_id) + ) + else: + # Delete source entry if amount becomes 0 + cur.execute("DELETE FROM supplies WHERE id = %s", (source_id,)) + + conn.commit() + + result = { + 'moved': amount_to_move, + 'from_remaining': new_source_amount, + 'to_total': new_dest_amount + } + + cur.close() + conn.close() + + return jsonify(result), 200 + except mysql.connector.IntegrityError as e: + if 'foreign key constraint' in str(e).lower(): + return jsonify({'error': 'Location does not exist'}), 400 + if 'unique_supply_location' in str(e).lower(): + return jsonify({'error': 'Supply already exists at destination (this should not happen)'}), 500 + return jsonify({'error': str(e)}), 400 + except Exception as e: + return jsonify({'error': str(e)}), 500 + diff --git a/src/scripts/app.py 
b/src/scripts/app.py deleted file mode 100644 index 9ea61ca..0000000 --- a/src/scripts/app.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -import psycopg2 - -DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") - -try: - conn = psycopg2.connect(DATABASE_URL) - cur = conn.cursor() - cur.execute("SELECT version();") - print("Postgres version:", cur.fetchone()) - cur.close() - conn.close() -except Exception as e: - print("Database connection failed:", e) - diff --git a/src/scripts/command_drop_production_database.py b/src/scripts/command_drop_production_database.py new file mode 100644 index 0000000..724ad08 --- /dev/null +++ b/src/scripts/command_drop_production_database.py @@ -0,0 +1,99 @@ +""" +Command script to drop the production database. + +⚠️ WARNING: This will permanently delete all data in the production database! +This action cannot be undone. + +Usage: + python src/scripts/command_drop_production_database.py +""" +import os +import sys +import mysql.connector +from helpers import parse_database_url + +# Get database URL +DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") + + +def drop_production_database(): + """Drop the production database.""" + try: + # Parse database URL + db_params = parse_database_url(DATABASE_URL) + database_name = db_params['database'] + + print("⚠️ WARNING: You are about to drop the production database!") + print(f" Database: {database_name}") + print(f" Host: {db_params['host']}:{db_params['port']}") + print() + + # Get root password for database operations + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # Connect as root to drop database + root_conn_params = { + 'host': db_params['host'], + 'port': db_params['port'], + 'user': 'root', + 'password': root_password + } + + print("🔌 Connecting to MySQL server as root...") + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + + # Verify connection + root_cur.execute("SELECT VERSION();") + version = root_cur.fetchone()[0] + print(f"✓ Connected to MySQL: {version}") + + # Check if database exists + root_cur.execute( + "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA WHERE SCHEMA_NAME = %s", + (database_name,) + ) + db_exists = root_cur.fetchone() is not None + + if not db_exists: + print(f"\n⚠️ Database '{database_name}' does not exist. Nothing to drop.") + root_cur.close() + root_conn.close() + return + + # Get list of tables before dropping + root_cur.execute(f"USE `{database_name}`") + root_cur.execute("SHOW TABLES") + tables = root_cur.fetchall() + table_count = len(tables) + + print(f"\n📊 Database '{database_name}' contains {table_count} table(s):") + if tables: + for table in tables[:10]: # Show first 10 + print(f" - {table[0]}") + if table_count > 10: + print(f" ... 
and {table_count - 10} more") + + print(f"\n🗑️ Dropping database '{database_name}'...") + root_cur.execute(f"DROP DATABASE IF EXISTS `{database_name}`") + root_conn.commit() + + print(f"✓ Database '{database_name}' has been dropped successfully!") + print(f" All {table_count} table(s) and all data have been permanently deleted.") + + root_cur.close() + root_conn.close() + + except mysql.connector.Error as e: + print(f"\n✗ Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"\n✗ Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + drop_production_database() + diff --git a/src/scripts/helpers.py b/src/scripts/helpers.py new file mode 100644 index 0000000..bcf8532 --- /dev/null +++ b/src/scripts/helpers.py @@ -0,0 +1,208 @@ +""" +Shared helper functions for database schema management. + +This module contains common utilities used by app.py and test_tables.py +for discovering, parsing, and managing database tables. +""" +from pathlib import Path +import re +from urllib.parse import urlparse +from collections import defaultdict, deque +import os + +# API URLs for test scripts +# Production API (port 5000, database: mydb) +API_URL = os.getenv("API_URL", "http://localhost:5000/api") + +# Test API (port 5001, database: mydb_test) +# Detect if running in Docker (check for /app path or container name) +# When in Docker, use service name 'api-test', otherwise use 'localhost' +if os.path.exists("/app") or os.getenv("HOSTNAME", "").startswith("mysql_api"): + # Running in Docker container - use service name + TEST_API_URL = os.getenv("TEST_API_URL", "http://api-test:5001/api") +else: + # Running locally - use localhost + TEST_API_URL = os.getenv("TEST_API_URL", "http://localhost:5001/api") + + +def get_sql_base_path(script_file): + """ + Get the SQL base path, works in both Docker and local environments. + + Args: + script_file: The __file__ from the calling script + + Returns: + Path object pointing to the SQL directory + """ + if Path("/app/src/sql").exists(): + return Path("/app/src/sql") # Docker path + else: + # Local development path (relative to script location) + return Path(script_file).parent.parent / "sql" + + +def discover_table_files(sql_base_path): + """ + Recursively discover all table_*.sql files in the SQL directory. + Returns a list of (table_name, file_path) tuples. + """ + table_files = [] + if not sql_base_path.exists(): + return table_files + + # Recursively find all table_*.sql files + for sql_file in sql_base_path.rglob("table_*.sql"): + # Extract table name from filename: table_<name>.sql -> <name> + match = re.match(r"table_(.+)\.sql$", sql_file.name, re.IGNORECASE) + if match: + table_name = match.group(1) + table_files.append((table_name, sql_file)) + + return table_files + + +def extract_table_dependencies(sql_content, table_name): + """ + Extract foreign key dependencies from SQL content. + Returns a set of table names that this table depends on. + """ + dependencies = set() + # Look for REFERENCES table_name patterns (case insensitive) + # Matches: REFERENCES table_name, REFERENCES `table_name`, REFERENCES schema.table_name + pattern = r'REFERENCES\s+(?:`?(\w+)`?\.)?`?(\w+)`?'
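+    # Illustrative matches for the pattern above (assumed inputs):
+    #   "REFERENCES locations (name)"    -> group(2) = "locations"
+    #   "REFERENCES `mydb`.`locations`"  -> group(1) = "mydb", group(2) = "locations"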
+ matches = re.finditer(pattern, sql_content, re.IGNORECASE) + for match in matches: + ref_table = match.group(2) + if ref_table and ref_table.lower() != table_name.lower(): + dependencies.add(ref_table.lower()) + return dependencies + + +def topological_sort_tables(table_files): + """ + Sort tables in dependency order using topological sort. + Tables with no dependencies come first. + """ + # Build dependency graph + table_names = [name for name, _ in table_files] + dependencies = {} + table_file_map = {} + + for table_name, sql_file in table_files: + table_file_map[table_name.lower()] = (table_name, sql_file) + sql_content = sql_file.read_text() + deps = extract_table_dependencies(sql_content, table_name) + dependencies[table_name.lower()] = deps + + # Topological sort + in_degree = defaultdict(int) + graph = defaultdict(list) + + for table in table_names: + table_lower = table.lower() + in_degree[table_lower] = 0 + + for table in table_names: + table_lower = table.lower() + for dep in dependencies[table_lower]: + if dep in [t.lower() for t in table_names]: + graph[dep].append(table_lower) + in_degree[table_lower] += 1 + + # Kahn's algorithm + queue = deque([t for t in [tn.lower() for tn in table_names] if in_degree[t] == 0]) + sorted_tables = [] + + while queue: + current = queue.popleft() + if current in table_file_map: + sorted_tables.append(table_file_map[current]) + for neighbor in graph[current]: + in_degree[neighbor] -= 1 + if in_degree[neighbor] == 0: + queue.append(neighbor) + + # Add any remaining tables (shouldn't happen if no cycles, but handle gracefully) + for table_name, sql_file in table_files: + table_lower = table_name.lower() + if (table_name, sql_file) not in sorted_tables: + sorted_tables.append((table_name, sql_file)) + + return sorted_tables + + +def parse_database_url(url): + """Parse MySQL connection URL and return connection parameters.""" + parsed = urlparse(url) + return { + 'host': parsed.hostname, + 'port': parsed.port or 3306, + 'user': parsed.username, + 'password': parsed.password, + 'database': parsed.path.lstrip('/') + } + + +def table_exists(cur, table_name, database_name=None): + """ + Check if a table exists in the database. + + Args: + cur: Database cursor + table_name: Name of the table to check + database_name: Optional database name (if None, uses current database) + """ + if database_name: + cur.execute( + "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = %s AND table_name = %s", + (database_name, table_name) + ) + else: + cur.execute( + "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = %s", + (table_name,) + ) + return cur.fetchone()[0] > 0 + + +def get_table_columns(cur, table_name, database_name=None): + """ + Get column information for a table. 
+ + Args: + cur: Database cursor + table_name: Name of the table + database_name: Optional database name (if None, uses current database) + + Returns: + List of tuples with column information + """ + if database_name: + cur.execute( + "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_KEY FROM information_schema.columns WHERE table_schema = %s AND table_name = %s ORDER BY ORDINAL_POSITION", + (database_name, table_name) + ) + else: + cur.execute( + "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_KEY FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = %s ORDER BY ORDINAL_POSITION", + (table_name,) + ) + return cur.fetchall() + + +def execute_sql_file(cur, sql_file_path, description): + """Execute a SQL file and handle errors gracefully.""" + try: + sql_content = sql_file_path.read_text() + # Split by semicolons to handle multiple statements + statements = [s.strip() for s in sql_content.split(';') if s.strip()] + for statement in statements: + if statement: + cur.execute(statement) + print(f"✓ {description} succeeded") + return True + except Exception as e: + print(f"✗ {description} failed: {e}") + return False + diff --git a/src/scripts/seed_locations.py b/src/scripts/seed_locations.py new file mode 100644 index 0000000..9ce7b7f --- /dev/null +++ b/src/scripts/seed_locations.py @@ -0,0 +1,179 @@ +""" +Seed default locations into the database. +This script is idempotent - it only inserts if no locations exist. +""" +import os +import sys +from pathlib import Path + +# Add src/scripts to path for imports (so we can import helpers directly) +sys.path.insert(0, str(Path(__file__).parent)) + +import mysql.connector +import time +from helpers import parse_database_url, get_sql_base_path, execute_sql_file, table_exists + +# Default locations from milventory frontend (InventoryContext.js) +DEFAULT_LOCATIONS = [ + {'name': 'Workbench', 'x': 140, 'y': 300, 'width': 150, 'height': 170, 'type': 'workbench'}, + {'name': 'File Cabinet A', 'x': 140, 'y': 700, 'width': 200, 'height': 260, 'type': 'cabinet'}, + {'name': 'File Cabinet B', 'x': 140, 'y': 1000, 'width': 200, 'height': 260, 'type': 'cabinet'}, + {'name': 'Drawer T1', 'x': 200, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T2', 'x': 410, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T3', 'x': 620, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T4', 'x': 830, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T5', 'x': 1040, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T6', 'x': 1250, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer R1', 'x': 1340, 'y': 320, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R2', 'x': 1340, 'y': 520, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R3', 'x': 1340, 'y': 720, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R4', 'x': 1340, 'y': 920, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R5', 'x': 1340, 'y': 1120, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Table A', 'x': 420, 'y': 520, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table B', 'x': 880, 'y': 520, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table C', 'x': 420, 'y': 940, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table D', 'x': 880, 'y': 940, 'width': 300, 'height': 200, 'type': 'table'}, +] + + +def 
seed_locations(): + """Seed default locations if none exist.""" + try: + # Get database connection parameters + database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") + db_params = parse_database_url(database_url) + database_name = db_params['database'] + + # Get root password for database creation + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # First, ensure the database exists (connect as root to create it if needed) + root_conn_params = { + 'host': db_params['host'], + 'port': db_params['port'], + 'user': 'root', + 'password': root_password + } + + try: + # Try to connect to the database first + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + cur.close() + conn.close() + except mysql.connector.Error as e: + # If database doesn't exist, create it + if 'Unknown database' in str(e) or '1049' in str(e): + print(f"📦 Database '{database_name}' does not exist, creating it...") + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"CREATE DATABASE IF NOT EXISTS `{database_name}`") + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{database_name}`.* TO '{db_params['user']}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + root_cur.close() + root_conn.close() + print(f"✓ Database '{database_name}' created") + # Small delay to ensure privileges are propagated + import time + time.sleep(0.5) + else: + raise + + print("🌱 Checking for existing locations...") + + # Connect to database + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + + # Check if locations table exists, create it if needed + if not table_exists(cur, 'locations'): + print("⚠ Locations table does not exist. Creating it...") + # Get SQL base path and find locations table file + sql_base_path = get_sql_base_path(__file__) + # Try both possible filenames + locations_file = sql_base_path / "location" / "table_locations.sql" + if not locations_file.exists(): + locations_file = sql_base_path / "location" / "table_location.sql" + + if locations_file.exists(): + if execute_sql_file(cur, locations_file, "locations table"): + conn.commit() + print("✓ Locations table created") + else: + print("✗ Failed to create locations table") + cur.close() + conn.close() + sys.exit(1) + else: + print(f"✗ Locations table SQL file not found at {locations_file}") + print(" Waiting for API to create it...") + # Wait and retry a few times + cur.close() + conn.close() + for attempt in range(5): + time.sleep(2) + try: + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + if table_exists(cur, 'locations'): + print("✓ Locations table now exists (created by API)") + break + cur.close() + conn.close() + except: + pass + else: + # Final check + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + if not table_exists(cur, 'locations'): + print("✗ Locations table still does not exist after waiting.") + cur.close() + conn.close() + sys.exit(1) + + # Check if any locations exist + cur.execute("SELECT COUNT(*) FROM locations") + count = cur.fetchone()[0] + + if count > 0: + print(f"✓ Found {count} existing location(s), skipping seed") + cur.close() + conn.close() + return + + # Insert default locations + print(f"📦 Seeding {len(DEFAULT_LOCATIONS)} default locations...") + + insert_count = 0 + for loc in DEFAULT_LOCATIONS: + try: + cur.execute( + "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)", + (loc['name'], loc['x'], loc['y'], 
loc['width'], loc['height'], loc['type']) + ) + insert_count += 1 + except mysql.connector.IntegrityError: + # Skip if already exists (shouldn't happen, but be safe) + print(f" ⚠ {loc['name']} already exists, skipping") + + conn.commit() + print(f"✓ Successfully seeded {insert_count} location(s)") + + cur.close() + conn.close() + + except mysql.connector.Error as e: + print(f"✗ Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"✗ Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + seed_locations() + diff --git a/src/scripts/test_gui.py b/src/scripts/test_gui.py new file mode 100644 index 0000000..60265e0 --- /dev/null +++ b/src/scripts/test_gui.py @@ -0,0 +1,633 @@ +""" +GUI Test Runner for mil-sql tests. + +Opens a tkinter window with buttons to run individual tests. +Automatically discovers test_*.py files in the scripts directory. + +Usage: + python src/scripts/test_gui.py +""" +import tkinter as tk +from tkinter import scrolledtext, ttk, messagebox +import subprocess +import sys +import os +import json +from pathlib import Path +import threading +import re + + +class TestRunnerGUI: + def __init__(self, root): + self.root = root + self.root.title("Mil-SQL Test Runner") + self.root.geometry("800x600") + + # Get scripts directory + self.scripts_dir = Path(__file__).parent + + # Discover test files + self.test_files = self.discover_tests() + # Discover command files + self.command_files = self.discover_commands() + + # Create UI + self.create_ui() + + def discover_tests(self): + """Discover all test_*.py files in the scripts directory.""" + test_files = [] + for test_file in self.scripts_dir.glob("test_*.py"): + if test_file.name != "test_gui.py": # Exclude this file + # Extract test name from filename: test_<name>.py -> <name> + match = re.match(r"test_(.+)\.py$", test_file.name, re.IGNORECASE) + if match: + test_name = match.group(1).replace("_", " ").title() + test_files.append((test_name, test_file)) + return sorted(test_files) + + def discover_commands(self): + """Discover all command_*.py files in the scripts directory.""" + command_files = [] + for command_file in self.scripts_dir.glob("command_*.py"): + # Extract command name from filename: command_<name>.py -> <name> + match = re.match(r"command_(.+)\.py$", command_file.name, re.IGNORECASE) + if match: + command_name = match.group(1).replace("_", " ").title() + command_files.append((command_name, command_file)) + return sorted(command_files) + + def create_ui(self): + """Create the user interface.""" + # Top frame for buttons + button_frame = tk.Frame(self.root, padx=10, pady=10) + button_frame.pack(fill=tk.X) + + # Tests section + tk.Label(button_frame, text="Available Tests:", font=("Arial", 12, "bold")).pack(anchor=tk.W) + + # Create buttons for each test + if not self.test_files: + tk.Label(button_frame, text="No test files found (test_*.py)", fg="gray").pack(anchor=tk.W, pady=5) + else: + buttons_frame = tk.Frame(button_frame) + buttons_frame.pack(fill=tk.X, pady=5) + + for test_name, test_file in self.test_files: + btn = tk.Button( + buttons_frame, + text=f"▶ {test_name}", + command=lambda tf=test_file: self.run_test(tf), + width=20, + height=2, + font=("Arial", 10) + ) + btn.pack(side=tk.LEFT, padx=5, pady=5) + + # Run all tests button + tk.Button( + buttons_frame, + text="▶ Run All Tests", + command=self.run_all_tests, + width=20, + height=2, + font=("Arial", 10, "bold"), + bg="#4CAF50", + fg="white" + ).pack(side=tk.LEFT, padx=5, pady=5) + + # Commands section
(separate from tests) + if self.command_files: + # Separator + separator = tk.Frame(button_frame, height=2, bg="gray", relief=tk.SUNKEN) + separator.pack(fill=tk.X, pady=15) + + tk.Label(button_frame, text="Commands:", font=("Arial", 12, "bold"), fg="#d32f2f").pack(anchor=tk.W, pady=(10, 5)) + + commands_frame = tk.Frame(button_frame) + commands_frame.pack(fill=tk.X, pady=5) + + for command_name, command_file in self.command_files: + btn = tk.Button( + commands_frame, + text=f"⚠ {command_name}", + command=lambda cf=command_file, cn=command_name: self.run_command(cf, cn), + width=20, + height=2, + font=("Arial", 10), + bg="#d32f2f", + fg="white" + ) + btn.pack(side=tk.LEFT, padx=5, pady=5) + + # Output area + output_frame = tk.Frame(self.root, padx=10, pady=10) + output_frame.pack(fill=tk.BOTH, expand=True) + + tk.Label(output_frame, text="Test Output:", font=("Arial", 12, "bold")).pack(anchor=tk.W) + + self.output_text = scrolledtext.ScrolledText( + output_frame, + wrap=tk.WORD, + width=80, + height=20, + font=("Consolas", 9) + ) + self.output_text.pack(fill=tk.BOTH, expand=True, pady=5) + + # Status bar + self.status_var = tk.StringVar(value="Ready") + status_bar = tk.Label( + self.root, + textvariable=self.status_var, + relief=tk.SUNKEN, + anchor=tk.W, + padx=10 + ) + status_bar.pack(side=tk.BOTTOM, fill=tk.X) + + # Clear button + clear_btn = tk.Button( + output_frame, + text="Clear Output", + command=self.clear_output, + width=15 + ) + clear_btn.pack(anchor=tk.E, pady=5) + + def clear_output(self): + """Clear the output text area.""" + self.output_text.delete(1.0, tk.END) + self.status_var.set("Ready") + + def log(self, message, tag=None): + """Add a message to the output area.""" + self.output_text.insert(tk.END, message + "\n", tag) + self.output_text.see(tk.END) + self.root.update() + + def run_test(self, test_file): + """Run a single test file.""" + test_name = test_file.stem.replace("test_", "").replace("_", " ").title() + self.log(f"\n{'='*60}") + self.log(f"Running: {test_name}") + self.log(f"File: {test_file.name}") + self.log(f"{'='*60}\n") + self.status_var.set(f"Running: {test_name}...") + + # Run test in a separate thread to avoid blocking UI + thread = threading.Thread(target=self._run_test_thread, args=(test_file,), daemon=True) + thread.start() + + def run_command(self, command_file, command_name): + """Run a command file with a warning dialog.""" + # Show warning dialog + warning_msg = ( + f"⚠️ WARNING: You are about to execute a command!\n\n" + f"Command: {command_name}\n" + f"File: {command_file.name}\n\n" + f"This action may modify or delete data.\n" + f"Are you sure you want to continue?" 
+ ) + + result = messagebox.askyesno( + "Confirm Command Execution", + warning_msg, + icon="warning", + default="no" + ) + + if not result: + self.log(f"\n⚠ Command '{command_name}' was cancelled by user.") + return + + # User confirmed - proceed with execution + self.log(f"\n{'='*60}") + self.log(f"Executing Command: {command_name}") + self.log(f"File: {command_file.name}") + self.log(f"{'='*60}\n") + self.status_var.set(f"Executing: {command_name}...") + + # Run command in a separate thread to avoid blocking UI + thread = threading.Thread(target=self._run_command_thread, args=(command_file, command_name), daemon=True) + thread.start() + + def _run_command_thread(self, command_file, command_name): + """Run command in a separate thread and capture output.""" + try: + # Get the command filename relative to scripts directory + command_filename = command_file.name + + # Run command in Docker container + # Use docker-compose exec to run the command in the api container + compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{command_filename}"] + + # Run the command + process = subprocess.Popen( + compose_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + encoding='utf-8', + errors='replace', # Replace invalid characters instead of failing + bufsize=1, + universal_newlines=True, + cwd=str(self.scripts_dir.parent.parent) # Run from project root + ) + + # Stream output in real-time + for line in process.stdout: + line_stripped = line.rstrip() + self.log(line_stripped) + + process.wait() + + if process.returncode == 0: + self.log(f"\n✓ Command completed successfully!", "success") + self.status_var.set("Command completed!") + else: + self.log(f"\n✗ Command failed with exit code {process.returncode}", "error") + self.status_var.set("Command failed!") + + except Exception as e: + self.log(f"\n✗ Error running command: {e}", "error") + self.status_var.set(f"Error: {str(e)}") + + def _run_test_thread(self, test_file): + """Run test in a separate thread and capture output.""" + try: + # Get the test filename relative to scripts directory + test_filename = test_file.name + + # Run test in Docker container + # Use docker-compose exec to run the test in the api container + compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{test_filename}"] + + # Run the test + process = subprocess.Popen( + compose_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + encoding='utf-8', + errors='replace', # Replace invalid characters instead of failing + bufsize=1, + universal_newlines=True, + cwd=str(self.scripts_dir.parent.parent) # Run from project root + ) + + # Capture all output to detect JSON data + output_lines = [] + json_data = None + in_json_block = False + json_lines = [] + + # Stream output in real-time + for line in process.stdout: + line_stripped = line.rstrip() + output_lines.append(line_stripped) + self.log(line_stripped) + + # Detect JSON data block + if "TABLE_DATA_JSON_START" in line_stripped: + in_json_block = True + json_lines = [] + elif "TABLE_DATA_JSON_END" in line_stripped: + in_json_block = False + # Parse JSON + try: + json_str = "\n".join(json_lines) + json_data = json.loads(json_str) + except json.JSONDecodeError as e: + self.log(f"\n⚠ Could not parse table data JSON: {e}", "error") + elif in_json_block: + json_lines.append(line_stripped) + + process.wait() + + # If we got JSON data, open viewer window locally + if json_data and process.returncode == 0: + 
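+                # Tkinter is not thread-safe, so hand the viewer off to the
+                # main event loop via root.after() instead of creating it here.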
self.log("\n🪟 Opening table viewer window locally...") + self.root.after(100, lambda: self._open_table_viewer(json_data)) + + if process.returncode == 0: + self.log(f"\n✓ Test completed successfully!", "success") + self.status_var.set("Test passed!") + else: + self.log(f"\n✗ Test failed with exit code {process.returncode}", "error") + self.status_var.set("Test failed!") + + except Exception as e: + self.log(f"\n✗ Error running test: {e}", "error") + self.status_var.set(f"Error: {str(e)}") + + def _open_table_viewer(self, table_data): + """Open a table viewer window with the provided data.""" + try: + # Check which format we have + if 'locations' in table_data and 'supplies' in table_data: + # Format from test_locations_supplies.py - use interactive viewer + try: + # Import the interactive viewer from test_locations_supplies + import importlib.util + import os + viewer_path = self.scripts_dir / "test_locations_supplies.py" + spec = importlib.util.spec_from_file_location("test_locations_supplies", viewer_path) + test_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(test_module) + + # Convert JSON data back to tuples + locations_data = [tuple(row) for row in table_data['locations']['data']] + supplies_data = [tuple(row) for row in table_data['supplies']['data']] + + # Use the interactive viewer (no cleanup needed - API manages the test database) + test_module.create_table_viewer(locations_data, supplies_data, cleanup_callback=None) + except Exception as e: + # Fallback to simple viewer + self.log(f"\n⚠ Could not load interactive viewer, using simple viewer: {e}", "error") + locations_data = [tuple(row) for row in table_data['locations']['data']] + supplies_data = [tuple(row) for row in table_data['supplies']['data']] + self._create_table_viewer_window(locations_data, supplies_data) + elif 'successful_tables' in table_data or 'failed_tables' in table_data: + # Format from test_tables.py - show table creation results + self._create_all_tables_viewer_window(table_data) + else: + self.log(f"\n⚠ Unknown table data format", "error") + except Exception as e: + self.log(f"\n⚠ Could not open table viewer: {e}", "error") + import traceback + self.log(traceback.format_exc(), "error") + + def _create_table_viewer_window(self, locations_data, supplies_data): + """Create a tkinter window to display table contents.""" + root = tk.Tk() + root.title("Database Table Contents - Locations & Supplies") + root.geometry("1200x700") + + # Create notebook for tabs + notebook = ttk.Notebook(root) + notebook.pack(fill=tk.BOTH, expand=True, padx=10, pady=10) + + # Locations tab + locations_frame = ttk.Frame(notebook) + notebook.add(locations_frame, text=f"Locations ({len(locations_data)} rows)") + + # Locations treeview + loc_tree = ttk.Treeview(locations_frame, columns=('name', 'x', 'y', 'width', 'height', 'type'), show='headings', height=20) + loc_tree.heading('name', text='Name') + loc_tree.heading('x', text='X') + loc_tree.heading('y', text='Y') + loc_tree.heading('width', text='Width') + loc_tree.heading('height', text='Height') + loc_tree.heading('type', text='Type') + + # Configure column widths + loc_tree.column('name', width=150, anchor=tk.W) + loc_tree.column('x', width=60, anchor=tk.CENTER) + loc_tree.column('y', width=60, anchor=tk.CENTER) + loc_tree.column('width', width=80, anchor=tk.CENTER) + loc_tree.column('height', width=80, anchor=tk.CENTER) + loc_tree.column('type', width=100, anchor=tk.W) + + # Add scrollbar for locations + loc_scroll = ttk.Scrollbar(locations_frame, 
orient=tk.VERTICAL, command=loc_tree.yview) + loc_tree.configure(yscrollcommand=loc_scroll.set) + + loc_tree.pack(side=tk.LEFT, fill=tk.BOTH, expand=True) + loc_scroll.pack(side=tk.RIGHT, fill=tk.Y) + + # Populate locations data + for row in locations_data: + loc_tree.insert('', tk.END, values=row) + + # Supplies tab + supplies_frame = ttk.Frame(notebook) + notebook.add(supplies_frame, text=f"Supplies ({len(supplies_data)} rows)") + + # Supplies treeview + sup_tree = ttk.Treeview(supplies_frame, columns=('id', 'name', 'amount', 'last_order_date', 'location'), show='headings', height=20) + sup_tree.heading('id', text='ID') + sup_tree.heading('name', text='Name') + sup_tree.heading('amount', text='Amount') + sup_tree.heading('last_order_date', text='Last Order Date') + sup_tree.heading('location', text='Location') + + # Configure column widths + sup_tree.column('id', width=50, anchor=tk.CENTER) + sup_tree.column('name', width=200, anchor=tk.W) + sup_tree.column('amount', width=80, anchor=tk.CENTER) + sup_tree.column('last_order_date', width=120, anchor=tk.CENTER) + sup_tree.column('location', width=150, anchor=tk.W) + + # Add scrollbar for supplies + sup_scroll = ttk.Scrollbar(supplies_frame, orient=tk.VERTICAL, command=sup_tree.yview) + sup_tree.configure(yscrollcommand=sup_scroll.set) + + sup_tree.pack(side=tk.LEFT, fill=tk.BOTH, expand=True) + sup_scroll.pack(side=tk.RIGHT, fill=tk.Y) + + # Populate supplies data + for row in supplies_data: + sup_tree.insert('', tk.END, values=row) + + # Status bar + status_frame = tk.Frame(root) + status_frame.pack(fill=tk.X, padx=10, pady=5) + status_label = tk.Label(status_frame, text=f"Locations: {len(locations_data)} | Supplies: {len(supplies_data)}", + relief=tk.SUNKEN, anchor=tk.W, padx=10) + status_label.pack(fill=tk.X) + + # Close button + button_frame = tk.Frame(root) + button_frame.pack(pady=10) + tk.Button(button_frame, text="Close", command=root.destroy, width=15, height=2).pack() + + root.mainloop() + + def _create_all_tables_viewer_window(self, table_data): + """Create a tkinter window to display table creation results.""" + root = tk.Tk() + database_name = table_data.get('database', 'Unknown') + summary = table_data.get('summary', {}) + successful_tables = table_data.get('successful_tables', []) + failed_tables = table_data.get('failed_tables', []) + + root.title(f"Table Creation Results - {database_name}") + root.geometry("800x600") + + # Main frame + main_frame = tk.Frame(root, padx=20, pady=20) + main_frame.pack(fill=tk.BOTH, expand=True) + + # Title + title_label = tk.Label( + main_frame, + text=f"Database: {database_name}", + font=("Arial", 14, "bold") + ) + title_label.pack(anchor=tk.W, pady=(0, 10)) + + # Summary section + summary_frame = tk.LabelFrame(main_frame, text="Summary", padx=10, pady=10) + summary_frame.pack(fill=tk.X, pady=10) + + summary_text = f"""Created: {summary.get('created', 0)}/{summary.get('total', 0)} tables +Successful: {summary.get('successful_count', 0)} tables +Failed: {summary.get('failed_count', 0)} tables +All Valid: {'Yes ✓' if summary.get('all_valid', False) else 'No ✗'}""" + + summary_label = tk.Label( + summary_frame, + text=summary_text, + font=("Consolas", 10), + justify=tk.LEFT + ) + summary_label.pack(anchor=tk.W) + + # Successful tables section + if successful_tables: + success_frame = tk.LabelFrame(main_frame, text=f"✓ Successful Tables ({len(successful_tables)})", padx=10, pady=10) + success_frame.pack(fill=tk.BOTH, expand=True, pady=10) + + success_listbox = tk.Listbox(success_frame, 
font=("Consolas", 10)) + success_listbox.pack(fill=tk.BOTH, expand=True) + + for table_name in sorted(successful_tables): + success_listbox.insert(tk.END, f" ✓ {table_name}") + else: + success_frame = tk.LabelFrame(main_frame, text="✓ Successful Tables (0)", padx=10, pady=10) + success_frame.pack(fill=tk.X, pady=10) + tk.Label(success_frame, text="No successful tables", fg="gray").pack() + + # Failed tables section + if failed_tables: + failed_frame = tk.LabelFrame(main_frame, text=f"✗ Failed Tables ({len(failed_tables)})", padx=10, pady=10) + failed_frame.pack(fill=tk.BOTH, expand=True, pady=10) + + failed_listbox = tk.Listbox(failed_frame, font=("Consolas", 10), fg="red") + failed_listbox.pack(fill=tk.BOTH, expand=True) + + for table_name in sorted(failed_tables): + failed_listbox.insert(tk.END, f" ✗ {table_name}") + else: + failed_frame = tk.LabelFrame(main_frame, text="✗ Failed Tables (0)", padx=10, pady=10) + failed_frame.pack(fill=tk.X, pady=10) + tk.Label(failed_frame, text="No failed tables", fg="green").pack() + + # Status bar + status_frame = tk.Frame(root) + status_frame.pack(fill=tk.X, padx=10, pady=5) + status_label = tk.Label( + status_frame, + text=f"Total: {summary.get('total', 0)} tables | Successful: {len(successful_tables)} | Failed: {len(failed_tables)}", + relief=tk.SUNKEN, + anchor=tk.W, + padx=10 + ) + status_label.pack(fill=tk.X) + + # Close button + button_frame = tk.Frame(root) + button_frame.pack(pady=10) + tk.Button(button_frame, text="Close", command=root.destroy, width=15, height=2).pack() + + root.mainloop() + + def run_all_tests(self): + """Run all discovered tests sequentially.""" + if not self.test_files: + self.log("No tests to run.") + return + + self.log(f"\n{'='*60}") + self.log(f"Running All Tests ({len(self.test_files)} test(s))") + self.log(f"{'='*60}\n") + self.status_var.set(f"Running all tests...") + + # Run tests in sequence + thread = threading.Thread(target=self._run_all_tests_thread, daemon=True) + thread.start() + + def _run_all_tests_thread(self): + """Run all tests in a separate thread.""" + passed = 0 + failed = 0 + + for test_name, test_file in self.test_files: + self.log(f"\n{'='*60}") + self.log(f"Running: {test_name}") + self.log(f"{'='*60}\n") + self.status_var.set(f"Running: {test_name}... ({passed + failed + 1}/{len(self.test_files)})") + + try: + # Get the test filename relative to scripts directory + test_filename = test_file.name + + # Run test in Docker container + compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{test_filename}"] + + process = subprocess.Popen( + compose_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + encoding='utf-8', + errors='replace', # Replace invalid characters instead of failing + bufsize=1, + universal_newlines=True, + cwd=str(self.scripts_dir.parent.parent) # Run from project root + ) + + for line in process.stdout: + self.log(line.rstrip()) + + process.wait() + + if process.returncode == 0: + self.log(f"\n✓ {test_name} PASSED\n", "success") + passed += 1 + else: + self.log(f"\n✗ {test_name} FAILED\n", "error") + failed += 1 + + except Exception as e: + self.log(f"\n✗ {test_name} ERROR: {e}\n", "error") + failed += 1 + + # Summary + self.log(f"\n{'='*60}") + self.log(f"Test Summary: {passed} passed, {failed} failed out of {len(self.test_files)} total") + self.log(f"{'='*60}\n") + + if failed == 0: + self.status_var.set(f"All tests passed! 
({passed}/{len(self.test_files)})") + else: + self.status_var.set(f"Some tests failed: {passed} passed, {failed} failed") + + +def main(): + """Main entry point.""" + # GUI runs locally on Windows desktop + # Tests are executed in Docker containers via docker-compose exec + + root = tk.Tk() + app = TestRunnerGUI(root) + + # Configure text tags for colors + app.output_text.tag_config("success", foreground="green") + app.output_text.tag_config("error", foreground="red") + + # Welcome message + app.log("Mil-SQL Test Runner") + app.log("=" * 60) + app.log(f"Found {len(app.test_files)} test(s)") + app.log("Tests will run in Docker containers.") + app.log("Click a button above to run a test, or 'Run All Tests' to run everything.\n") + + root.mainloop() + + +if __name__ == "__main__": + main() + diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py new file mode 100644 index 0000000..33e652b --- /dev/null +++ b/src/scripts/test_locations_supplies.py @@ -0,0 +1,491 @@ +""" +Test script for locations and supplies data using the test API. + +This script: +1. Creates sample locations (3 containers) via API +2. Creates sample supplies via API +3. Displays data in an interactive GUI window +4. Allows moving supplies between containers via API + +Usage: + python src/scripts/test_locations_supplies.py +""" +import os +import sys +import json +import time +import requests +import tkinter as tk +from tkinter import messagebox +from pathlib import Path + +# Add src/scripts to path for imports +sys.path.insert(0, str(Path(__file__).parent)) + +from helpers import TEST_API_URL + +# Container names for this test +CONTAINER_NAMES = ['Container A', 'Container B', 'Container C'] + +# Sample supplies (9 distinct supplies, 3 per container) +SAMPLE_SUPPLIES = [ + # Container A + {'name': 'Supply A1', 'amount': 10, 'location': 'Container A'}, + {'name': 'Supply A2', 'amount': 15, 'location': 'Container A'}, + {'name': 'Supply A3', 'amount': 20, 'location': 'Container A'}, + # Container B + {'name': 'Supply B1', 'amount': 12, 'location': 'Container B'}, + {'name': 'Supply B2', 'amount': 18, 'location': 'Container B'}, + {'name': 'Supply B3', 'amount': 25, 'location': 'Container B'}, + # Container C + {'name': 'Supply C1', 'amount': 8, 'location': 'Container C'}, + {'name': 'Supply C2', 'amount': 14, 'location': 'Container C'}, + {'name': 'Supply C3', 'amount': 22, 'location': 'Container C'}, +] + +# DISTRIBUTED SUPPLY amounts per container +DISTRIBUTED_SUPPLY_AMOUNTS = { + 'Container A': 5, + 'Container B': 10, + 'Container C': 15, +} + + +def check_api_available(): + """Check if the test API is available.""" + try: + # Get base URL (remove trailing /api if present) + base_url = TEST_API_URL + if base_url.endswith('/api'): + base_url = base_url[:-4] # Remove '/api' + elif base_url.endswith('/api/'): + base_url = base_url[:-5] # Remove '/api/' + + # Ensure base URL ends with / + if not base_url.endswith('/'): + base_url += '/' + + response = requests.get(f"{base_url}health", timeout=2) + return response.status_code == 200 + except Exception as e: + print(f" Debug: API check failed: {e}") + return False + + +def create_test_locations(): + """Create test container locations via API.""" + print("📦 Creating test locations...") + + locations_created = 0 + locations_skipped = 0 + + for i, container_name in enumerate(CONTAINER_NAMES): + location_data = { + 'name': container_name, + 'x': 200 + i * 300, + 'y': 200, + 'width': 200, + 'height': 200, + 'type': 'cabinet' + } + + try: + 
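+            # Expect 201 Created for a new location and 409 Conflict if the name already exists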
response = requests.post(f"{TEST_API_URL}/locations", json=location_data, timeout=5) + if response.status_code == 201: + locations_created += 1 + print(f" ✓ Created {container_name}") + elif response.status_code == 409: + locations_skipped += 1 + print(f" ⊘ {container_name} already exists, skipping") + else: + print(f" ✗ Failed to create {container_name}: {response.status_code} - {response.text}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + print(f" ✗ Error creating {container_name}: {e}") + time.sleep(0.1) + + return locations_created, locations_skipped + + +def create_test_supplies(): + """Create test supplies via API.""" + print("📦 Creating test supplies...") + + supplies_created = 0 + supplies_failed = 0 + + # First, create DISTRIBUTED SUPPLY in all containers + for container_name in CONTAINER_NAMES: + supply_data = { + 'name': 'DISTRIBUTED SUPPLY', + 'amount': DISTRIBUTED_SUPPLY_AMOUNTS[container_name], + 'location': container_name + } + + try: + response = requests.post(f"{TEST_API_URL}/supplies", json=supply_data, timeout=5) + if response.status_code in [201, 200]: + supplies_created += 1 + print(f" ✓ Created DISTRIBUTED SUPPLY in {container_name}") + else: + supplies_failed += 1 + print(f" ✗ Failed to create DISTRIBUTED SUPPLY in {container_name}: {response.status_code}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + supplies_failed += 1 + print(f" ✗ Error creating DISTRIBUTED SUPPLY in {container_name}: {e}") + time.sleep(0.1) + + # Then create the 9 distinct supplies + for supply in SAMPLE_SUPPLIES: + try: + response = requests.post(f"{TEST_API_URL}/supplies", json=supply, timeout=5) + if response.status_code in [201, 200]: + supplies_created += 1 + else: + supplies_failed += 1 + print(f" ✗ Failed to create {supply['name']}: {response.status_code}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + supplies_failed += 1 + print(f" ✗ Error creating {supply['name']}: {e}") + time.sleep(0.1) + + return supplies_created, supplies_failed + + +def get_supplies_from_api(): + """Fetch supplies from the test API, filtered to our containers.""" + try: + # Get all supplies and filter to our containers + response = requests.get(f"{TEST_API_URL}/supplies", timeout=5) + if response.status_code == 200: + all_supplies = response.json() + # Filter to only our containers + filtered = [s for s in all_supplies if s['location'] in CONTAINER_NAMES] + return filtered + else: + print(f"Warning: API returned status {response.status_code}") + return [] + except requests.exceptions.RequestException as e: + print(f"Warning: Error fetching supplies from API: {e}") + return [] + + +def move_supplies_via_api(from_container, to_container): + """Move all supplies from one container to another via API.""" + try: + # Get all supplies in source container + response = requests.get(f"{TEST_API_URL}/supplies?location={from_container}", timeout=5) + if response.status_code != 200: + messagebox.showerror("Error", f"Failed to fetch supplies from {from_container}") + return False + + supplies = response.json() + if not supplies: + messagebox.showinfo("Info", f"No supplies in {from_container} to move.") + return False + + # Move each supply + moved_count = 0 + failed_count = 0 + + for supply in supplies: + move_data = { + 'name': supply['name'], + 'from_location': from_container, + 'to_location': 
to_container, + 'amount': supply['amount'] # Move all + } + + try: + move_response = requests.post(f"{TEST_API_URL}/supplies/move", json=move_data, timeout=5) + if move_response.status_code == 200: + moved_count += 1 + else: + failed_count += 1 + error_msg = move_response.json().get('error', 'Unknown error') + print(f"Failed to move {supply['name']}: {error_msg}") + except requests.exceptions.RequestException as e: + failed_count += 1 + print(f"Error moving {supply['name']}: {e}") + + if failed_count > 0: + messagebox.showwarning( + "Partial Success", + f"Moved {moved_count} supply type(s), {failed_count} failed." + ) + else: + # Success - no popup, just refresh + pass + + return moved_count > 0 + + except requests.exceptions.RequestException as e: + messagebox.showerror("Error", f"Failed to move supplies: {e}") + return False + + +def create_table_viewer(locations_data=None, supplies_data=None, cleanup_callback=None): + """ + Create and display the interactive table viewer window. + + Args: + locations_data: Optional list of location tuples (for test_gui.py integration) + supplies_data: Optional list of supply tuples (for test_gui.py integration) + cleanup_callback: Optional callback function to call when window closes + """ + try: + import tkinter as tk + except ImportError: + print("✗ Tkinter not available. Cannot display GUI.") + return + + root = tk.Tk() + root.title("Container Supplies - Test API") + root.geometry("900x600") + + # Set up cleanup callback if provided + if cleanup_callback: + def on_closing(): + cleanup_callback() + root.destroy() + root.protocol("WM_DELETE_WINDOW", on_closing) + + # Check API availability + api_available = check_api_available() + if not api_available: + warning_label = tk.Label( + root, + text="⚠ Test API not available!\nMake sure 'api-test' service is running on port 5001.", + fg="red", + font=("Arial", 12, "bold"), + justify=tk.CENTER + ) + warning_label.pack(pady=20) + root.mainloop() + return + + # Main frame + main_frame = tk.Frame(root, padx=20, pady=20) + main_frame.pack(fill=tk.BOTH, expand=True) + + # Title + title_label = tk.Label(main_frame, text="Container Supplies (Test API)", font=("Arial", 16, "bold")) + title_label.pack(pady=(0, 20)) + + # Container frames (3 columns) + containers_frame = tk.Frame(main_frame) + containers_frame.pack(fill=tk.BOTH, expand=True) + + container_frames = {} + container_labels = {} + + # Define functions before creating buttons (to avoid closure issues) + def update_display(supplies_data=None): + """Update the display with current supplies.""" + if supplies_data is None: + supplies_data = get_supplies_from_api() + + # Group supplies by location + supplies_by_location = {} + for supply in supplies_data: + # Handle both dict (from API) and tuple (from test_gui.py) formats + if isinstance(supply, dict): + location = supply['location'] + name = supply['name'] + amount = supply['amount'] + else: + # Tuple format: (id, name, amount, last_order_date, location) + supply_id, name, amount, last_order_date, location = supply + supply = {'id': supply_id, 'name': name, 'amount': amount, 'location': location} + + if location not in supplies_by_location: + supplies_by_location[location] = [] + supplies_by_location[location].append(supply) + + # Update each container display + for container_name in CONTAINER_NAMES: + if container_name in container_frames: + listbox = container_frames[container_name] + listbox.delete(0, tk.END) + + if container_name in supplies_by_location: + total_items = 0 + for supply in 
supplies_by_location[container_name]: + if isinstance(supply, dict): + name = supply['name'] + amount = supply['amount'] + else: + _, name, amount, _, _ = supply + total_items += amount + listbox.insert(tk.END, f"{name}: {amount}") + + # Update container label + if container_name in container_labels: + container_labels[container_name].config( + text=f"{container_name} ({len(supplies_by_location[container_name])} types, {total_items} total items)" + ) + else: + listbox.insert(0, "(empty)") + if container_name in container_labels: + container_labels[container_name].config(text=f"{container_name} (empty)") + + def refresh_display(): + """Refresh the display from API.""" + supplies = get_supplies_from_api() + update_display(supplies) + + def move_and_refresh(from_container, to_container): + """Move supplies and refresh display.""" + if move_supplies_via_api(from_container, to_container): + refresh_display() + + # Now create the UI elements + for i, container_name in enumerate(CONTAINER_NAMES): + # Container column + col_frame = tk.Frame(containers_frame) + col_frame.grid(row=0, column=i, padx=20, sticky="nsew") + containers_frame.columnconfigure(i, weight=1) + + # Container label + label = tk.Label(col_frame, text=f"{container_name}", font=("Arial", 12, "bold")) + label.pack(pady=(0, 10)) + container_labels[container_name] = label + + # Supplies listbox + listbox = tk.Listbox(col_frame, width=30, height=15, font=("Consolas", 10)) + listbox.pack(fill=tk.BOTH, expand=True) + container_frames[container_name] = listbox + + # Move buttons for this container + buttons_frame = tk.Frame(col_frame) + buttons_frame.pack(pady=10) + + for other_container in CONTAINER_NAMES: + if other_container != container_name: + btn_text = f"Move to {other_container}" + btn = tk.Button( + buttons_frame, + text=btn_text, + command=lambda f=container_name, t=other_container: move_and_refresh(f, t), + width=20, + height=2, + font=("Arial", 10, "bold"), + bg="#4CAF50", + fg="white" + ) + btn.pack(pady=5, fill=tk.X) + + # Refresh button + refresh_btn = tk.Button( + main_frame, + text="🔄 Refresh", + command=refresh_display, + width=20, + height=2, + font=("Arial", 10, "bold"), + bg="#2196F3", + fg="white" + ) + refresh_btn.pack(pady=10) + + # Initial display + if locations_data and supplies_data: + # Use provided data (from test_gui.py) + update_display(supplies_data) + else: + # Fetch from API (standalone mode) + refresh_display() + + root.mainloop() + + +def main(): + """Main test function.""" + print("🧪 Starting Locations & Supplies Data Test (via API)") + print(f"🌐 Test API: {TEST_API_URL}") + print("=" * 60) + + # Check if API is available + print("🔌 Checking test API availability...") + print(f" Testing connection to: {TEST_API_URL.replace('/api', '')}/health") + if not check_api_available(): + print("✗ Test API is not available!") + print(f" Make sure the 'api-test' service is running on port 5001") + print(f" Run 'make up' or 'make test' to start services") + print(f" Expected URL: {TEST_API_URL}") + sys.exit(1) + + print("✓ Test API is available") + + # Create test locations + print("\n" + "=" * 60) + loc_created, loc_skipped = create_test_locations() + + # Create test supplies + print("\n" + "=" * 60) + sup_created, sup_failed = create_test_supplies() + + # Summary + print("\n" + "=" * 60) + print("✅ TEST SUMMARY:") + print(f" Locations: {loc_created} created, {loc_skipped} skipped (already existed)") + print(f" Supplies: {sup_created} created, {sup_failed} failed") + print("=" * 60) + + # Get current data for 
display + print("\n📊 Fetching current data from API...") + supplies_data = get_supplies_from_api() + + # Output JSON data for test_gui.py to display + table_data = { + 'locations': { + 'columns': ['name', 'x', 'y', 'width', 'height', 'type'], + 'data': [ + [name, 200 + i * 300, 200, 200, 200, 'cabinet'] + for i, name in enumerate(CONTAINER_NAMES) + ] + }, + 'supplies': { + 'columns': ['id', 'name', 'amount', 'last_order_date', 'location'], + 'data': [ + [ + s.get('id', 0), + s['name'], + s['amount'], + s.get('last_order_date'), + s['location'] + ] + for s in supplies_data + ] + } + } + + print("\n" + "=" * 60) + print("TABLE_DATA_JSON_START") + print(json.dumps(table_data, indent=2)) + print("TABLE_DATA_JSON_END") + print("=" * 60) + + # Only open GUI viewer if running locally (not in Docker) + # When running in Docker, test_gui.py will open the viewer locally + is_docker = os.path.exists("/app") or os.getenv("HOSTNAME", "").startswith("mysql_api") + + if not is_docker: + # Running locally - open GUI viewer + print("\n🪟 Opening table viewer window...") + create_table_viewer() + else: + # Running in Docker - test_gui.py will open viewer locally + print("\n📊 Test data ready (GUI will open locally via test_gui.py)") + + print("\n✅ Test completed successfully!") + + +if __name__ == "__main__": + main() + diff --git a/src/scripts/test_tables.py b/src/scripts/test_tables.py new file mode 100644 index 0000000..749cba7 --- /dev/null +++ b/src/scripts/test_tables.py @@ -0,0 +1,300 @@ +""" +Test script to validate table creation in a test database. + +This script creates a separate test database, initializes all tables, +and verifies they were created correctly. It does not affect the production database. + +Usage: + python src/scripts/test_tables.py + +Environment variables: + DATABASE_URL: MySQL connection string (default: mysql://mysqluser:mysqlpassword@db:3306/mydb) + The test will use a database named '{database_name}_test' +""" +import os +import sys +import json +import mysql.connector +from helpers import ( + get_sql_base_path, + discover_table_files, + topological_sort_tables, + table_exists, + get_table_columns, + execute_sql_file, + parse_database_url +) + +# Get base database URL, test will use a separate database +BASE_DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") + +# Base path for SQL files (works in Docker and locally) +SQL_BASE_PATH = get_sql_base_path(__file__) + + +def initialize_schema(cur, database_name=None): + """Initialize database schema in correct dependency order.""" + print("\n📋 Discovering table files...") + + # Discover all table_*.sql files recursively + table_files = discover_table_files(SQL_BASE_PATH) + + if not table_files: + print(f"⚠ No table_*.sql files found in {SQL_BASE_PATH}") + return 0, 0 + + print(f"✓ Found {len(table_files)} table file(s)") + + # Sort tables by dependency order + print("📊 Analyzing dependencies...") + sorted_tables = topological_sort_tables(table_files) + + print("\n📋 Initializing database schema...") + + success_count = 0 + for table_name, sql_file in sorted_tables: + description = f"{table_name} table" + # Check if table already exists (for idempotency) + if table_exists(cur, table_name, database_name): + print(f"⊘ {description} already exists, skipping") + continue + + if execute_sql_file(cur, sql_file, description): + success_count += 1 + + print(f"\n✓ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)") + return success_count, len(sorted_tables) + + +def 
drop_all_tables(cur, database_name): + """Drop all tables from the test database.""" + print(f"\n🗑️ Dropping all existing tables from '{database_name}'...") + + # Get all table names from the database + cur.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = %s", + (database_name,) + ) + tables = cur.fetchall() + + if not tables: + print(" ⊘ No tables to drop") + return + + table_names = [table[0] for table in tables] + print(f" Found {len(table_names)} table(s) to drop") + + # Disable foreign key checks to avoid constraint issues + cur.execute("SET FOREIGN_KEY_CHECKS = 0") + + dropped_count = 0 + for table_name in table_names: + try: + cur.execute(f"DROP TABLE IF EXISTS `{table_name}`") + dropped_count += 1 + except Exception as e: + print(f" ⚠ Warning: Failed to drop table '{table_name}': {e}") + + # Re-enable foreign key checks + cur.execute("SET FOREIGN_KEY_CHECKS = 1") + + print(f" ✓ Dropped {dropped_count}/{len(table_names)} table(s)") + + +def verify_tables(cur, database_name): + """Verify that all expected tables exist and have valid structure.""" + print("\n🔍 Verifying table structure...") + + # Discover all table files to get expected table names + table_files = discover_table_files(SQL_BASE_PATH) + expected_tables = [name for name, _ in table_files] + + if not expected_tables: + print("⚠ No tables found to verify") + return False + + all_valid = True + for table_name in expected_tables: + if table_exists(cur, table_name, database_name): + columns = get_table_columns(cur, table_name, database_name) + print(f"✓ {table_name}: {len(columns)} columns") + if len(columns) == 0: + print(f" ⚠ Warning: {table_name} has no columns") + all_valid = False + else: + print(f"✗ {table_name}: NOT FOUND") + all_valid = False + + return all_valid + + +def main(): + """Main test function.""" + try: + # Parse base database URL + base_params = parse_database_url(BASE_DATABASE_URL) + test_db_name = f"{base_params['database']}_test" + + print(f"🧪 Starting table validation test...") + print(f"📊 Test database: {test_db_name}") + print(f"🔌 Connecting to MySQL server...") + + # Use root credentials to create database (mysqluser may not have CREATE permission) + # Try to get root password from environment or use default + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # Connect as root to create database + root_conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': 'root', + 'password': root_password + } + + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + + # Verify connection + root_cur.execute("SELECT VERSION();") + version = root_cur.fetchone()[0] + print(f"✓ Connected to MySQL: {version}") + + # Create test database if it doesn't exist + print(f"\n📦 Creating test database '{test_db_name}'...") + root_cur.execute(f"CREATE DATABASE IF NOT EXISTS `{test_db_name}`") + + # Grant permissions to mysqluser on the test database + # Use string formatting for GRANT since it's DDL, not DML + username = base_params['user'] + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + print(f"✓ Test database created and permissions granted") + + root_cur.close() + root_conn.close() + + # Small delay to ensure privileges are propagated + import time + time.sleep(0.5) + + # Now connect as mysqluser for table operations + print(f"🔌 Connecting as '{base_params['user']}' to test database...") + conn_params = { + 
'host': base_params['host'], + 'port': base_params['port'], + 'user': base_params['user'], + 'password': base_params['password'], + 'database': test_db_name + } + + try: + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"✓ Connected successfully") + except mysql.connector.Error as e: + print(f"✗ Failed to connect as {base_params['user']}: {e}") + print(f" Attempting to verify privileges...") + # Try to reconnect as root to check if user exists + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"SELECT User, Host FROM mysql.user WHERE User = '{username}'") + users = root_cur.fetchall() + if not users: + print(f" ⚠ User '{username}' does not exist. Creating user...") + root_cur.execute(f"CREATE USER IF NOT EXISTS '{username}'@'%' IDENTIFIED BY '{base_params['password']}'") + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + root_cur.close() + root_conn.close() + time.sleep(0.5) + # Retry connection + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"✓ Connected successfully after creating user") + else: + raise + + # Drop all existing tables from test database for a clean start + drop_all_tables(cur, test_db_name) + conn.commit() + + # Initialize schema in test database + created_count, total_count = initialize_schema(cur, test_db_name) + conn.commit() + + # Verify tables and collect status + table_files = discover_table_files(SQL_BASE_PATH) + sorted_tables = topological_sort_tables(table_files) + + # Track which tables were created successfully and which failed + successful_tables = [] + failed_tables = [] + + for table_name, _ in sorted_tables: + if table_exists(cur, table_name, test_db_name): + # Check if it has valid structure + columns = get_table_columns(cur, table_name, test_db_name) + if len(columns) > 0: + successful_tables.append(table_name) + else: + failed_tables.append(table_name) + else: + failed_tables.append(table_name) + + # Also check verify_tables for overall status + all_valid = verify_tables(cur, test_db_name) + + # Output JSON data for GUI viewer + table_data = { + 'database': test_db_name, + 'successful_tables': successful_tables, + 'failed_tables': failed_tables, + 'summary': { + 'created': created_count, + 'total': total_count, + 'all_valid': all_valid, + 'successful_count': len(successful_tables), + 'failed_count': len(failed_tables) + } + } + + # Output JSON with special marker for test_gui.py to detect + print("\n" + "=" * 60) + print("TABLE_DATA_JSON_START") + print(json.dumps(table_data, indent=2)) + print("TABLE_DATA_JSON_END") + print("=" * 60) + + # Summary + print("\n" + "="*50) + if all_valid and created_count == total_count: + print("✅ TEST PASSED: All tables created and verified successfully!") + print(f" Created {created_count}/{total_count} tables") + print(f" Test database '{test_db_name}' is ready for inspection") + print(f" (Database will be kept for manual inspection)") + elif all_valid: + print("⚠️ TEST PARTIAL: All tables exist but some were skipped") + print(f" Created {created_count}/{total_count} tables") + else: + print("❌ TEST FAILED: Some tables are missing or invalid") + sys.exit(1) + print("="*50) + + cur.close() + conn.close() + + except mysql.connector.Error as e: + print(f"✗ Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"✗ Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + 
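+# NOTE: helpers.topological_sort_tables is not included in this diff. For
+# orientation, here is a minimal sketch of a dependency sort consistent with
+# its use above, assuming dependencies are recovered by scanning each SQL
+# file for REFERENCES clauses (illustrative only; the real helper may differ):
+#
+#     import re
+#
+#     def sort_tables_sketch(table_files):
+#         # table_files: list of (table_name, path) pairs
+#         deps = {name: set(re.findall(r"REFERENCES\s+`?(\w+)`?",
+#                                      path.read_text(), re.IGNORECASE))
+#                 for name, path in table_files}
+#         ordered, seen = [], set()
+#
+#         def visit(name):
+#             if name in seen:
+#                 return
+#             seen.add(name)
+#             for dep in deps.get(name, ()):  # referenced tables first; assumes no cycles
+#                 if dep != name:
+#                     visit(dep)
+#             ordered.append(name)
+#
+#         for name in deps:
+#             visit(name)
+#         lookup = dict(table_files)
+#         return [(n, lookup[n]) for n in ordered if n in lookup]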
+
+if __name__ == "__main__":
+    main()
+
diff --git a/src/sql/applicants/table_applicants.sql b/src/sql/applicants/table_applicants.sql
new file mode 100644
index 0000000..c9ea838
--- /dev/null
+++ b/src/sql/applicants/table_applicants.sql
@@ -0,0 +1,9 @@
+CREATE TABLE applicants (
+    first_name VARCHAR(100) NOT NULL,
+    last_name VARCHAR(100) NOT NULL,
+    uf_id CHAR(8) NOT NULL CHECK (uf_id REGEXP '^[0-9]{8}$') PRIMARY KEY,
+    discord_user VARCHAR(150),
+    github_user VARCHAR(150),
+    qualtrics_link VARCHAR(200)
+);
+
diff --git a/src/sql/location/table_locations.sql b/src/sql/location/table_locations.sql
new file mode 100644
index 0000000..2221ae7
--- /dev/null
+++ b/src/sql/location/table_locations.sql
@@ -0,0 +1,10 @@
+-- Location has coordinates that will be used to position it in the svg in the frontend.
+CREATE TABLE locations (
+    name VARCHAR(50) PRIMARY KEY,
+    x INTEGER NOT NULL,
+    y INTEGER NOT NULL,
+    width INTEGER NOT NULL,
+    height INTEGER NOT NULL,
+    type VARCHAR(50) NOT NULL
+);
+
diff --git a/src/sql/members/insert_member.sql b/src/sql/members/insert_member.sql
new file mode 100644
index 0000000..ad7b841
--- /dev/null
+++ b/src/sql/members/insert_member.sql
@@ -0,0 +1,3 @@
+-- Inserts a new row into the members table (values are bound as named parameters from Python)
+INSERT INTO members(first_name,last_name,uf_id,uf_email,phone_number,team,discord,github,grad_date,join_date,is_leader)
+VALUES (%(first)s,%(last)s,%(ufid)s,%(email)s,%(phone)s,%(team)s,%(discord)s,%(github)s,%(grad)s,%(join)s,%(leader)s)
\ No newline at end of file
diff --git a/src/sql/members/table_members.sql b/src/sql/members/table_members.sql
new file mode 100644
index 0000000..a5baaa7
--- /dev/null
+++ b/src/sql/members/table_members.sql
@@ -0,0 +1,15 @@
+CREATE TABLE members (
+    first_name VARCHAR(100) NOT NULL,
+    last_name VARCHAR(100) NOT NULL,
+    uf_id CHAR(8) NOT NULL CHECK (uf_id REGEXP '^[0-9]{8}$') PRIMARY KEY,
+    uf_email VARCHAR(150) NOT NULL UNIQUE,
+    phone_number VARCHAR(50),
+    team VARCHAR(50),
+    discord VARCHAR(150) NOT NULL UNIQUE,
+    github VARCHAR(150) NOT NULL UNIQUE,
+    grad_date DATE,
+    join_date DATE,
+    is_leader BOOLEAN DEFAULT FALSE,
+    -- MySQL parses but ignores inline REFERENCES, so declare the FK explicitly
+    FOREIGN KEY (team) REFERENCES teams(name)
+);
\ No newline at end of file
diff --git a/src/sql/orders/table_orders.sql b/src/sql/orders/table_orders.sql
new file mode 100644
index 0000000..4a2fb0b
--- /dev/null
+++ b/src/sql/orders/table_orders.sql
@@ -0,0 +1,14 @@
+CREATE TABLE IF NOT EXISTS orders (
+    order_id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    item_name VARCHAR(50),
+    count INT,
+    company VARCHAR(50),
+    item_description VARCHAR(200),
+    cost_estimate INT,
+    purchase_link VARCHAR(200),
+    requester_id CHAR(8),
+    leader_id CHAR(8),
+    -- MySQL parses but ignores inline REFERENCES, so declare the FKs explicitly
+    FOREIGN KEY (requester_id) REFERENCES members(uf_id),
+    FOREIGN KEY (leader_id) REFERENCES members(uf_id)
+);
diff --git a/src/sql/supplies/table_supplies.sql b/src/sql/supplies/table_supplies.sql
new file mode 100644
index 0000000..0f18b44
--- /dev/null
+++ b/src/sql/supplies/table_supplies.sql
@@ -0,0 +1,10 @@
+CREATE TABLE supplies (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    name VARCHAR(200) NOT NULL,
+    amount INTEGER NOT NULL DEFAULT 0,
+    last_order_date DATE,
+    location VARCHAR(50),
+    UNIQUE KEY unique_supply_location (name, location),
+    -- MySQL parses but ignores inline REFERENCES, so declare the FK explicitly
+    FOREIGN KEY (location) REFERENCES locations(name)
+);
diff --git a/src/sql/table.sql b/src/sql/table.sql
deleted file mode 100644
index 7b09ce9..0000000
--- a/src/sql/table.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-CREATE table(
-    id INT,
-    name VARCHAR(100),
-    age INT,
-    last_update DATETIME DEFAULT (CURRENT_TIME()),
-    PRIMARY KEY(id)
-);
-
---WARNING: Inserts should be done in the scripts NOT IN THE SQL FILE--
-
-INSERT INTO table(id, name, age)
-VALUES
-(1, "Robin", 22),
-(2, "Beast Boy", 21),
-(3, "Star Fire", 400),
-(4, "Raven", 20),
-(5, "Cyborg", 21);
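The `insert_member.sql` script above uses pyformat-style named placeholders (`%(first)s`, `%(ufid)s`, ...), which `mysql-connector-python` fills from a dict passed to `cursor.execute()`. A minimal sketch of running it that way; the connection values and member data below are invented for illustration:

```python
import mysql.connector
from pathlib import Path

# Illustrative connection values only; the real ones come from the
# DATABASE_URL / DB_* environment variables.
conn = mysql.connector.connect(
    host="localhost", port=3306,
    user="mysqluser", password="mysqlpassword", database="mydb",
)
cur = conn.cursor()

sql = Path("src/sql/members/insert_member.sql").read_text()
# mysql-connector-python substitutes each %(key)s placeholder from the dict.
cur.execute(sql, {
    "first": "Ada", "last": "Lovelace", "ufid": "12345678",
    "email": "ada@ufl.edu", "phone": None, "team": "Software",
    "discord": "ada_l", "github": "ada-l", "grad": "2026-05-01",
    "join": "2024-08-01", "leader": False,
})
conn.commit()
cur.close()
conn.close()
```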
diff --git a/src/sql/teams/table_teams.sql b/src/sql/teams/table_teams.sql
new file mode 100644
index 0000000..28a64ca
--- /dev/null
+++ b/src/sql/teams/table_teams.sql
@@ -0,0 +1,9 @@
+CREATE TABLE teams (
+    name VARCHAR(50) CHECK (name IN ('Software', 'Electrical', 'Mechanical')) PRIMARY KEY
+);
+
+INSERT INTO teams (name)
+VALUES
+('Software'),
+('Electrical'),
+('Mechanical');
diff --git a/src/sql/weekly_reports/insert_weekly_report.sql b/src/sql/weekly_reports/insert_weekly_report.sql
new file mode 100644
index 0000000..129f2d9
--- /dev/null
+++ b/src/sql/weekly_reports/insert_weekly_report.sql
@@ -0,0 +1,4 @@
+-- Inserts a new row into the weekly_reports table
+-- (MySQL does not support INSERT ... RETURNING, so nothing is returned)
+INSERT INTO weekly_reports(uf_id, report_date, progress_rating)
+VALUES (%(uf_id)s,%(report_date)s,%(progress_rating)s);
\ No newline at end of file
diff --git a/src/sql/weekly_reports/table_weekly_reports.sql b/src/sql/weekly_reports/table_weekly_reports.sql
new file mode 100644
index 0000000..5f7fb75
--- /dev/null
+++ b/src/sql/weekly_reports/table_weekly_reports.sql
@@ -0,0 +1,8 @@
+CREATE TABLE weekly_reports (
+    uf_id CHAR(8),
+    report_date DATE NOT NULL,
+    progress_rating VARCHAR(50) CHECK (progress_rating IN ('Red', 'Yellow', 'Green')),
+    PRIMARY KEY (uf_id, report_date),
+    -- MySQL parses but ignores inline REFERENCES, so declare the FK explicitly
+    FOREIGN KEY (uf_id) REFERENCES members(uf_id)
+);
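For completeness, the `POST /api/supplies/move` operation that the GUI test drives can also be exercised directly against the test API. A minimal sketch, assuming the test instance is reachable on port 5001 and reusing the payload shape from `move_supplies_via_api()`:

```python
import requests

TEST_API_URL = "http://localhost:5001/api"  # assumed test API base URL

# Same payload shape as move_supplies_via_api() in test_locations_supplies.py.
resp = requests.post(
    f"{TEST_API_URL}/supplies/move",
    json={
        "name": "DISTRIBUTED SUPPLY",
        "from_location": "Container A",
        "to_location": "Container B",
        "amount": 5,
    },
    timeout=5,
)
resp.raise_for_status()
print(resp.json())
```

On success the endpoint answers with status 200, which is the same condition the GUI checks before refreshing its container listboxes.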