From 519562c8c2d85419815788eea29320ba7116f8a7 Mon Sep 17 00:00:00 2001 From: willzoo Date: Thu, 25 Sep 2025 14:55:29 -0400 Subject: [PATCH 01/20] added names to table --- src/sql/table.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sql/table.sql b/src/sql/table.sql index 7b09ce9..3ba0c5e 100644 --- a/src/sql/table.sql +++ b/src/sql/table.sql @@ -1,4 +1,4 @@ -CREATE table( +CREATE TABLE students( id INT, name VARCHAR(100), age INT, @@ -8,7 +8,7 @@ CREATE table( --WARNING: Inserts should be done in the scripts NOT IN THE SQL FILE-- -INSERT INTO table(id, name, age) +INSERT INTO students(id, name, age) VALUES (1, "Robin", 22), (2, "Beast Boy", 21), From ae7dc45fcee3d448ddc57bb679aaa41e534e86af Mon Sep 17 00:00:00 2001 From: willzoo Date: Thu, 2 Oct 2025 14:59:36 -0400 Subject: [PATCH 02/20] Working sql commands execute to a local db name db with user:password postgres:admin --- src/scripts/app.py | 11 ++++++++++- src/sql/table.sql | 17 ++++++++--------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/scripts/app.py b/src/scripts/app.py index 9ea61ca..e8cf5c8 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -1,13 +1,22 @@ import os import psycopg2 -DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") +DATABASE_URL = "postgres://postgres:admin@localhost:5432/postgres" try: conn = psycopg2.connect(DATABASE_URL) cur = conn.cursor() cur.execute("SELECT version();") print("Postgres version:", cur.fetchone()) + + # Execute SQL from file + with open("../sql/table.sql", "r") as f: + sql = f.read() + print("Executing SQL from file...") + print(sql) + cur.execute(sql) + conn.commit() + print("SQL executed successfully.") cur.close() conn.close() except Exception as e: diff --git a/src/sql/table.sql b/src/sql/table.sql index 3ba0c5e..28c6485 100644 --- a/src/sql/table.sql +++ b/src/sql/table.sql @@ -1,8 +1,7 @@ CREATE TABLE students( - id INT, - name VARCHAR(100), - age INT, - last_update DATETIME DEFAULT (CURRENT_TIME()), + id INT, + name VARCHAR(100), + age INT, PRIMARY KEY(id) ); @@ -10,8 +9,8 @@ CREATE TABLE students( INSERT INTO students(id, name, age) VALUES -(1, "Robin", 22), -(2, "Beast Boy", 21), -(3, "Star Fire", 400), -(4, "Raven", 20), -(5, "Cyborg", 21); +(1, 'Robin', 22), +(2, 'Beast Boy', 21), +(3, 'Star Fire', 400), +(4, 'Raven', 20), +(5, 'Cyborg', 21); From 18f58bd4202feb6fdba8d5c75f2100c71e42548d Mon Sep 17 00:00:00 2001 From: willzoo Date: Thu, 2 Oct 2025 15:21:40 -0400 Subject: [PATCH 03/20] Closer but postgres_app | sh: 1: pg_isready: not found error --- docker-compose.yaml | 6 +----- src/scripts/app.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 4907c24..1efd7c0 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,5 +1,3 @@ -version: "3.9" - services: db: image: postgres:16 @@ -23,9 +21,7 @@ services: DATABASE_URL: postgres://postgres:postgres@db:5432/mydb volumes: - .:/app - - ./src/sql:/docker-entrypoint-initb.d - command: ["python", "src/scripts/app.py"] + command: ["sh", "-c", "until pg_isready -h db -p 5432; do sleep 1; done && python src/scripts/app.py"] volumes: db_data: - diff --git a/src/scripts/app.py b/src/scripts/app.py index e8cf5c8..24761aa 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -1,7 +1,7 @@ import os import psycopg2 -DATABASE_URL = "postgres://postgres:admin@localhost:5432/postgres" +DATABASE_URL = "postgres://postgres:postgres@db:5432/mydb" try: conn = 
psycopg2.connect(DATABASE_URL) From 50a37ceaeaaa4c7e08327fe490a2af85dd816566 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 5 Oct 2025 22:22:35 -0400 Subject: [PATCH 04/20] Fixed error with docker not running SQL file --- Dockerfile | 5 +---- src/scripts/app.py | 5 +++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 90d56a8..323150f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ WORKDIR /app # Install system dependencies for psycopg2 RUN apt-get update && apt-get install -y \ - gcc libpq-dev && \ + gcc libpq-dev postgresql-client && \ rm -rf /var/lib/apt/lists/* # Install Python dependencies @@ -14,6 +14,3 @@ RUN pip install --no-cache-dir -r requirements.txt # Copy source code COPY . . - -CMD ["python", "app.py"] - diff --git a/src/scripts/app.py b/src/scripts/app.py index 24761aa..abe6d55 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -1,7 +1,8 @@ import os import psycopg2 -DATABASE_URL = "postgres://postgres:postgres@db:5432/mydb" +DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") + try: conn = psycopg2.connect(DATABASE_URL) @@ -10,7 +11,7 @@ print("Postgres version:", cur.fetchone()) # Execute SQL from file - with open("../sql/table.sql", "r") as f: + with open("/app/src/sql/table.sql", "r") as f: sql = f.read() print("Executing SQL from file...") print(sql) From 46e81d677193ee1e1aaa2b830d0c8c4016fd6d47 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 5 Oct 2025 22:59:21 -0400 Subject: [PATCH 05/20] Added sql function to app.py and student tables based on spreadsheet --- src/scripts/app.py | 27 +++++++++++++++++++-------- src/sql/student_progress.sql | 6 ++++++ src/sql/students.sql | 9 +++++++++ src/sql/table.sql | 16 ---------------- 4 files changed, 34 insertions(+), 24 deletions(-) create mode 100644 src/sql/student_progress.sql create mode 100644 src/sql/students.sql delete mode 100644 src/sql/table.sql diff --git a/src/scripts/app.py b/src/scripts/app.py index abe6d55..8432c10 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -3,23 +3,34 @@ DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") +# Function to execute SQL from a file +def execute_sql_file(file_path: str): + try: + conn = psycopg2.connect(DATABASE_URL) + cur = conn.cursor() + with open(file_path, 'r') as file: + sql = file.read() + with cur: + cur.execute(sql) + conn.commit() + except Exception as e: + print(f"Unable to execute SQL file at {file_path}: ", e) try: conn = psycopg2.connect(DATABASE_URL) cur = conn.cursor() cur.execute("SELECT version();") print("Postgres version:", cur.fetchone()) + + # Execute SQL files + execute_sql_file('/app/src/sql/students.sql') + execute_sql_file('/app/src/sql/student_progress.sql') - # Execute SQL from file - with open("/app/src/sql/table.sql", "r") as f: - sql = f.read() - print("Executing SQL from file...") - print(sql) - cur.execute(sql) - conn.commit() - print("SQL executed successfully.") + # Finish up + print("Creation of all SQL Tables succeeded or attempted.") cur.close() conn.close() + except Exception as e: print("Database connection failed:", e) diff --git a/src/sql/student_progress.sql b/src/sql/student_progress.sql new file mode 100644 index 0000000..e603a5b --- /dev/null +++ b/src/sql/student_progress.sql @@ -0,0 +1,6 @@ +CREATE TABLE student_progress ( + progress_id SERIAL PRIMARY KEY, + student_id INT REFERENCES students(student_id), + report_date DATE NOT NULL, + progress_percent INT DEFAULT 0 +); diff --git 
a/src/sql/students.sql b/src/sql/students.sql new file mode 100644 index 0000000..701e37b --- /dev/null +++ b/src/sql/students.sql @@ -0,0 +1,9 @@ +CREATE TABLE students ( + student_id SERIAL PRIMARY KEY, + student_name VARCHAR(100) NOT NULL, + email VARCHAR(150) NOT NULL UNIQUE, + ufid VARCHAR(20) NOT NULL UNIQUE, + leader BOOLEAN DEFAULT FALSE, + team VARCHAR(50), + discord_id VARCHAR(50) NOT NULL UNIQUE +); \ No newline at end of file diff --git a/src/sql/table.sql b/src/sql/table.sql deleted file mode 100644 index 28c6485..0000000 --- a/src/sql/table.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE TABLE students( - id INT, - name VARCHAR(100), - age INT, - PRIMARY KEY(id) -); - ---WARNING: Inserts should be done in the scripts NOT IN THE SQL FILE-- - -INSERT INTO students(id, name, age) -VALUES -(1, 'Robin', 22), -(2, 'Beast Boy', 21), -(3, 'Star Fire', 400), -(4, 'Raven', 20), -(5, 'Cyborg', 21); From 28bd965018406f8cadf64a8e4a03026956b71add Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 5 Oct 2025 23:06:46 -0400 Subject: [PATCH 06/20] Edit students sql based on the issue in mil-sql --- src/sql/students.sql | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/sql/students.sql b/src/sql/students.sql index 701e37b..244a3e0 100644 --- a/src/sql/students.sql +++ b/src/sql/students.sql @@ -1,9 +1,13 @@ CREATE TABLE students ( - student_id SERIAL PRIMARY KEY, - student_name VARCHAR(100) NOT NULL, - email VARCHAR(150) NOT NULL UNIQUE, - ufid VARCHAR(20) NOT NULL UNIQUE, - leader BOOLEAN DEFAULT FALSE, + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + uf_id SERIAL PRIMARY KEY, + uf_email VARCHAR(150) NOT NULL UNIQUE, + phone_number VARCHAR(50), team VARCHAR(50), - discord_id VARCHAR(50) NOT NULL UNIQUE + discord VARCHAR(150) NOT NULL UNIQUE, + github VARCHAR(150) NOT NULL UNIQUE, + grad_date DATE, + join_date DATE, + is_leader BOOLEAN DEFAULT FALSE ); \ No newline at end of file From 2957ee063dc03283b0b5fa8f42c7e5a2936ea719 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 5 Oct 2025 23:08:14 -0400 Subject: [PATCH 07/20] renamed students to members --- src/scripts/app.py | 6 +++--- src/sql/{student_progress.sql => member_progress.sql} | 4 ++-- src/sql/{students.sql => members.sql} | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) rename src/sql/{student_progress.sql => member_progress.sql} (55%) rename src/sql/{students.sql => members.sql} (93%) diff --git a/src/scripts/app.py b/src/scripts/app.py index 8432c10..6cbc63a 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -21,10 +21,10 @@ def execute_sql_file(file_path: str): cur = conn.cursor() cur.execute("SELECT version();") print("Postgres version:", cur.fetchone()) - + # Execute SQL files - execute_sql_file('/app/src/sql/students.sql') - execute_sql_file('/app/src/sql/student_progress.sql') + execute_sql_file('/app/src/sql/members.sql') + execute_sql_file('/app/src/sql/member_progress.sql') # Finish up print("Creation of all SQL Tables succeeded or attempted.") diff --git a/src/sql/student_progress.sql b/src/sql/member_progress.sql similarity index 55% rename from src/sql/student_progress.sql rename to src/sql/member_progress.sql index e603a5b..f4af2a9 100644 --- a/src/sql/student_progress.sql +++ b/src/sql/member_progress.sql @@ -1,6 +1,6 @@ -CREATE TABLE student_progress ( +CREATE TABLE member_progress ( progress_id SERIAL PRIMARY KEY, - student_id INT REFERENCES students(student_id), + uf_id INT REFERENCES members(uf_id), report_date DATE NOT NULL, progress_percent 
INT DEFAULT 0 ); diff --git a/src/sql/students.sql b/src/sql/members.sql similarity index 93% rename from src/sql/students.sql rename to src/sql/members.sql index 244a3e0..a8a4449 100644 --- a/src/sql/students.sql +++ b/src/sql/members.sql @@ -1,4 +1,4 @@ -CREATE TABLE students ( +CREATE TABLE members ( first_name VARCHAR(100) NOT NULL, last_name VARCHAR(100) NOT NULL, uf_id SERIAL PRIMARY KEY, From 1a36c29407917b730664f8d52dddabbfbbc1a989 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 12 Oct 2025 22:06:28 -0400 Subject: [PATCH 08/20] Added insert member script and it executes in app.py --- src/scripts/app.py | 60 ++++++++++++++++++++++-------- src/sql/insert_member.sql | 4 ++ src/sql/member_progress.sql | 6 --- src/sql/tables/member_progress.sql | 6 +++ src/sql/{ => tables}/members.sql | 0 5 files changed, 54 insertions(+), 22 deletions(-) create mode 100644 src/sql/insert_member.sql delete mode 100644 src/sql/member_progress.sql create mode 100644 src/sql/tables/member_progress.sql rename src/sql/{ => tables}/members.sql (100%) diff --git a/src/scripts/app.py b/src/scripts/app.py index 6cbc63a..44b645b 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -1,20 +1,13 @@ +from pathlib import Path import os import psycopg2 DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") -# Function to execute SQL from a file -def execute_sql_file(file_path: str): - try: - conn = psycopg2.connect(DATABASE_URL) - cur = conn.cursor() - with open(file_path, 'r') as file: - sql = file.read() - with cur: - cur.execute(sql) - conn.commit() - except Exception as e: - print(f"Unable to execute SQL file at {file_path}: ", e) +# Global SQL strings +insert_member = Path("/app/src/sql/insert_member.sql").read_text() +members_table = Path("/app/src/sql/tables/members.sql").read_text() +member_progress_table = Path("/app/src/sql/tables/member_progress.sql").read_text() try: conn = psycopg2.connect(DATABASE_URL) @@ -22,12 +15,47 @@ def execute_sql_file(file_path: str): cur.execute("SELECT version();") print("Postgres version:", cur.fetchone()) - # Execute SQL files - execute_sql_file('/app/src/sql/members.sql') - execute_sql_file('/app/src/sql/member_progress.sql') + # Drop old tables for testing purposes + cur.execute("DROP TABLE IF EXISTS member_progress;") + cur.execute("DROP TABLE IF EXISTS members;") + conn.commit() + + # Make SQL Tables + try: + cur.execute(members_table) + except Exception as e: + print("Make member table failed.", e) + else: + print("Make member table succeeded.") + try: + cur.execute(member_progress_table) + except Exception as e: + print("Make progress table failed.", e) + else: + print("Make progress table succeeded.") + + # Insert a test member + try: + cur.execute(insert_member, { + "first": "Albert", + "last": "Gator", + "email": "albert.gator@ufl.edu", + "phone": "352-201-0001", + "team": "Mechanical", + "discord":"AlbertDiscord", + "github": "AlbertGithub", + "grad": "2026-05-01", + "join": "2024-09-01", + "leader": False, + }) + except Exception as e: + print("Insert member failed:", e) + else: + print("Insert member succeeded.") # Finish up - print("Creation of all SQL Tables succeeded or attempted.") + print("Connection committing and then closing.") + conn.commit() cur.close() conn.close() diff --git a/src/sql/insert_member.sql b/src/sql/insert_member.sql new file mode 100644 index 0000000..f1577ee --- /dev/null +++ b/src/sql/insert_member.sql @@ -0,0 +1,4 @@ +-- Inserts into members table and returns the uf_id of the new member +INSERT 
INTO members(first_name,last_name,uf_email,phone_number,team,discord,github,grad_date,join_date,is_leader) +VALUES (%(first)s,%(last)s,%(email)s,%(phone)s,%(team)s,%(discord)s,%(github)s,%(grad)s,%(join)s,%(leader)s) +RETURNING uf_id; diff --git a/src/sql/member_progress.sql b/src/sql/member_progress.sql deleted file mode 100644 index f4af2a9..0000000 --- a/src/sql/member_progress.sql +++ /dev/null @@ -1,6 +0,0 @@ -CREATE TABLE member_progress ( - progress_id SERIAL PRIMARY KEY, - uf_id INT REFERENCES members(uf_id), - report_date DATE NOT NULL, - progress_percent INT DEFAULT 0 -); diff --git a/src/sql/tables/member_progress.sql b/src/sql/tables/member_progress.sql new file mode 100644 index 0000000..0b5385d --- /dev/null +++ b/src/sql/tables/member_progress.sql @@ -0,0 +1,6 @@ +CREATE TABLE member_progress ( + uf_id INT REFERENCES members(uf_id), + report_date DATE NOT NULL, + progress_rating VARCHAR(50) CHECK (progress_rating IN ('Red', 'Yellow', 'Green')), + PRIMARY KEY (uf_id, report_date) +); diff --git a/src/sql/members.sql b/src/sql/tables/members.sql similarity index 100% rename from src/sql/members.sql rename to src/sql/tables/members.sql From 8784299eac2d29fba85453fba03b09a38a24e952 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 19 Oct 2025 20:44:14 -0400 Subject: [PATCH 09/20] Defined a lot more tables --- src/scripts/app.py | 46 +++++++++++-------- src/sql/insert_member.sql | 4 -- src/sql/members/insert_member.sql | 3 ++ src/sql/members/table_members.sql | 13 ++++++ src/sql/orders/table_orders.sql | 11 +++++ src/sql/supplies/table_supplies.sql | 6 +++ src/sql/tables/members.sql | 13 ------ src/sql/teams/table_teams.sql | 9 ++++ .../weekly_reports/insert_weekly_report.sql | 4 ++ .../table_weekly_reports.sql} | 6 +-- 10 files changed, 77 insertions(+), 38 deletions(-) delete mode 100644 src/sql/insert_member.sql create mode 100644 src/sql/members/insert_member.sql create mode 100644 src/sql/members/table_members.sql create mode 100644 src/sql/orders/table_orders.sql create mode 100644 src/sql/supplies/table_supplies.sql delete mode 100644 src/sql/tables/members.sql create mode 100644 src/sql/teams/table_teams.sql create mode 100644 src/sql/weekly_reports/insert_weekly_report.sql rename src/sql/{tables/member_progress.sql => weekly_reports/table_weekly_reports.sql} (51%) diff --git a/src/scripts/app.py b/src/scripts/app.py index 44b645b..7ff18a8 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -4,10 +4,23 @@ DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") -# Global SQL strings -insert_member = Path("/app/src/sql/insert_member.sql").read_text() -members_table = Path("/app/src/sql/tables/members.sql").read_text() -member_progress_table = Path("/app/src/sql/tables/member_progress.sql").read_text() +# Create Table SQL strings +table_members = Path("/app/src/sql/members/table_members.sql").read_text() +table_weekly_reports = Path("/app/src/sql/weekly_reports/table_weekly_reports.sql").read_text() +table_teams = Path("/app/src/sql/teams/table_teams.sql").read_text() +table_supplies = Path("/app/src/sql/supplies/table_supplies.sql").read_text() +table_orders = Path("/app/src/sql/orders/table_orders.sql").read_text() +# Insert SQL strings +insert_member = Path("/app/src/sql/members/insert_member.sql").read_text() + +# Make table, log if success or failure +def make_table(cur, table_sql, table_name): + try: + cur.execute(table_sql) + except Exception as e: + print(f"Make {table_name} table failed.", e) + else: + print(f"Make {table_name} 
table succeeded.") try: conn = psycopg2.connect(DATABASE_URL) @@ -16,29 +29,26 @@ print("Postgres version:", cur.fetchone()) # Drop old tables for testing purposes - cur.execute("DROP TABLE IF EXISTS member_progress;") - cur.execute("DROP TABLE IF EXISTS members;") + cur.execute("DROP TABLE IF EXISTS members CASCADE;") + cur.execute("DROP TABLE IF EXISTS weekly_reports;") + cur.execute("DROP TABLE IF EXISTS teams;") + cur.execute("DROP TABLE IF EXISTS supplies;") + cur.execute("DROP TABLE IF EXISTS orders;") conn.commit() # Make SQL Tables - try: - cur.execute(members_table) - except Exception as e: - print("Make member table failed.", e) - else: - print("Make member table succeeded.") - try: - cur.execute(member_progress_table) - except Exception as e: - print("Make progress table failed.", e) - else: - print("Make progress table succeeded.") + make_table(cur, table_members, "members") + make_table(cur, table_weekly_reports, "weekly_reports") + make_table(cur, table_teams, "teams") + make_table(cur, table_supplies, "supplies") + make_table(cur, table_orders, "orders") # Insert a test member try: cur.execute(insert_member, { "first": "Albert", "last": "Gator", + "ufid": "12345678", "email": "albert.gator@ufl.edu", "phone": "352-201-0001", "team": "Mechanical", diff --git a/src/sql/insert_member.sql b/src/sql/insert_member.sql deleted file mode 100644 index f1577ee..0000000 --- a/src/sql/insert_member.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Inserts into members table and returns the uf_id of the new member -INSERT INTO members(first_name,last_name,uf_email,phone_number,team,discord,github,grad_date,join_date,is_leader) -VALUES (%(first)s,%(last)s,%(email)s,%(phone)s,%(team)s,%(discord)s,%(github)s,%(grad)s,%(join)s,%(leader)s) -RETURNING uf_id; diff --git a/src/sql/members/insert_member.sql b/src/sql/members/insert_member.sql new file mode 100644 index 0000000..ad7b841 --- /dev/null +++ b/src/sql/members/insert_member.sql @@ -0,0 +1,3 @@ +-- Inserts into members table and returns the uf_id of the new member +INSERT INTO members(first_name,last_name,uf_id,uf_email,phone_number,team,discord,github,grad_date,join_date,is_leader) +VALUES (%(first)s,%(last)s,%(ufid)s,%(email)s,%(phone)s,%(team)s,%(discord)s,%(github)s,%(grad)s,%(join)s,%(leader)s) \ No newline at end of file diff --git a/src/sql/members/table_members.sql b/src/sql/members/table_members.sql new file mode 100644 index 0000000..aaf3076 --- /dev/null +++ b/src/sql/members/table_members.sql @@ -0,0 +1,13 @@ +CREATE TABLE members ( + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + uf_id CHAR(8) NOT NULL CHECK (uf_id ~ '^[0-9]{8}$') PRIMARY KEY, + uf_email VARCHAR(150) NOT NULL UNIQUE, + phone_number VARCHAR(50), + team VARCHAR(50), + discord VARCHAR(150) NOT NULL UNIQUE, + github VARCHAR(150) NOT NULL UNIQUE, + grad_date DATE, + join_date DATE, + is_leader BOOLEAN DEFAULT FALSE +); \ No newline at end of file diff --git a/src/sql/orders/table_orders.sql b/src/sql/orders/table_orders.sql new file mode 100644 index 0000000..2c22e36 --- /dev/null +++ b/src/sql/orders/table_orders.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS orders ( + order_id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + item_name VARCHAR(50), + count INT, + company VARCHAR(50), + item_description VARCHAR(200), + cost_estimate INT, + purchase_link VARCHAR(200), + requester_id CHAR(8) REFERENCES members(uf_id), + leader_id CHAR(8) REFERENCES members(uf_id) +); diff --git a/src/sql/supplies/table_supplies.sql 
b/src/sql/supplies/table_supplies.sql new file mode 100644 index 0000000..53edcf5 --- /dev/null +++ b/src/sql/supplies/table_supplies.sql @@ -0,0 +1,6 @@ +CREATE TABLE supplies ( + id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + name VARCHAR(200) NOT NULL, + amount INTEGER NOT NULL DEFAULT 0, + last_order_date DATE +); diff --git a/src/sql/tables/members.sql b/src/sql/tables/members.sql deleted file mode 100644 index a8a4449..0000000 --- a/src/sql/tables/members.sql +++ /dev/null @@ -1,13 +0,0 @@ -CREATE TABLE members ( - first_name VARCHAR(100) NOT NULL, - last_name VARCHAR(100) NOT NULL, - uf_id SERIAL PRIMARY KEY, - uf_email VARCHAR(150) NOT NULL UNIQUE, - phone_number VARCHAR(50), - team VARCHAR(50), - discord VARCHAR(150) NOT NULL UNIQUE, - github VARCHAR(150) NOT NULL UNIQUE, - grad_date DATE, - join_date DATE, - is_leader BOOLEAN DEFAULT FALSE -); \ No newline at end of file diff --git a/src/sql/teams/table_teams.sql b/src/sql/teams/table_teams.sql new file mode 100644 index 0000000..28a64ca --- /dev/null +++ b/src/sql/teams/table_teams.sql @@ -0,0 +1,9 @@ +CREATE TABLE teams ( + name VARCHAR(50) CHECK (name IN ('Software', 'Electrical', 'Mechanical')) PRIMARY KEY +); + +INSERT INTO teams (name) +VALUES +('Software'), +('Electrical'), +('Mechanical'); diff --git a/src/sql/weekly_reports/insert_weekly_report.sql b/src/sql/weekly_reports/insert_weekly_report.sql new file mode 100644 index 0000000..129f2d9 --- /dev/null +++ b/src/sql/weekly_reports/insert_weekly_report.sql @@ -0,0 +1,4 @@ +-- Inserts into members table and returns the uf_id of the new member +INSERT INTO weekly_reports(uf_id, report_date, progress_rating) +VALUES (%(uf_id)s,%(report_date)s,%(progress_rating)s) +RETURNING uf_id; \ No newline at end of file diff --git a/src/sql/tables/member_progress.sql b/src/sql/weekly_reports/table_weekly_reports.sql similarity index 51% rename from src/sql/tables/member_progress.sql rename to src/sql/weekly_reports/table_weekly_reports.sql index 0b5385d..5f7fb75 100644 --- a/src/sql/tables/member_progress.sql +++ b/src/sql/weekly_reports/table_weekly_reports.sql @@ -1,6 +1,6 @@ -CREATE TABLE member_progress ( - uf_id INT REFERENCES members(uf_id), - report_date DATE NOT NULL, +CREATE TABLE weekly_reports ( + uf_id CHAR(8) REFERENCES members(uf_id), + report_date DATE NOT NULL, progress_rating VARCHAR(50) CHECK (progress_rating IN ('Red', 'Yellow', 'Green')), PRIMARY KEY (uf_id, report_date) ); From d5bcd695c17a3c87d416fbd53b0e8b7c23754524 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 19 Oct 2025 21:25:27 -0400 Subject: [PATCH 10/20] members reference teams --- src/sql/members/table_members.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sql/members/table_members.sql b/src/sql/members/table_members.sql index aaf3076..35a2266 100644 --- a/src/sql/members/table_members.sql +++ b/src/sql/members/table_members.sql @@ -4,7 +4,7 @@ CREATE TABLE members ( uf_id CHAR(8) NOT NULL CHECK (uf_id ~ '^[0-9]{8}$') PRIMARY KEY, uf_email VARCHAR(150) NOT NULL UNIQUE, phone_number VARCHAR(50), - team VARCHAR(50), + team VARCHAR(50) REFERENCES teams(name), discord VARCHAR(150) NOT NULL UNIQUE, github VARCHAR(150) NOT NULL UNIQUE, grad_date DATE, From 6f33d362aae8af4de3d142413d84923cf7778c35 Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 26 Oct 2025 23:28:07 -0400 Subject: [PATCH 11/20] Added applicants table, location table, and key to supplies table --- src/scripts/app.py | 8 +++++++- src/sql/applicants/table_applicants.sql | 9 +++++++++ 
src/sql/location/table_location.sql | 5 +++++ src/sql/supplies/table_supplies.sql | 3 ++- 4 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 src/sql/applicants/table_applicants.sql create mode 100644 src/sql/location/table_location.sql diff --git a/src/scripts/app.py b/src/scripts/app.py index 7ff18a8..8c24e0a 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -10,6 +10,8 @@ table_teams = Path("/app/src/sql/teams/table_teams.sql").read_text() table_supplies = Path("/app/src/sql/supplies/table_supplies.sql").read_text() table_orders = Path("/app/src/sql/orders/table_orders.sql").read_text() +table_applicants = Path("/app/src/sql/applicants/table_applicants.sql").read_text() +table_locations = Path("/app/src/sql/location/table_location.sql").read_text() # Insert SQL strings insert_member = Path("/app/src/sql/members/insert_member.sql").read_text() @@ -34,14 +36,18 @@ def make_table(cur, table_sql, table_name): cur.execute("DROP TABLE IF EXISTS teams;") cur.execute("DROP TABLE IF EXISTS supplies;") cur.execute("DROP TABLE IF EXISTS orders;") + cur.execute("DROP TABLE IF EXISTS applicants;") + cur.execute("DROP TABLE IF EXISTS locations;") conn.commit() # Make SQL Tables + make_table(cur, table_teams, "teams") + make_table(cur, table_locations, "locations") make_table(cur, table_members, "members") make_table(cur, table_weekly_reports, "weekly_reports") - make_table(cur, table_teams, "teams") make_table(cur, table_supplies, "supplies") make_table(cur, table_orders, "orders") + make_table(cur, table_applicants, "applicants") # Insert a test member try: diff --git a/src/sql/applicants/table_applicants.sql b/src/sql/applicants/table_applicants.sql new file mode 100644 index 0000000..6642fc6 --- /dev/null +++ b/src/sql/applicants/table_applicants.sql @@ -0,0 +1,9 @@ +CREATE TABLE applicants ( + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + uf_id CHAR(8) NOT NULL CHECK (uf_id ~ '^[0-9]{8}$') PRIMARY KEY, + discord_user VARCHAR(150), + github_user VARCHAR(150), + qualtrics_link VARCHAR(200) +); + diff --git a/src/sql/location/table_location.sql b/src/sql/location/table_location.sql new file mode 100644 index 0000000..d0340d4 --- /dev/null +++ b/src/sql/location/table_location.sql @@ -0,0 +1,5 @@ +-- TODO: Add encoding for actual position coordinates later +CREATE TABLE locations ( + name VARCHAR(50) PRIMARY KEY +); + diff --git a/src/sql/supplies/table_supplies.sql b/src/sql/supplies/table_supplies.sql index 53edcf5..b5ea04f 100644 --- a/src/sql/supplies/table_supplies.sql +++ b/src/sql/supplies/table_supplies.sql @@ -2,5 +2,6 @@ CREATE TABLE supplies ( id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name VARCHAR(200) NOT NULL, amount INTEGER NOT NULL DEFAULT 0, - last_order_date DATE + last_order_date DATE, + location VARCHAR(50) REFERENCES locations(name) ); From f4cc67485117a22b69387ee0a4154aad2c69027b Mon Sep 17 00:00:00 2001 From: willzoo Date: Sun, 26 Oct 2025 23:29:47 -0400 Subject: [PATCH 12/20] more comments --- src/sql/location/table_location.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sql/location/table_location.sql b/src/sql/location/table_location.sql index d0340d4..7505f60 100644 --- a/src/sql/location/table_location.sql +++ b/src/sql/location/table_location.sql @@ -1,4 +1,6 @@ -- TODO: Add encoding for actual position coordinates later +-- Or alternatively, short instructions on how to find the location in the lab +-- Or both CREATE TABLE locations ( name VARCHAR(50) PRIMARY KEY ); From 
cfc339ee0a408ce4a5bb97b7c36a56902033a208 Mon Sep 17 00:00:00 2001 From: willzoo Date: Wed, 12 Nov 2025 11:17:29 -0500 Subject: [PATCH 13/20] move everything to mysql from postgres, add table_testing script that auto looks in test folders --- Dockerfile | 4 +- Makefile | 17 +- README.md | 44 ++++- docker-compose.yaml | 30 ++-- requirements.txt | 2 +- src/scripts/app.py | 155 +++++++++-------- src/scripts/helpers.py | 193 +++++++++++++++++++++ src/scripts/test_tables.py | 220 ++++++++++++++++++++++++ src/sql/applicants/table_applicants.sql | 2 +- src/sql/location/table_location.sql | 7 - src/sql/location/table_locations.sql | 10 ++ src/sql/members/table_members.sql | 2 +- src/sql/orders/table_orders.sql | 2 +- src/sql/supplies/table_supplies.sql | 2 +- 14 files changed, 589 insertions(+), 101 deletions(-) create mode 100644 src/scripts/helpers.py create mode 100644 src/scripts/test_tables.py delete mode 100644 src/sql/location/table_location.sql create mode 100644 src/sql/location/table_locations.sql diff --git a/Dockerfile b/Dockerfile index 323150f..a210162 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,9 +3,9 @@ FROM python:3.11-slim # Set working directory WORKDIR /app -# Install system dependencies for psycopg2 +# Install system dependencies for mysql-connector-python RUN apt-get update && apt-get install -y \ - gcc libpq-dev postgresql-client && \ + gcc libmariadb-dev pkg-config mariadb-client && \ rm -rf /var/lib/apt/lists/* # Install Python dependencies diff --git a/Makefile b/Makefile index 21c821d..72a2464 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -PROJECT_NAME=postgres_service +PROJECT_NAME=mysql_service COMPOSE=docker-compose -p $(PROJECT_NAME) -## up: Start the postgres and app containers in the background +## up: Start the mysql and app containers in the background .PHONY: up up: $(COMPOSE) up -d @@ -16,10 +16,15 @@ down: logs: $(COMPOSE) logs -f -## psql: Open a psql shell into the postgres container -.PHONY: psql -psql: - $(COMPOSE) exec db psql -U postgres -d mydb +## mysql: Open a mysql shell into the mysql container +.PHONY: mysql +mysql: + $(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb + +## test: Run table creation tests in a test database +.PHONY: test +test: + $(COMPOSE) exec app python src/scripts/test_tables.py ## build: Build or rebuild services .PHONY: build diff --git a/README.md b/README.md index 0914a52..51e7e98 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# PostgreSQL Service Setup +# MySQL Service Setup -This project provides a simple Dockerized PostgreSQL database and a Python service for interacting with it. +This project provides a simple Dockerized MySQL database and a Python service for interacting with it. --- @@ -17,3 +17,43 @@ This project provides a simple Dockerized PostgreSQL database and a Python servi ```bash make up ``` + +This will: +- Start a MySQL 8.0 database container +- Initialize the database schema (idempotent - safe to run multiple times) + +--- + +## Database Initialization + +The startup script (`src/scripts/app.py`) handles database schema initialization automatically. It: + +1. **Creates tables** in the correct dependency order (idempotent) +2. **Only creates tables that don't exist** - safe to run multiple times +3. 
**Never drops existing data** - production-safe + +### Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `DATABASE_URL` | `mysql://mysqluser:mysqlpassword@db:3306/mydb` | MySQL connection string | + +### Separate Scripts + +- **Seed data**: Use separate seed scripts (to be added) for inserting test data +- **Database reset**: Use separate reset scripts (to be added) for dropping/recreating tables + +--- + +## Database Schema + +The database includes the following tables: +- `teams` - Team definitions +- `locations` - Storage locations +- `members` - Team members +- `weekly_reports` - Member progress reports +- `supplies` - Inventory items +- `orders` - Purchase orders +- `applicants` - Applicant information + +See `src/sql/` for table definitions. \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index 1efd7c0..ff5a84a 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,27 +1,35 @@ services: db: - image: postgres:16 - container_name: postgres_db + image: mysql:8.0 + container_name: mysql_db restart: unless-stopped environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: mydb + MYSQL_ROOT_PASSWORD: rootpassword + MYSQL_DATABASE: mydb + MYSQL_USER: mysqluser + MYSQL_PASSWORD: mysqlpassword ports: - - "5432:5432" + - "3306:3306" volumes: - - db_data:/var/lib/postgresql/data + - db_data:/var/lib/mysql + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "root", "-prootpassword"] + interval: 10s + timeout: 5s + retries: 5 app: build: . - container_name: postgres_app + container_name: mysql_app depends_on: - - db + db: + condition: service_healthy environment: - DATABASE_URL: postgres://postgres:postgres@db:5432/mydb + DATABASE_URL: mysql://mysqluser:mysqlpassword@db:3306/mydb + MYSQL_ROOT_PASSWORD: rootpassword volumes: - .:/app - command: ["sh", "-c", "until pg_isready -h db -p 5432; do sleep 1; done && python src/scripts/app.py"] + command: ["sh", "-c", "until mysqladmin ping -h db -u mysqluser -pmysqlpassword --silent 2>/dev/null; do sleep 1; done && python src/scripts/app.py"] volumes: db_data: diff --git a/requirements.txt b/requirements.txt index b67da01..e4a79f9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -psycopg2-binary==2.9.9 +mysql-connector-python==8.2.0 sqlalchemy==2.0.30 alembic==1.13.2 diff --git a/src/scripts/app.py b/src/scripts/app.py index 8c24e0a..22e4638 100644 --- a/src/scripts/app.py +++ b/src/scripts/app.py @@ -1,80 +1,99 @@ -from pathlib import Path -import os -import psycopg2 +""" +Database initialization script. -DATABASE_URL = os.getenv("DATABASE_URL", "postgres://postgres:postgres@db:5432/mydb") +This script initializes the database schema (idempotent - safe to run multiple times). +It only creates tables if they don't already exist, making it safe for production use. 
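Here "idempotent" is concrete: each CREATE is guarded by an
information_schema lookup (see table_exists() in src/scripts/helpers.py),
roughly

    SELECT COUNT(*) FROM information_schema.tables
    WHERE table_schema = DATABASE() AND table_name = %s;

and a table's DDL file runs only when that count is zero, so re-running
the script never drops or recreates existing data.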
-# Create Table SQL strings -table_members = Path("/app/src/sql/members/table_members.sql").read_text() -table_weekly_reports = Path("/app/src/sql/weekly_reports/table_weekly_reports.sql").read_text() -table_teams = Path("/app/src/sql/teams/table_teams.sql").read_text() -table_supplies = Path("/app/src/sql/supplies/table_supplies.sql").read_text() -table_orders = Path("/app/src/sql/orders/table_orders.sql").read_text() -table_applicants = Path("/app/src/sql/applicants/table_applicants.sql").read_text() -table_locations = Path("/app/src/sql/location/table_location.sql").read_text() -# Insert SQL strings -insert_member = Path("/app/src/sql/members/insert_member.sql").read_text() +Note: Seed data and database reset functionality are handled by separate scripts. -# Make table, log if success or failure -def make_table(cur, table_sql, table_name): - try: - cur.execute(table_sql) - except Exception as e: - print(f"Make {table_name} table failed.", e) - else: - print(f"Make {table_name} table succeeded.") +Usage: + python src/scripts/app.py + +Environment variables: + DATABASE_URL: MySQL connection string (default: mysql://mysqluser:mysqlpassword@db:3306/mydb) +""" +import os +import sys +import mysql.connector +from helpers import ( + get_sql_base_path, + discover_table_files, + topological_sort_tables, + table_exists, + execute_sql_file, + parse_database_url +) + +DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") + +# Base path for SQL files (works in Docker and locally) +SQL_BASE_PATH = get_sql_base_path(__file__) -try: - conn = psycopg2.connect(DATABASE_URL) - cur = conn.cursor() - cur.execute("SELECT version();") - print("Postgres version:", cur.fetchone()) - # Drop old tables for testing purposes - cur.execute("DROP TABLE IF EXISTS members CASCADE;") - cur.execute("DROP TABLE IF EXISTS weekly_reports;") - cur.execute("DROP TABLE IF EXISTS teams;") - cur.execute("DROP TABLE IF EXISTS supplies;") - cur.execute("DROP TABLE IF EXISTS orders;") - cur.execute("DROP TABLE IF EXISTS applicants;") - cur.execute("DROP TABLE IF EXISTS locations;") - conn.commit() +def initialize_schema(cur): + """Initialize database schema in correct dependency order.""" + print("\n๐Ÿ“‹ Discovering table files...") + + # Discover all table_*.sql files recursively + table_files = discover_table_files(SQL_BASE_PATH) + + if not table_files: + print(f"โš  No table_*.sql files found in {SQL_BASE_PATH}") + return False + + print(f"โœ“ Found {len(table_files)} table file(s)") + + # Sort tables by dependency order + print("๐Ÿ“Š Analyzing dependencies...") + sorted_tables = topological_sort_tables(table_files) + + print("\n๐Ÿ“‹ Initializing database schema...") + + success_count = 0 + for table_name, sql_file in sorted_tables: + description = f"{table_name} table" + # Check if table already exists (for idempotency) + if table_exists(cur, table_name): + print(f"โŠ˜ {description} already exists, skipping") + continue + + if execute_sql_file(cur, sql_file, description): + success_count += 1 + + print(f"\nโœ“ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)") + return success_count > 0 - # Make SQL Tables - make_table(cur, table_teams, "teams") - make_table(cur, table_locations, "locations") - make_table(cur, table_members, "members") - make_table(cur, table_weekly_reports, "weekly_reports") - make_table(cur, table_supplies, "supplies") - make_table(cur, table_orders, "orders") - make_table(cur, table_applicants, "applicants") - # Insert a test member +def 
main(): + """Main initialization function.""" try: - cur.execute(insert_member, { - "first": "Albert", - "last": "Gator", - "ufid": "12345678", - "email": "albert.gator@ufl.edu", - "phone": "352-201-0001", - "team": "Mechanical", - "discord":"AlbertDiscord", - "github": "AlbertGithub", - "grad": "2026-05-01", - "join": "2024-09-01", - "leader": False, - }) + print("๐Ÿ”Œ Connecting to database...") + db_params = parse_database_url(DATABASE_URL) + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + + # Verify connection + cur.execute("SELECT VERSION();") + version = cur.fetchone()[0] + print(f"โœ“ Connected to MySQL: {version}") + + # Initialize schema + initialize_schema(cur) + conn.commit() + + print("\nโœ… Database initialization complete!") + + cur.close() + conn.close() + + except mysql.connector.Error as e: + print(f"โœ— Database connection failed: {e}") + sys.exit(1) except Exception as e: - print("Insert member failed:", e) - else: - print("Insert member succeeded.") + print(f"โœ— Unexpected error: {e}") + sys.exit(1) - # Finish up - print("Connection committing and then closing.") - conn.commit() - cur.close() - conn.close() -except Exception as e: - print("Database connection failed:", e) +if __name__ == "__main__": + main() diff --git a/src/scripts/helpers.py b/src/scripts/helpers.py new file mode 100644 index 0000000..db3e90a --- /dev/null +++ b/src/scripts/helpers.py @@ -0,0 +1,193 @@ +""" +Shared helper functions for database schema management. + +This module contains common utilities used by app.py and test_tables.py +for discovering, parsing, and managing database tables. +""" +from pathlib import Path +import re +from urllib.parse import urlparse +from collections import defaultdict, deque + + +def get_sql_base_path(script_file): + """ + Get the SQL base path, works in both Docker and local environments. + + Args: + script_file: The __file__ from the calling script + + Returns: + Path object pointing to the SQL directory + """ + if Path("/app/src/sql").exists(): + return Path("/app/src/sql") # Docker path + else: + # Local development path (relative to script location) + return Path(script_file).parent.parent / "sql" + + +def discover_table_files(sql_base_path): + """ + Recursively discover all table_*.sql files in the SQL directory. + Returns a list of (table_name, file_path) tuples. + """ + table_files = [] + if not sql_base_path.exists(): + return table_files + + # Recursively find all table_*.sql files + for sql_file in sql_base_path.rglob("table_*.sql"): + # Extract table name from filename: table_.sql -> + match = re.match(r"table_(.+)\.sql$", sql_file.name, re.IGNORECASE) + if match: + table_name = match.group(1) + table_files.append((table_name, sql_file)) + + return table_files + + +def extract_table_dependencies(sql_content, table_name): + """ + Extract foreign key dependencies from SQL content. + Returns a set of table names that this table depends on. + """ + dependencies = set() + # Look for REFERENCES table_name patterns (case insensitive) + # Matches: REFERENCES table_name, REFERENCES `table_name`, REFERENCES schema.table_name + pattern = r'REFERENCES\s+(?:`?(\w+)`?\.)?`?(\w+)`?' + matches = re.finditer(pattern, sql_content, re.IGNORECASE) + for match in matches: + ref_table = match.group(2) + if ref_table and ref_table.lower() != table_name.lower(): + dependencies.add(ref_table.lower()) + return dependencies + + +def topological_sort_tables(table_files): + """ + Sort tables in dependency order using topological sort. 
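Dependencies are the REFERENCES edges collected by
extract_table_dependencies() above; for this repo's schema those edges are
members -> teams, weekly_reports -> members, supplies -> locations, and
orders -> members, so one valid creation order is teams, locations,
members, applicants, weekly_reports, supplies, orders.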
+ Tables with no dependencies come first. + """ + # Build dependency graph + table_names = [name for name, _ in table_files] + dependencies = {} + table_file_map = {} + + for table_name, sql_file in table_files: + table_file_map[table_name.lower()] = (table_name, sql_file) + sql_content = sql_file.read_text() + deps = extract_table_dependencies(sql_content, table_name) + dependencies[table_name.lower()] = deps + + # Topological sort + in_degree = defaultdict(int) + graph = defaultdict(list) + + for table in table_names: + table_lower = table.lower() + in_degree[table_lower] = 0 + + for table in table_names: + table_lower = table.lower() + for dep in dependencies[table_lower]: + if dep in [t.lower() for t in table_names]: + graph[dep].append(table_lower) + in_degree[table_lower] += 1 + + # Kahn's algorithm + queue = deque([t for t in [tn.lower() for tn in table_names] if in_degree[t] == 0]) + sorted_tables = [] + + while queue: + current = queue.popleft() + if current in table_file_map: + sorted_tables.append(table_file_map[current]) + for neighbor in graph[current]: + in_degree[neighbor] -= 1 + if in_degree[neighbor] == 0: + queue.append(neighbor) + + # Add any remaining tables (shouldn't happen if no cycles, but handle gracefully) + for table_name, sql_file in table_files: + table_lower = table_name.lower() + if (table_name, sql_file) not in sorted_tables: + sorted_tables.append((table_name, sql_file)) + + return sorted_tables + + +def parse_database_url(url): + """Parse MySQL connection URL and return connection parameters.""" + parsed = urlparse(url) + return { + 'host': parsed.hostname, + 'port': parsed.port or 3306, + 'user': parsed.username, + 'password': parsed.password, + 'database': parsed.path.lstrip('/') + } + + +def table_exists(cur, table_name, database_name=None): + """ + Check if a table exists in the database. + + Args: + cur: Database cursor + table_name: Name of the table to check + database_name: Optional database name (if None, uses current database) + """ + if database_name: + cur.execute( + "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = %s AND table_name = %s", + (database_name, table_name) + ) + else: + cur.execute( + "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = %s", + (table_name,) + ) + return cur.fetchone()[0] > 0 + + +def get_table_columns(cur, table_name, database_name=None): + """ + Get column information for a table. 
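Rows are read from information_schema.columns ordered by ordinal
position; for the members table, for example, the uf_id row comes back
roughly as ('uf_id', 'char', 'NO', 'PRI').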
+ + Args: + cur: Database cursor + table_name: Name of the table + database_name: Optional database name (if None, uses current database) + + Returns: + List of tuples with column information + """ + if database_name: + cur.execute( + "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_KEY FROM information_schema.columns WHERE table_schema = %s AND table_name = %s ORDER BY ORDINAL_POSITION", + (database_name, table_name) + ) + else: + cur.execute( + "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_KEY FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = %s ORDER BY ORDINAL_POSITION", + (table_name,) + ) + return cur.fetchall() + + +def execute_sql_file(cur, sql_file_path, description): + """Execute a SQL file and handle errors gracefully.""" + try: + sql_content = sql_file_path.read_text() + # Split by semicolons to handle multiple statements + statements = [s.strip() for s in sql_content.split(';') if s.strip()] + for statement in statements: + if statement: + cur.execute(statement) + print(f"โœ“ {description} succeeded") + return True + except Exception as e: + print(f"โœ— {description} failed: {e}") + return False + diff --git a/src/scripts/test_tables.py b/src/scripts/test_tables.py new file mode 100644 index 0000000..53a9ed5 --- /dev/null +++ b/src/scripts/test_tables.py @@ -0,0 +1,220 @@ +""" +Test script to validate table creation in a test database. + +This script creates a separate test database, initializes all tables, +and verifies they were created correctly. It does not affect the production database. + +Usage: + python src/scripts/test_tables.py + +Environment variables: + DATABASE_URL: MySQL connection string (default: mysql://mysqluser:mysqlpassword@db:3306/mydb) + The test will use a database named '{database_name}_test' +""" +import os +import sys +import mysql.connector +from helpers import ( + get_sql_base_path, + discover_table_files, + topological_sort_tables, + table_exists, + get_table_columns, + execute_sql_file, + parse_database_url +) + +# Get base database URL, test will use a separate database +BASE_DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") + +# Base path for SQL files (works in Docker and locally) +SQL_BASE_PATH = get_sql_base_path(__file__) + + +def initialize_schema(cur, database_name=None): + """Initialize database schema in correct dependency order.""" + print("\n๐Ÿ“‹ Discovering table files...") + + # Discover all table_*.sql files recursively + table_files = discover_table_files(SQL_BASE_PATH) + + if not table_files: + print(f"โš  No table_*.sql files found in {SQL_BASE_PATH}") + return 0, 0 + + print(f"โœ“ Found {len(table_files)} table file(s)") + + # Sort tables by dependency order + print("๐Ÿ“Š Analyzing dependencies...") + sorted_tables = topological_sort_tables(table_files) + + print("\n๐Ÿ“‹ Initializing database schema...") + + success_count = 0 + for table_name, sql_file in sorted_tables: + description = f"{table_name} table" + # Check if table already exists (for idempotency) + if table_exists(cur, table_name, database_name): + print(f"โŠ˜ {description} already exists, skipping") + continue + + if execute_sql_file(cur, sql_file, description): + success_count += 1 + + print(f"\nโœ“ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)") + return success_count, len(sorted_tables) + + +def verify_tables(cur, database_name): + """Verify that all expected tables exist and have valid structure.""" + print("\n๐Ÿ” Verifying table structure...") 
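    # A table passes verification only if it exists in information_schema
    # AND reports at least one column; a zero-column table (or a missing
    # one) marks the whole run as invalid.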
+ + # Discover all table files to get expected table names + table_files = discover_table_files(SQL_BASE_PATH) + expected_tables = [name for name, _ in table_files] + + if not expected_tables: + print("โš  No tables found to verify") + return False + + all_valid = True + for table_name in expected_tables: + if table_exists(cur, table_name, database_name): + columns = get_table_columns(cur, table_name, database_name) + print(f"โœ“ {table_name}: {len(columns)} columns") + if len(columns) == 0: + print(f" โš  Warning: {table_name} has no columns") + all_valid = False + else: + print(f"โœ— {table_name}: NOT FOUND") + all_valid = False + + return all_valid + + +def main(): + """Main test function.""" + try: + # Parse base database URL + base_params = parse_database_url(BASE_DATABASE_URL) + test_db_name = f"{base_params['database']}_test" + + print(f"๐Ÿงช Starting table validation test...") + print(f"๐Ÿ“Š Test database: {test_db_name}") + print(f"๐Ÿ”Œ Connecting to MySQL server...") + + # Use root credentials to create database (mysqluser may not have CREATE permission) + # Try to get root password from environment or use default + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # Connect as root to create database + root_conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': 'root', + 'password': root_password + } + + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + + # Verify connection + root_cur.execute("SELECT VERSION();") + version = root_cur.fetchone()[0] + print(f"โœ“ Connected to MySQL: {version}") + + # Create test database if it doesn't exist + print(f"\n๐Ÿ“ฆ Creating test database '{test_db_name}'...") + root_cur.execute(f"CREATE DATABASE IF NOT EXISTS `{test_db_name}`") + + # Grant permissions to mysqluser on the test database + # Use string formatting for GRANT since it's DDL, not DML + username = base_params['user'] + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + print(f"โœ“ Test database created and permissions granted") + + root_cur.close() + root_conn.close() + + # Small delay to ensure privileges are propagated + import time + time.sleep(0.5) + + # Now connect as mysqluser for table operations + print(f"๐Ÿ”Œ Connecting as '{base_params['user']}' to test database...") + conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': base_params['user'], + 'password': base_params['password'], + 'database': test_db_name + } + + try: + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"โœ“ Connected successfully") + except mysql.connector.Error as e: + print(f"โœ— Failed to connect as {base_params['user']}: {e}") + print(f" Attempting to verify privileges...") + # Try to reconnect as root to check if user exists + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"SELECT User, Host FROM mysql.user WHERE User = '{username}'") + users = root_cur.fetchall() + if not users: + print(f" โš  User '{username}' does not exist. 
Creating user...") + root_cur.execute(f"CREATE USER IF NOT EXISTS '{username}'@'%' IDENTIFIED BY '{base_params['password']}'") + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + root_cur.close() + root_conn.close() + time.sleep(0.5) + # Retry connection + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"โœ“ Connected successfully after creating user") + else: + raise + + # Initialize schema in test database + created_count, total_count = initialize_schema(cur, test_db_name) + conn.commit() + + # Verify tables + all_valid = verify_tables(cur, test_db_name) + + # Summary + print("\n" + "="*50) + if all_valid and created_count == total_count: + print("โœ… TEST PASSED: All tables created and verified successfully!") + print(f" Created {created_count}/{total_count} tables") + print(f" Test database '{test_db_name}' is ready for inspection") + print(f" (Database will be kept for manual inspection)") + elif all_valid: + print("โš ๏ธ TEST PARTIAL: All tables exist but some were skipped") + print(f" Created {created_count}/{total_count} tables") + else: + print("โŒ TEST FAILED: Some tables are missing or invalid") + sys.exit(1) + print("="*50) + + cur.close() + conn.close() + + except mysql.connector.Error as e: + print(f"โœ— Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"โœ— Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + main() + diff --git a/src/sql/applicants/table_applicants.sql b/src/sql/applicants/table_applicants.sql index 6642fc6..c9ea838 100644 --- a/src/sql/applicants/table_applicants.sql +++ b/src/sql/applicants/table_applicants.sql @@ -1,7 +1,7 @@ CREATE TABLE applicants ( first_name VARCHAR(100) NOT NULL, last_name VARCHAR(100) NOT NULL, - uf_id CHAR(8) NOT NULL CHECK (uf_id ~ '^[0-9]{8}$') PRIMARY KEY, + uf_id CHAR(8) NOT NULL CHECK (uf_id REGEXP '^[0-9]{8}$') PRIMARY KEY, discord_user VARCHAR(150), github_user VARCHAR(150), qualtrics_link VARCHAR(200) diff --git a/src/sql/location/table_location.sql b/src/sql/location/table_location.sql deleted file mode 100644 index 7505f60..0000000 --- a/src/sql/location/table_location.sql +++ /dev/null @@ -1,7 +0,0 @@ --- TODO: Add encoding for actual position coordinates later --- Or alternatively, short instructions on how to find the location in the lab --- Or both -CREATE TABLE locations ( - name VARCHAR(50) PRIMARY KEY -); - diff --git a/src/sql/location/table_locations.sql b/src/sql/location/table_locations.sql new file mode 100644 index 0000000..2221ae7 --- /dev/null +++ b/src/sql/location/table_locations.sql @@ -0,0 +1,10 @@ +-- Location has coordinates that will be used to position it in the svg in the frontend. 
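-- For example, a shelf might be seeded as (illustrative values only):
--   INSERT INTO locations (name, x, y, width, height, type)
--   VALUES ('Shelf A', 120, 40, 80, 200, 'shelf');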
+CREATE TABLE locations (
+    name VARCHAR(50) PRIMARY KEY,
+    x INTEGER NOT NULL,
+    y INTEGER NOT NULL,
+    width INTEGER NOT NULL,
+    height INTEGER NOT NULL,
+    type VARCHAR(50) NOT NULL
+);
+
diff --git a/src/sql/members/table_members.sql b/src/sql/members/table_members.sql
index 35a2266..a5baaa7 100644
--- a/src/sql/members/table_members.sql
+++ b/src/sql/members/table_members.sql
@@ -1,7 +1,7 @@
 CREATE TABLE members (
     first_name VARCHAR(100) NOT NULL,
     last_name VARCHAR(100) NOT NULL,
-    uf_id CHAR(8) NOT NULL CHECK (uf_id ~ '^[0-9]{8}$') PRIMARY KEY,
+    uf_id CHAR(8) NOT NULL CHECK (uf_id REGEXP '^[0-9]{8}$') PRIMARY KEY,
     uf_email VARCHAR(150) NOT NULL UNIQUE,
     phone_number VARCHAR(50),
     team VARCHAR(50) REFERENCES teams(name),
diff --git a/src/sql/orders/table_orders.sql b/src/sql/orders/table_orders.sql
index 2c22e36..4a2fb0b 100644
--- a/src/sql/orders/table_orders.sql
+++ b/src/sql/orders/table_orders.sql
@@ -1,5 +1,5 @@
 CREATE TABLE IF NOT EXISTS orders (
-    order_id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+    order_id BIGINT AUTO_INCREMENT PRIMARY KEY,
     item_name VARCHAR(50),
     count INT,
     company VARCHAR(50),
diff --git a/src/sql/supplies/table_supplies.sql b/src/sql/supplies/table_supplies.sql
index b5ea04f..a0e3691 100644
--- a/src/sql/supplies/table_supplies.sql
+++ b/src/sql/supplies/table_supplies.sql
@@ -1,5 +1,5 @@
 CREATE TABLE supplies (
-    id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
     name VARCHAR(200) NOT NULL,
     amount INTEGER NOT NULL DEFAULT 0,
     last_order_date DATE,

From 213a1676ca8b240955cf0c8b5dbd6e999219a8de Mon Sep 17 00:00:00 2001
From: willzoo
Date: Wed, 12 Nov 2025 11:19:42 -0500
Subject: [PATCH 14/20] test_tables drops all tables

---
 src/scripts/test_tables.py | 39 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/src/scripts/test_tables.py b/src/scripts/test_tables.py
index 53a9ed5..f8faf4c 100644
--- a/src/scripts/test_tables.py
+++ b/src/scripts/test_tables.py
@@ -65,6 +65,41 @@ def initialize_schema(cur, database_name=None):
     return success_count, len(sorted_tables)
 
 
+def drop_all_tables(cur, database_name):
+    """Drop all tables from the test database."""
+    print(f"\n🗑️ Dropping all existing tables from '{database_name}'...")
+
+    # Get all table names from the database
+    cur.execute(
+        "SELECT table_name FROM information_schema.tables WHERE table_schema = %s",
+        (database_name,)
+    )
+    tables = cur.fetchall()
+
+    if not tables:
+        print("   ⊘ No tables to drop")
+        return
+
+    table_names = [table[0] for table in tables]
+    print(f"   Found {len(table_names)} table(s) to drop")
+
+    # Disable foreign key checks to avoid constraint issues
+    cur.execute("SET FOREIGN_KEY_CHECKS = 0")
+
+    dropped_count = 0
+    for table_name in table_names:
+        try:
+            cur.execute(f"DROP TABLE IF EXISTS `{table_name}`")
+            dropped_count += 1
+        except Exception as e:
+            print(f"   ⚠ Warning: Failed to drop table '{table_name}': {e}")
+
+    # Re-enable foreign key checks
+    cur.execute("SET FOREIGN_KEY_CHECKS = 1")
+
+    print(f"   ✓ Dropped {dropped_count}/{len(table_names)} table(s)")
+
+
 def verify_tables(cur, database_name):
     """Verify that all expected tables exist and have valid structure."""
     print("\n🔍 Verifying table structure...")
@@ -180,6 +215,10 @@ def main():
         else:
             raise
 
+    # Drop all existing tables from test database for a clean start
+    drop_all_tables(cur, test_db_name)
+    conn.commit()
+
     # Initialize schema in test database
     created_count, total_count = initialize_schema(cur, test_db_name)
     conn.commit()

From 2ce730e4f24cadc2b69a14e942ca66c07c7ec6e5 Mon Sep 17 00:00:00 2001
From: willzoo
Date: Wed, 12 Nov 2025 12:56:04 -0500
Subject: [PATCH 15/20] Added a visual test GUI for running tests

---
 .gitignore                             |   1 +
 Dockerfile                             |   5 +-
 Makefile                               |  15 +-
 docker-compose.yaml                    |  15 +-
 requirements.txt                       |   2 +
 src/api/__init__.py                    |   2 +
 src/api/app.py                         | 100 +++++
 src/api/db.py                          |  41 ++
 src/api/models/__init__.py             |   2 +
 src/api/models/location.py             |  73 ++++
 src/api/models/supply.py               |  59 +++
 src/api/routes/__init__.py             |   2 +
 src/api/routes/locations.py            | 220 ++++++++++
 src/api/routes/supplies.py             | 395 ++++++++++++++++++
 src/scripts/app.py                     |  99 -----
 src/scripts/test_gui.py                | 520 ++++++++++++++++++++++++
 src/scripts/test_locations_supplies.py | 542 +++++++++++++++++++++++++
 src/scripts/test_tables.py             |  43 +-
 src/sql/supplies/table_supplies.sql    |   3 +-
 19 files changed, 2029 insertions(+), 110 deletions(-)
 create mode 100644 src/api/__init__.py
 create mode 100644 src/api/app.py
 create mode 100644 src/api/db.py
 create mode 100644 src/api/models/__init__.py
 create mode 100644 src/api/models/location.py
 create mode 100644 src/api/models/supply.py
 create mode 100644 src/api/routes/__init__.py
 create mode 100644 src/api/routes/locations.py
 create mode 100644 src/api/routes/supplies.py
 delete mode 100644 src/scripts/app.py
 create mode 100644 src/scripts/test_gui.py
 create mode 100644 src/scripts/test_locations_supplies.py

diff --git a/.gitignore b/.gitignore
index 5ceb386..d75edea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 venv
+__pycache__
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index a210162..4d77501 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,9 +3,10 @@ FROM python:3.11-slim
 # Set working directory
 WORKDIR /app
 
-# Install system dependencies for mysql-connector-python
+# Install system dependencies for mysql-connector-python and tkinter
 RUN apt-get update && apt-get install -y \
-    gcc libmariadb-dev pkg-config mariadb-client && \
+    gcc libmariadb-dev pkg-config mariadb-client \
+    python3-tk xvfb && \
     rm -rf /var/lib/apt/lists/*
 
 # Install Python dependencies
diff --git a/Makefile b/Makefile
index 72a2464..f167343 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,7 @@
 PROJECT_NAME=mysql_service
 COMPOSE=docker-compose -p $(PROJECT_NAME)
 
-## up: Start the mysql and app containers in the background
+## up: Start the mysql and api containers in the background
 .PHONY: up
 up:
 	$(COMPOSE) up -d
 
@@ -11,6 +11,11 @@ up:
 down:
 	$(COMPOSE) down
 
+## status: Show status of all services
+.PHONY: status
+status:
+	$(COMPOSE) ps
+
 ## logs: Follow logs from all services
 .PHONY: logs
 logs:
@@ -21,10 +26,14 @@ logs:
 mysql:
 	$(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb
 
-## test: Run table creation tests in a test database
+## test: Open GUI test runner (starts services if not running)
 .PHONY: test
 test:
-	$(COMPOSE) exec app python src/scripts/test_tables.py
+	@echo "Ensuring services are running..."
+	@$(COMPOSE) up -d
+	@echo "Waiting for services to be ready..."
+	@timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true
+	@python src/scripts/test_gui.py
 
 ## build: Build or rebuild services
 .PHONY: build
diff --git a/docker-compose.yaml b/docker-compose.yaml
index ff5a84a..a8b43ac 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -18,18 +18,25 @@ services:
       timeout: 5s
       retries: 5
 
-  app:
+  api:
     build: .
-    container_name: mysql_app
+    container_name: mysql_api
     depends_on:
       db:
         condition: service_healthy
     environment:
       DATABASE_URL: mysql://mysqluser:mysqlpassword@db:3306/mydb
-      MYSQL_ROOT_PASSWORD: rootpassword
+      DB_HOST: db
+      DB_PORT: 3306
+      DB_USER: mysqluser
+      DB_PASSWORD: mysqlpassword
+      DB_NAME: mydb
+      PORT: 5000
+    ports:
+      - "5000:5000"
     volumes:
       - .:/app
-    command: ["sh", "-c", "until mysqladmin ping -h db -u mysqluser -pmysqlpassword --silent 2>/dev/null; do sleep 1; done && python src/scripts/app.py"]
+    command: ["python", "-m", "src.api.app"]
 
 volumes:
   db_data:
diff --git a/requirements.txt b/requirements.txt
index e4a79f9..b32c22c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,6 @@
 mysql-connector-python==8.2.0
 sqlalchemy==2.0.30
 alembic==1.13.2
+flask==3.0.0
+flask-cors==4.0.0
diff --git a/src/api/__init__.py b/src/api/__init__.py
new file mode 100644
index 0000000..4b449e7
--- /dev/null
+++ b/src/api/__init__.py
@@ -0,0 +1,2 @@
+# API package
+
diff --git a/src/api/app.py b/src/api/app.py
new file mode 100644
index 0000000..517c3f5
--- /dev/null
+++ b/src/api/app.py
@@ -0,0 +1,100 @@
+"""
+Flask API application for mil-sql.
+"""
+import os
+import sys
+from pathlib import Path
+
+# Add src to path for imports (must be before other imports)
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from flask import Flask
+from flask_cors import CORS
+from src.api.routes.locations import locations_bp
+from src.api.routes.supplies import supplies_bp
+
+# Import helpers for schema initialization
+from src.scripts.helpers import (
+    get_sql_base_path,
+    discover_table_files,
+    topological_sort_tables,
+    table_exists,
+    execute_sql_file
+)
+from src.api.db import get_db
+
+app = Flask(__name__)
+CORS(app)  # Enable CORS for milventory frontend
+
+# Register blueprints
+app.register_blueprint(locations_bp, url_prefix='/api/locations')
+app.register_blueprint(supplies_bp, url_prefix='/api/supplies')
+
+
+def initialize_schema():
+    """Initialize database schema if tables are missing."""
+    try:
+        print("🔍 Checking database schema...")
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Get SQL base path
+        SQL_BASE_PATH = get_sql_base_path(__file__)
+
+        # Discover all table files
+        table_files = discover_table_files(SQL_BASE_PATH)
+
+        if not table_files:
+            print("⚠ No table_*.sql files found, skipping initialization")
+            cur.close()
+            conn.close()
+            return
+
+        # Sort tables by dependency order
+        sorted_tables = topological_sort_tables(table_files)
+
+        # Check which tables are missing
+        missing_tables = []
+        for table_name, sql_file in sorted_tables:
+            if not table_exists(cur, table_name):
+                missing_tables.append((table_name, sql_file))
+
+        if not missing_tables:
+            print("✓ All tables exist, schema is up to date")
+            cur.close()
+            conn.close()
+            return
+
+        # Create missing tables
+        print(f"📋 Creating {len(missing_tables)} missing table(s)...")
+        success_count = 0
+        for table_name, sql_file in missing_tables:
+            description = f"{table_name} table"
+            if execute_sql_file(cur, sql_file, description):
+                success_count += 1
+
+        conn.commit()
+        print(f"✓ Schema initialization complete ({success_count}/{len(missing_tables)} tables created)")
+
+        cur.close()
+        conn.close()
+
+    except Exception as e:
+        print(f"⚠ Schema initialization warning: {e}")
+        print("   API will continue, but some endpoints may not work until tables are created")
+
+
+# Initialize schema on startup
+initialize_schema()
+
+
+@app.route('/health', methods=['GET'])
+def health_check():
+    """Health check endpoint."""
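+    # Liveness check only: returns a static payload without querying the
+    # database. (Descriptive comment; a DB ping could be added if needed.)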
+    return {'status': 'healthy'}, 200
+
+
+if __name__ == '__main__':
+    port = int(os.getenv('PORT', 5000))
+    app.run(host='0.0.0.0', port=port, debug=True)
+
diff --git a/src/api/db.py b/src/api/db.py
new file mode 100644
index 0000000..958f5ee
--- /dev/null
+++ b/src/api/db.py
@@ -0,0 +1,41 @@
+"""
+Database connection pool for the API.
+"""
+import mysql.connector
+from mysql.connector import pooling
+import os
+
+# Database configuration from environment variables
+config = {
+    'host': os.getenv('DB_HOST', 'db'),
+    'port': int(os.getenv('DB_PORT', 3306)),
+    'user': os.getenv('DB_USER', 'mysqluser'),
+    'password': os.getenv('DB_PASSWORD', 'mysqlpassword'),
+    'database': os.getenv('DB_NAME', 'mydb'),
+    'pool_name': 'mil_sql_pool',
+    'pool_size': 5,
+    'pool_reset_session': True
+}
+
+# Create connection pool
+try:
+    connection_pool = pooling.MySQLConnectionPool(**config)
+except Exception as e:
+    print(f"Error creating connection pool: {e}")
+    connection_pool = None
+
+
+def get_db():
+    """
+    Get a database connection from the pool.
+
+    Returns:
+        mysql.connector.connection.MySQLConnection: Database connection
+
+    Raises:
+        Exception: If connection pool is not initialized
+    """
+    if connection_pool is None:
+        raise Exception("Database connection pool not initialized")
+    return connection_pool.get_connection()
+
diff --git a/src/api/models/__init__.py b/src/api/models/__init__.py
new file mode 100644
index 0000000..4efde13
--- /dev/null
+++ b/src/api/models/__init__.py
@@ -0,0 +1,2 @@
+# Models package
+
diff --git a/src/api/models/location.py b/src/api/models/location.py
new file mode 100644
index 0000000..bb37a6f
--- /dev/null
+++ b/src/api/models/location.py
@@ -0,0 +1,73 @@
+"""
+Location model for API responses.
+"""
+from dataclasses import dataclass
+from typing import Optional, Dict, Any
+
+
+@dataclass
+class Location:
+    """Location model representing a storage location."""
+    name: str
+    x: int
+    y: int
+    width: int
+    height: int
+    type: str
+
+    @classmethod
+    def from_db_row(cls, row: tuple) -> 'Location':
+        """
+        Create Location from database row.
+
+        Args:
+            row: Tuple from database query (name, x, y, width, height, type)
+
+        Returns:
+            Location instance
+        """
+        return cls(
+            name=row[0],
+            x=row[1],
+            y=row[2],
+            width=row[3],
+            height=row[4],
+            type=row[5]
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """
+        Convert Location to dictionary for JSON serialization.
+
+        Returns:
+            Dictionary representation of the location
+        """
+        return {
+            'name': self.name,
+            'x': self.x,
+            'y': self.y,
+            'width': self.width,
+            'height': self.height,
+            'type': self.type
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> 'Location':
+        """
+        Create Location from dictionary.
+
+        Args:
+            data: Dictionary with location data
+
+        Returns:
+            Location instance
+        """
+        return cls(
+            name=data['name'],
+            x=int(data['x']),
+            y=int(data['y']),
+            width=int(data['width']),
+            height=int(data['height']),
+            type=data['type']
+        )
+
diff --git a/src/api/models/supply.py b/src/api/models/supply.py
new file mode 100644
index 0000000..7d40dcd
--- /dev/null
+++ b/src/api/models/supply.py
@@ -0,0 +1,59 @@
+"""
+Supply data model.
+"""
+from dataclasses import dataclass
+from typing import Optional
+from datetime import date
+
+
+@dataclass
+class Supply:
+    """Supply inventory entry model."""
+    id: Optional[int] = None
+    name: str = ""
+    amount: int = 0
+    last_order_date: Optional[date] = None
+    location: str = ""
+
+    @classmethod
+    def from_db_row(cls, row):
+        """Create Supply from database row tuple.
+
+        Args:
+            row: Tuple from database query (id, name, amount, last_order_date, location)
+        """
+        return cls(
+            id=row[0],
+            name=row[1],
+            amount=row[2],
+            last_order_date=row[3],
+            location=row[4]
+        )
+
+    @classmethod
+    def from_dict(cls, data: dict):
+        """Create Supply from dictionary.
+
+        Args:
+            data: Dictionary with supply fields
+        """
+        return cls(
+            id=data.get('id'),
+            name=data.get('name', ''),
+            amount=data.get('amount', 0),
+            last_order_date=data.get('last_order_date'),
+            location=data.get('location', '')
+        )
+
+    def to_dict(self):
+        """Convert Supply to dictionary."""
+        result = {
+            'id': self.id,
+            'name': self.name,
+            'amount': self.amount,
+            'location': self.location
+        }
+        if self.last_order_date:
+            result['last_order_date'] = self.last_order_date.isoformat() if hasattr(self.last_order_date, 'isoformat') else str(self.last_order_date)
+        return result
+
diff --git a/src/api/routes/__init__.py b/src/api/routes/__init__.py
new file mode 100644
index 0000000..fb42c31
--- /dev/null
+++ b/src/api/routes/__init__.py
@@ -0,0 +1,2 @@
+# Routes package
+
diff --git a/src/api/routes/locations.py b/src/api/routes/locations.py
new file mode 100644
index 0000000..aba9424
--- /dev/null
+++ b/src/api/routes/locations.py
@@ -0,0 +1,220 @@
+"""
+Location API routes.
+"""
+import sys
+from pathlib import Path
+
+# Add src to path for imports (must be before other imports)
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+from flask import Blueprint, request, jsonify
+import mysql.connector
+from src.api.db import get_db
+from src.api.models.location import Location
+
+locations_bp = Blueprint('locations', __name__)
+
+
+@locations_bp.route('', methods=['GET'])
+def get_locations():
+    """
+    GET /api/locations
+    Get all locations.
+
+    Returns:
+        JSON array of all locations
+    """
+    try:
+        conn = get_db()
+        cur = conn.cursor()
+        cur.execute("SELECT name, x, y, width, height, type FROM locations ORDER BY name")
+        rows = cur.fetchall()
+        locations = [Location.from_db_row(row).to_dict() for row in rows]
+        cur.close()
+        conn.close()
+        return jsonify(locations), 200
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
+@locations_bp.route('/<name>', methods=['GET'])
+def get_location(name):
+    """
+    GET /api/locations/<name>
+    Get a specific location by name.
+
+    Args:
+        name: Location name
+
+    Returns:
+        JSON object of the location or 404 if not found
+    """
+    try:
+        conn = get_db()
+        cur = conn.cursor()
+        cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,))
+        row = cur.fetchone()
+        cur.close()
+        conn.close()
+
+        if row:
+            location = Location.from_db_row(row).to_dict()
+            return jsonify(location), 200
+        else:
+            return jsonify({'error': 'Location not found'}), 404
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
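+# Minimal usage sketch for the two GET endpoints above. It assumes the
+# service is published on localhost:5000 as in docker-compose.yaml; the
+# helper is illustrative only and is not called anywhere in the app.
+def _example_fetch_locations(name=None):  # hypothetical helper
+    import json
+    from urllib.parse import quote
+    from urllib.request import urlopen
+    url = 'http://localhost:5000/api/locations'
+    if name:
+        url += '/' + quote(name)  # e.g. /api/locations/Container%20A
+    with urlopen(url) as resp:
+        return json.loads(resp.read().decode('utf-8'))
+
+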
+@locations_bp.route('', methods=['POST'])
+def create_location():
+    """
+    POST /api/locations
+    Create a new location.
+
+    Request body:
+        {
+            "name": "string",
+            "x": int,
+            "y": int,
+            "width": int,
+            "height": int,
+            "type": "string"
+        }
+
+    Returns:
+        JSON object of the created location
+    """
+    try:
+        data = request.json
+        if not data:
+            return jsonify({'error': 'Request body is required'}), 400
+
+        # Validate required fields
+        required_fields = ['name', 'x', 'y', 'width', 'height', 'type']
+        for field in required_fields:
+            if field not in data:
+                return jsonify({'error': f'Missing required field: {field}'}), 400
+
+        location = Location.from_dict(data)
+
+        conn = get_db()
+        cur = conn.cursor()
+        cur.execute(
+            "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)",
+            (location.name, location.x, location.y, location.width, location.height, location.type)
+        )
+        conn.commit()
+        cur.close()
+        conn.close()
+
+        return jsonify(location.to_dict()), 201
+    except mysql.connector.IntegrityError as e:
+        if 'Duplicate entry' in str(e):
+            return jsonify({'error': 'Location with this name already exists'}), 409
+        return jsonify({'error': str(e)}), 400
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
+@locations_bp.route('/<name>', methods=['PUT'])
+def update_location(name):
+    """
+    PUT /api/locations/<name>
+    Update an existing location.
+
+    Args:
+        name: Location name to update
+
+    Request body:
+        {
+            "x": int,
+            "y": int,
+            "width": int,
+            "height": int,
+            "type": "string"
+        }
+    Note: name cannot be updated via PUT
+
+    Returns:
+        JSON object of the updated location
+    """
+    try:
+        data = request.json
+        if not data:
+            return jsonify({'error': 'Request body is required'}), 400
+
+        # Validate fields (name is not updatable via PUT)
+        updatable_fields = ['x', 'y', 'width', 'height', 'type']
+        update_data = {k: v for k, v in data.items() if k in updatable_fields}
+
+        if not update_data:
+            return jsonify({'error': 'No valid fields to update'}), 400
+
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Check if location exists
+        cur.execute("SELECT name FROM locations WHERE name = %s", (name,))
+        if not cur.fetchone():
+            cur.close()
+            conn.close()
+            return jsonify({'error': 'Location not found'}), 404
+
+        # Build update query dynamically
+        set_clauses = []
+        values = []
+        for field, value in update_data.items():
+            set_clauses.append(f"{field} = %s")
+            values.append(value)
+        values.append(name)
+
+        query = f"UPDATE locations SET {', '.join(set_clauses)} WHERE name = %s"
+        cur.execute(query, values)
+        conn.commit()
+
+        # Fetch updated location
+        cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,))
+        row = cur.fetchone()
+        location = Location.from_db_row(row).to_dict()
+
+        cur.close()
+        conn.close()
+
+        return jsonify(location), 200
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
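+# Note on the dynamic UPDATE above: only column names drawn from the fixed
+# updatable_fields whitelist reach the f-string; every value is bound through
+# a %s placeholder. For example, {"x": 10, "type": "shelf"} produces:
+#   UPDATE locations SET x = %s, type = %s WHERE name = %s
+
+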
+@locations_bp.route('/<name>', methods=['DELETE'])
+def delete_location(name):
+    """
+    DELETE /api/locations/<name>
+    Delete a location.
+
+    Args:
+        name: Location name to delete
+
+    Returns:
+        204 No Content on success, 404 if not found
+    """
+    try:
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Check if location exists
+        cur.execute("SELECT name FROM locations WHERE name = %s", (name,))
+        if not cur.fetchone():
+            cur.close()
+            conn.close()
+            return jsonify({'error': 'Location not found'}), 404
+
+        cur.execute("DELETE FROM locations WHERE name = %s", (name,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+        return '', 204
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
diff --git a/src/api/routes/supplies.py b/src/api/routes/supplies.py
new file mode 100644
index 0000000..d320d67
--- /dev/null
+++ b/src/api/routes/supplies.py
@@ -0,0 +1,395 @@
+"""
+Supply API routes.
+"""
+import sys
+from pathlib import Path
+
+# Add src to path for imports (must be before other imports)
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+from flask import Blueprint, request, jsonify
+import mysql.connector
+from src.api.db import get_db
+from src.api.models.supply import Supply
+
+supplies_bp = Blueprint('supplies', __name__)
+
+
+@supplies_bp.route('', methods=['GET'])
+def get_supplies():
+    """
+    GET /api/supplies
+    Get all supplies, optionally filtered by location.
+
+    Query parameters:
+        location: Optional location name to filter by
+
+    Returns:
+        JSON array of all supplies
+    """
+    try:
+        location_filter = request.args.get('location')
+
+        conn = get_db()
+        cur = conn.cursor()
+
+        if location_filter:
+            cur.execute(
+                "SELECT id, name, amount, last_order_date, location FROM supplies WHERE location = %s ORDER BY name",
+                (location_filter,)
+            )
+        else:
+            cur.execute("SELECT id, name, amount, last_order_date, location FROM supplies ORDER BY name, location")
+
+        rows = cur.fetchall()
+        supplies = [Supply.from_db_row(row).to_dict() for row in rows]
+        cur.close()
+        conn.close()
+        return jsonify(supplies), 200
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
+@supplies_bp.route('/<int:supply_id>', methods=['GET'])
+def get_supply(supply_id):
+    """
+    GET /api/supplies/<id>
+    Get a specific supply by ID.
+
+    Args:
+        supply_id: Supply ID
+
+    Returns:
+        JSON object of the supply or 404 if not found
+    """
+    try:
+        conn = get_db()
+        cur = conn.cursor()
+        cur.execute(
+            "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s",
+            (supply_id,)
+        )
+        row = cur.fetchone()
+        cur.close()
+        conn.close()
+
+        if row:
+            supply = Supply.from_db_row(row).to_dict()
+            return jsonify(supply), 200
+        else:
+            return jsonify({'error': 'Supply not found'}), 404
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
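+# Minimal usage sketch for the optional location filter above. It assumes
+# the service is on localhost:5000 (per docker-compose.yaml); the helper is
+# illustrative only and unused by the app.
+def _example_fetch_supplies(location=None):  # hypothetical helper
+    import json
+    from urllib.parse import quote
+    from urllib.request import urlopen
+    url = 'http://localhost:5000/api/supplies'
+    if location:
+        url += '?location=' + quote(location)  # e.g. ?location=Container%20A
+    with urlopen(url) as resp:
+        return json.loads(resp.read().decode('utf-8'))
+
+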
+@supplies_bp.route('', methods=['POST'])
+def create_supply():
+    """
+    POST /api/supplies
+    Create a new supply entry.
+
+    Request body:
+        {
+            "name": "string",
+            "amount": int,
+            "last_order_date": "YYYY-MM-DD" (optional),
+            "location": "string"
+        }
+
+    Returns:
+        JSON object of the created supply
+    """
+    try:
+        data = request.json
+        if not data:
+            return jsonify({'error': 'Request body is required'}), 400
+
+        # Validate required fields
+        required_fields = ['name', 'amount', 'location']
+        for field in required_fields:
+            if field not in data:
+                return jsonify({'error': f'Missing required field: {field}'}), 400
+
+        if data['amount'] < 0:
+            return jsonify({'error': 'Amount cannot be negative'}), 400
+
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Check if supply already exists at this location
+        cur.execute(
+            "SELECT id, amount FROM supplies WHERE name = %s AND location = %s",
+            (data['name'], data['location'])
+        )
+        existing = cur.fetchone()
+
+        if existing:
+            # Update existing entry by adding to amount
+            new_amount = existing[1] + data['amount']
+            cur.execute(
+                "UPDATE supplies SET amount = %s WHERE id = %s",
+                (new_amount, existing[0])
+            )
+            supply_id = existing[0]
+        else:
+            # Create new entry
+            cur.execute(
+                "INSERT INTO supplies (name, amount, last_order_date, location) VALUES (%s, %s, %s, %s)",
+                (data['name'], data['amount'], data.get('last_order_date'), data['location'])
+            )
+            supply_id = cur.lastrowid
+
+        conn.commit()
+
+        # Fetch the created/updated supply
+        cur.execute(
+            "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s",
+            (supply_id,)
+        )
+        row = cur.fetchone()
+        supply = Supply.from_db_row(row).to_dict()
+
+        cur.close()
+        conn.close()
+
+        return jsonify(supply), 201
+    except mysql.connector.IntegrityError as e:
+        if 'foreign key constraint' in str(e).lower():
+            return jsonify({'error': 'Location does not exist'}), 400
+        return jsonify({'error': str(e)}), 400
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
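+# Note: as implemented above, POST behaves as an upsert on the
+# (name, location) pair -- posting an existing pair adds to the stored
+# amount rather than creating a duplicate row, so clients should not rely
+# on a 201 response meaning "new row".
+
+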
+@supplies_bp.route('/<int:supply_id>', methods=['PUT'])
+def update_supply(supply_id):
+    """
+    PUT /api/supplies/<id>
+    Update an existing supply entry.
+
+    Args:
+        supply_id: Supply ID to update
+
+    Request body:
+        {
+            "amount": int (optional),
+            "last_order_date": "YYYY-MM-DD" (optional)
+        }
+    Note: name and location cannot be updated via PUT (use move endpoint)
+
+    Returns:
+        JSON object of the updated supply
+    """
+    try:
+        data = request.json
+        if not data:
+            return jsonify({'error': 'Request body is required'}), 400
+
+        # Validate fields (name and location are not updatable via PUT)
+        updatable_fields = ['amount', 'last_order_date']
+        update_data = {k: v for k, v in data.items() if k in updatable_fields}
+
+        if not update_data:
+            return jsonify({'error': 'No valid fields to update'}), 400
+
+        if 'amount' in update_data and update_data['amount'] < 0:
+            return jsonify({'error': 'Amount cannot be negative'}), 400
+
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Check if supply exists
+        cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
+        if not cur.fetchone():
+            cur.close()
+            conn.close()
+            return jsonify({'error': 'Supply not found'}), 404
+
+        # Build update query dynamically
+        set_clauses = []
+        values = []
+        for field, value in update_data.items():
+            set_clauses.append(f"{field} = %s")
+            values.append(value)
+        values.append(supply_id)
+
+        query = f"UPDATE supplies SET {', '.join(set_clauses)} WHERE id = %s"
+        cur.execute(query, values)
+        conn.commit()
+
+        # Fetch updated supply
+        cur.execute(
+            "SELECT id, name, amount, last_order_date, location FROM supplies WHERE id = %s",
+            (supply_id,)
+        )
+        row = cur.fetchone()
+        supply = Supply.from_db_row(row).to_dict()
+
+        cur.close()
+        conn.close()
+
+        return jsonify(supply), 200
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
+@supplies_bp.route('/<int:supply_id>', methods=['DELETE'])
+def delete_supply(supply_id):
+    """
+    DELETE /api/supplies/<id>
+    Delete a supply entry.
+
+    Args:
+        supply_id: Supply ID to delete
+
+    Returns:
+        204 No Content on success, 404 if not found
+    """
+    try:
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Check if supply exists
+        cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
+        if not cur.fetchone():
+            cur.close()
+            conn.close()
+            return jsonify({'error': 'Supply not found'}), 404
+
+        cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+        return '', 204
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
+
+@supplies_bp.route('/move', methods=['POST'])
+def move_supplies():
+    """
+    POST /api/supplies/move
+    Move supplies from one location to another.
+
+    Request body:
+        {
+            "name": "string",           # Supply name
+            "from_location": "string",  # Source location
+            "to_location": "string",    # Destination location
+            "amount": int               # Amount to move (optional, defaults to all)
+        }
+
+    Returns:
+        JSON object with move results:
+        {
+            "moved": int,           # Amount actually moved
+            "from_remaining": int,  # Remaining at source
+            "to_total": int         # Total at destination after move
+        }
+    """
+    try:
+        data = request.json
+        if not data:
+            return jsonify({'error': 'Request body is required'}), 400
+
+        # Validate required fields
+        required_fields = ['name', 'from_location', 'to_location']
+        for field in required_fields:
+            if field not in data:
+                return jsonify({'error': f'Missing required field: {field}'}), 400
+
+        if data['from_location'] == data['to_location']:
+            return jsonify({'error': 'Source and destination locations must be different'}), 400
+
+        amount_to_move = data.get('amount')  # None means move all
+
+        if amount_to_move is not None and amount_to_move <= 0:
+            return jsonify({'error': 'Amount to move must be positive'}), 400
+
+        conn = get_db()
+        cur = conn.cursor()
+
+        # Get source supply entry
+        cur.execute(
+            "SELECT id, amount FROM supplies WHERE name = %s AND location = %s",
+            (data['name'], data['from_location'])
+        )
+        source_entry = cur.fetchone()
+
+        if not source_entry:
+            cur.close()
+            conn.close()
+            return jsonify({'error': f'Supply "{data["name"]}" not found at location "{data["from_location"]}"'}), 404
+
+        source_id, source_amount = source_entry
+
+        # Determine how much to move
+        if amount_to_move is None:
+            amount_to_move = source_amount  # Move all
+        elif amount_to_move > source_amount:
+            cur.close()
+            conn.close()
+            return jsonify({
+                'error': f'Cannot move {amount_to_move} units. Only {source_amount} available at source location'
+            }), 400
+
+        # Get destination supply entry (if exists)
+        cur.execute(
+            "SELECT id, amount FROM supplies WHERE name = %s AND location = %s",
+            (data['name'], data['to_location'])
+        )
+        dest_entry = cur.fetchone()
+
+        # Calculate new amounts
+        new_source_amount = source_amount - amount_to_move
+        if dest_entry:
+            # Destination exists - add to it
+            dest_id, dest_amount = dest_entry
+            new_dest_amount = dest_amount + amount_to_move
+
+            # Update destination
+            cur.execute(
+                "UPDATE supplies SET amount = %s WHERE id = %s",
+                (new_dest_amount, dest_id)
+            )
+        else:
+            # Destination doesn't exist - create new entry
+            cur.execute(
+                "INSERT INTO supplies (name, amount, location) VALUES (%s, %s, %s)",
+                (data['name'], amount_to_move, data['to_location'])
+            )
+            new_dest_amount = amount_to_move
+
+        # Update or delete source
+        if new_source_amount > 0:
+            # Update source with remaining amount
+            cur.execute(
+                "UPDATE supplies SET amount = %s WHERE id = %s",
+                (new_source_amount, source_id)
+            )
+        else:
+            # Delete source entry if amount becomes 0
+            cur.execute("DELETE FROM supplies WHERE id = %s", (source_id,))
+
+        conn.commit()
+
+        result = {
+            'moved': amount_to_move,
+            'from_remaining': new_source_amount,
+            'to_total': new_dest_amount
+        }
+
+        cur.close()
+        conn.close()
+
+        return jsonify(result), 200
+    except mysql.connector.IntegrityError as e:
+        if 'foreign key constraint' in str(e).lower():
+            return jsonify({'error': 'Location does not exist'}), 400
+        if 'unique_supply_location' in str(e).lower():
+            return jsonify({'error': 'Supply already exists at destination (this should not happen)'}), 500
+        return jsonify({'error': str(e)}), 400
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+
diff --git a/src/scripts/app.py b/src/scripts/app.py
deleted file mode 100644
index 22e4638..0000000
--- a/src/scripts/app.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
-Database initialization script.
-
-This script initializes the database schema (idempotent - safe to run multiple times).
-It only creates tables if they don't already exist, making it safe for production use.
-
-Note: Seed data and database reset functionality are handled by separate scripts.
-
-Usage:
-    python src/scripts/app.py
-
-Environment variables:
-    DATABASE_URL: MySQL connection string (default: mysql://mysqluser:mysqlpassword@db:3306/mydb)
-"""
-import os
-import sys
-import mysql.connector
-from helpers import (
-    get_sql_base_path,
-    discover_table_files,
-    topological_sort_tables,
-    table_exists,
-    execute_sql_file,
-    parse_database_url
-)
-
-DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
-
-# Base path for SQL files (works in Docker and locally)
-SQL_BASE_PATH = get_sql_base_path(__file__)
-
-
-def initialize_schema(cur):
-    """Initialize database schema in correct dependency order."""
-    print("\n📋 Discovering table files...")
-
-    # Discover all table_*.sql files recursively
-    table_files = discover_table_files(SQL_BASE_PATH)
-
-    if not table_files:
-        print(f"⚠ No table_*.sql files found in {SQL_BASE_PATH}")
-        return False
-
-    print(f"✓ Found {len(table_files)} table file(s)")
-
-    # Sort tables by dependency order
-    print("📊 Analyzing dependencies...")
-    sorted_tables = topological_sort_tables(table_files)
-
-    print("\n📋 Initializing database schema...")
-
-    success_count = 0
-    for table_name, sql_file in sorted_tables:
-        description = f"{table_name} table"
-        # Check if table already exists (for idempotency)
-        if table_exists(cur, table_name):
-            print(f"⊘ {description} already exists, skipping")
-            continue
-
-        if execute_sql_file(cur, sql_file, description):
-            success_count += 1
-
-    print(f"\n✓ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)")
-    return success_count > 0
-
-
-def main():
-    """Main initialization function."""
-    try:
-        print("🔌 Connecting to database...")
-        db_params = parse_database_url(DATABASE_URL)
-        conn = mysql.connector.connect(**db_params)
-        cur = conn.cursor()
-
-        # Verify connection
-        cur.execute("SELECT VERSION();")
-        version = cur.fetchone()[0]
-        print(f"✓ Connected to MySQL: {version}")
-
-        # Initialize schema
-        initialize_schema(cur)
-        conn.commit()
-
-        print("\n✅ Database initialization complete!")
-
-        cur.close()
-        conn.close()
-
-    except mysql.connector.Error as e:
-        print(f"✗ Database connection failed: {e}")
-        sys.exit(1)
-    except Exception as e:
-        print(f"✗ Unexpected error: {e}")
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    main()
-
diff --git a/src/scripts/test_gui.py b/src/scripts/test_gui.py
new file mode 100644
index 0000000..6b22169
--- /dev/null
+++ b/src/scripts/test_gui.py
@@ -0,0 +1,520 @@
+"""
+GUI Test Runner for mil-sql tests.
+
+Opens a tkinter window with buttons to run individual tests.
+Automatically discovers test_*.py files in the scripts directory.
+
+Usage:
+    python src/scripts/test_gui.py
+"""
+import tkinter as tk
+from tkinter import scrolledtext, ttk
+import subprocess
+import sys
+import os
+import json
+from pathlib import Path
+import threading
+import re
+
+
+class TestRunnerGUI:
+    def __init__(self, root):
+        self.root = root
+        self.root.title("Mil-SQL Test Runner")
+        self.root.geometry("800x600")
+
+        # Get scripts directory
+        self.scripts_dir = Path(__file__).parent
+
+        # Discover test files
+        self.test_files = self.discover_tests()
+
+        # Create UI
+        self.create_ui()
+
+    def discover_tests(self):
+        """Discover all test_*.py files in the scripts directory."""
+        test_files = []
+        for test_file in self.scripts_dir.glob("test_*.py"):
+            if test_file.name != "test_gui.py":  # Exclude this file
+                # Extract test name from filename: test_<name>.py -> <name>
+                match = re.match(r"test_(.+)\.py$", test_file.name, re.IGNORECASE)
+                if match:
+                    test_name = match.group(1).replace("_", " ").title()
+                    test_files.append((test_name, test_file))
+        return sorted(test_files)
+
+    def create_ui(self):
+        """Create the user interface."""
+        # Top frame for buttons
+        button_frame = tk.Frame(self.root, padx=10, pady=10)
+        button_frame.pack(fill=tk.X)
+
+        tk.Label(button_frame, text="Available Tests:", font=("Arial", 12, "bold")).pack(anchor=tk.W)
+
+        # Create buttons for each test
+        if not self.test_files:
+            tk.Label(button_frame, text="No test files found (test_*.py)", fg="gray").pack(anchor=tk.W, pady=5)
+        else:
+            buttons_frame = tk.Frame(button_frame)
+            buttons_frame.pack(fill=tk.X, pady=5)
+
+            for test_name, test_file in self.test_files:
+                btn = tk.Button(
+                    buttons_frame,
+                    text=f"▶ {test_name}",
+                    command=lambda tf=test_file: self.run_test(tf),
+                    width=20,
+                    height=2,
+                    font=("Arial", 10)
+                )
+                btn.pack(side=tk.LEFT, padx=5, pady=5)
+
+            # Run all tests button
+            tk.Button(
+                buttons_frame,
+                text="▶ Run All Tests",
+                command=self.run_all_tests,
+                width=20,
+                height=2,
+                font=("Arial", 10, "bold"),
+                bg="#4CAF50",
+                fg="white"
+            ).pack(side=tk.LEFT, padx=5, pady=5)
+
+        # Output area
+        output_frame = tk.Frame(self.root, padx=10, pady=10)
+        output_frame.pack(fill=tk.BOTH, expand=True)
+
+        tk.Label(output_frame, text="Test Output:", font=("Arial", 12, "bold")).pack(anchor=tk.W)
+
+        self.output_text = scrolledtext.ScrolledText(
+            output_frame,
+            wrap=tk.WORD,
+            width=80,
+            height=20,
+            font=("Consolas", 9)
+        )
+        self.output_text.pack(fill=tk.BOTH, expand=True, pady=5)
+
+        # Status bar
+        self.status_var = tk.StringVar(value="Ready")
+        status_bar = tk.Label(
+            self.root,
+            textvariable=self.status_var,
+            relief=tk.SUNKEN,
+            anchor=tk.W,
+            padx=10
+        )
+        status_bar.pack(side=tk.BOTTOM, fill=tk.X)
+
+        # Clear button
+        clear_btn = tk.Button(
+            output_frame,
+            text="Clear Output",
+            command=self.clear_output,
+            width=15
+        )
+        clear_btn.pack(anchor=tk.E, pady=5)
+
+    def clear_output(self):
+        """Clear the output text area."""
+        self.output_text.delete(1.0, tk.END)
+        self.status_var.set("Ready")
+
+    def log(self, message, tag=None):
+        """Add a message to the output area."""
+        self.output_text.insert(tk.END, message + "\n", tag)
+        self.output_text.see(tk.END)
+        self.root.update()
+
+    def run_test(self, test_file):
+        """Run a single test file."""
+        test_name = test_file.stem.replace("test_", "").replace("_", " ").title()
+        self.log(f"\n{'='*60}")
+        self.log(f"Running: {test_name}")
+        self.log(f"File: {test_file.name}")
+        self.log(f"{'='*60}\n")
+        self.status_var.set(f"Running: {test_name}...")
+
+        # Run test in a separate thread to avoid blocking UI
+        thread = threading.Thread(target=self._run_test_thread, args=(test_file,), daemon=True)
+        thread.start()
+
+    def _run_test_thread(self, test_file):
+        """Run test in a separate thread and capture output."""
+        try:
+            # Get the test filename relative to scripts directory
+            test_filename = test_file.name
+
+            # Run test in Docker container
+            # Use docker-compose exec to run the test in the api container
+            compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{test_filename}"]
+
+            # Run the test
+            process = subprocess.Popen(
+                compose_cmd,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                text=True,
+                encoding='utf-8',
+                errors='replace',  # Replace invalid characters instead of failing
+                bufsize=1,
+                universal_newlines=True,
+                cwd=str(self.scripts_dir.parent.parent)  # Run from project root
+            )
+
+            # Capture all output to detect JSON data
+            output_lines = []
+            json_data = None
+            in_json_block = False
+            json_lines = []
+
+            # Stream output in real-time
+            for line in process.stdout:
+                line_stripped = line.rstrip()
+                output_lines.append(line_stripped)
+                self.log(line_stripped)
+
+                # Detect JSON data block
+                if "TABLE_DATA_JSON_START" in line_stripped:
+                    in_json_block = True
+                    json_lines = []
+                elif "TABLE_DATA_JSON_END" in line_stripped:
+                    in_json_block = False
+                    # Parse JSON
+                    try:
+                        json_str = "\n".join(json_lines)
+                        json_data = json.loads(json_str)
+                    except json.JSONDecodeError as e:
+                        self.log(f"\n⚠ Could not parse table data JSON: {e}", "error")
+                elif in_json_block:
+                    json_lines.append(line_stripped)
+
+            process.wait()
+
+            # If we got JSON data, open viewer window locally
+            if json_data and process.returncode == 0:
+                self.log("\n🪟 Opening table viewer window locally...")
+                self.root.after(100, lambda: self._open_table_viewer(json_data))
+
+            if process.returncode == 0:
+                self.log(f"\n✓ Test completed successfully!", "success")
+                self.status_var.set("Test passed!")
+            else:
+                self.log(f"\n✗ Test failed with exit code {process.returncode}", "error")
+                self.status_var.set("Test failed!")
+
+        except Exception as e:
+            self.log(f"\n✗ Error running test: {e}", "error")
+            self.status_var.set(f"Error: {str(e)}")
+
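+    # Sketch of the sentinel protocol parsed above (assumed shape, not lifted
+    # from any test file): a test script opts into the viewer by printing its
+    # tables as JSON between the two sentinel lines, e.g.:
+    #
+    #   print("TABLE_DATA_JSON_START")
+    #   print(json.dumps({
+    #       "locations": {"data": [["Container A", 100, 100, 200, 200, "container"]]},
+    #       "supplies": {"data": [[1, "LEDs Red", 50, "2024-01-12", "Container A"]]}
+    #   }))
+    #   print("TABLE_DATA_JSON_END")
+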
+    def _open_table_viewer(self, table_data):
+        """Open a table viewer window with the provided data."""
+        try:
+            # Check which format we have
+            if 'locations' in table_data and 'supplies' in table_data:
+                # Format from test_locations_supplies.py - use interactive viewer
+                try:
+                    # Import the interactive viewer from test_locations_supplies
+                    import importlib.util
+                    viewer_path = self.scripts_dir / "test_locations_supplies.py"
+                    spec = importlib.util.spec_from_file_location("test_locations_supplies", viewer_path)
+                    test_module = importlib.util.module_from_spec(spec)
+                    spec.loader.exec_module(test_module)
+
+                    # Convert JSON data back to tuples
+                    locations_data = [tuple(row) for row in table_data['locations']['data']]
+                    supplies_data = [tuple(row) for row in table_data['supplies']['data']]
+
+                    # Use the interactive viewer
+                    test_module.create_table_viewer(locations_data, supplies_data)
+                except Exception as e:
+                    # Fallback to simple viewer
+                    self.log(f"\n⚠ Could not load interactive viewer, using simple viewer: {e}", "error")
+                    locations_data = [tuple(row) for row in table_data['locations']['data']]
+                    supplies_data = [tuple(row) for row in table_data['supplies']['data']]
+                    self._create_table_viewer_window(locations_data, supplies_data)
+            elif 'successful_tables' in table_data or 'failed_tables' in table_data:
+                # Format from test_tables.py - show table creation results
+                self._create_all_tables_viewer_window(table_data)
+            else:
+                self.log(f"\n⚠ Unknown table data format", "error")
+        except Exception as e:
+            self.log(f"\n⚠ Could not open table viewer: {e}", "error")
+            import traceback
+            self.log(traceback.format_exc(), "error")
+
+    def _create_table_viewer_window(self, locations_data, supplies_data):
+        """Create a tkinter window to display table contents."""
+        root = tk.Tk()
+        root.title("Database Table Contents - Locations & Supplies")
+        root.geometry("1200x700")
+
+        # Create notebook for tabs
+        notebook = ttk.Notebook(root)
+        notebook.pack(fill=tk.BOTH, expand=True, padx=10, pady=10)
+
+        # Locations tab
+        locations_frame = ttk.Frame(notebook)
+        notebook.add(locations_frame, text=f"Locations ({len(locations_data)} rows)")
+
+        # Locations treeview
+        loc_tree = ttk.Treeview(locations_frame, columns=('name', 'x', 'y', 'width', 'height', 'type'), show='headings', height=20)
+        loc_tree.heading('name', text='Name')
+        loc_tree.heading('x', text='X')
+        loc_tree.heading('y', text='Y')
+        loc_tree.heading('width', text='Width')
+        loc_tree.heading('height', text='Height')
+        loc_tree.heading('type', text='Type')
+
+        # Configure column widths
+        loc_tree.column('name', width=150, anchor=tk.W)
+        loc_tree.column('x', width=60, anchor=tk.CENTER)
+        loc_tree.column('y', width=60, anchor=tk.CENTER)
+        loc_tree.column('width', width=80, anchor=tk.CENTER)
+        loc_tree.column('height', width=80, anchor=tk.CENTER)
+        loc_tree.column('type', width=100, anchor=tk.W)
+
+        # Add scrollbar for locations
+        loc_scroll = ttk.Scrollbar(locations_frame, orient=tk.VERTICAL, command=loc_tree.yview)
+        loc_tree.configure(yscrollcommand=loc_scroll.set)
+
+        loc_tree.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
+        loc_scroll.pack(side=tk.RIGHT, fill=tk.Y)
+
+        # Populate locations data
+        for row in locations_data:
+            loc_tree.insert('', tk.END, values=row)
+
+        # Supplies tab
+        supplies_frame = ttk.Frame(notebook)
+        notebook.add(supplies_frame, text=f"Supplies ({len(supplies_data)} rows)")
+
+        # Supplies treeview
+        sup_tree = ttk.Treeview(supplies_frame, columns=('id', 'name', 'amount', 'last_order_date', 'location'), show='headings', height=20)
+        sup_tree.heading('id', text='ID')
+        sup_tree.heading('name', text='Name')
+        sup_tree.heading('amount', text='Amount')
+        sup_tree.heading('last_order_date', text='Last Order Date')
+        sup_tree.heading('location', text='Location')
+
+        # Configure column widths
+        sup_tree.column('id', width=50, anchor=tk.CENTER)
+        sup_tree.column('name', width=200, anchor=tk.W)
+        sup_tree.column('amount', width=80, anchor=tk.CENTER)
+        sup_tree.column('last_order_date', width=120, anchor=tk.CENTER)
+        sup_tree.column('location', width=150, anchor=tk.W)
+
+        # Add scrollbar for supplies
+        sup_scroll = ttk.Scrollbar(supplies_frame, orient=tk.VERTICAL, command=sup_tree.yview)
+        sup_tree.configure(yscrollcommand=sup_scroll.set)
+
+        sup_tree.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
+        sup_scroll.pack(side=tk.RIGHT, fill=tk.Y)
+
+        # Populate supplies data
+        for row in supplies_data:
+            sup_tree.insert('', tk.END, values=row)
+
+        # Status bar
+        status_frame = tk.Frame(root)
+        status_frame.pack(fill=tk.X, padx=10, pady=5)
+        status_label = tk.Label(status_frame, text=f"Locations: {len(locations_data)} | Supplies: {len(supplies_data)}",
+                                relief=tk.SUNKEN, anchor=tk.W, padx=10)
+        status_label.pack(fill=tk.X)
+
+        # Close button
+        button_frame = tk.Frame(root)
+        button_frame.pack(pady=10)
+        tk.Button(button_frame, text="Close", command=root.destroy, width=15, height=2).pack()
+
+        root.mainloop()
+
+    def _create_all_tables_viewer_window(self, table_data):
+        """Create a tkinter window to display table creation results."""
+        root = tk.Tk()
+        database_name = table_data.get('database', 'Unknown')
+        summary = table_data.get('summary', {})
+        successful_tables = table_data.get('successful_tables', [])
+        failed_tables = table_data.get('failed_tables', [])
+
+        root.title(f"Table Creation Results - {database_name}")
+        root.geometry("800x600")
+
+        # Main frame
+        main_frame = tk.Frame(root, padx=20, pady=20)
+        main_frame.pack(fill=tk.BOTH, expand=True)
+
+        # Title
+        title_label = tk.Label(
+            main_frame,
+            text=f"Database: {database_name}",
+            font=("Arial", 14, "bold")
+        )
+        title_label.pack(anchor=tk.W, pady=(0, 10))
+
+        # Summary section
+        summary_frame = tk.LabelFrame(main_frame, text="Summary", padx=10, pady=10)
+        summary_frame.pack(fill=tk.X, pady=10)
+
+        summary_text = f"""Created: {summary.get('created', 0)}/{summary.get('total', 0)} tables
+Successful: {summary.get('successful_count', 0)} tables
+Failed: {summary.get('failed_count', 0)} tables
+All Valid: {'Yes ✓' if summary.get('all_valid', False) else 'No ✗'}"""
+
+        summary_label = tk.Label(
+            summary_frame,
+            text=summary_text,
+            font=("Consolas", 10),
+            justify=tk.LEFT
+        )
+        summary_label.pack(anchor=tk.W)
+
+        # Successful tables section
+        if successful_tables:
+            success_frame = tk.LabelFrame(main_frame, text=f"✓ Successful Tables ({len(successful_tables)})", padx=10, pady=10)
+            success_frame.pack(fill=tk.BOTH, expand=True, pady=10)
+
+            success_listbox = tk.Listbox(success_frame, font=("Consolas", 10))
+            success_listbox.pack(fill=tk.BOTH, expand=True)
+
+            for table_name in sorted(successful_tables):
+                success_listbox.insert(tk.END, f"  ✓ {table_name}")
+        else:
+            success_frame = tk.LabelFrame(main_frame, text="✓ Successful Tables (0)", padx=10, pady=10)
+            success_frame.pack(fill=tk.X, pady=10)
+            tk.Label(success_frame, text="No successful tables", fg="gray").pack()
+
+        # Failed tables section
+        if failed_tables:
+            failed_frame = tk.LabelFrame(main_frame, text=f"✗ Failed Tables ({len(failed_tables)})", padx=10, pady=10)
+            failed_frame.pack(fill=tk.BOTH, expand=True, pady=10)
+
+            failed_listbox = tk.Listbox(failed_frame, font=("Consolas", 10), fg="red")
+            failed_listbox.pack(fill=tk.BOTH, expand=True)
+
+            for table_name in sorted(failed_tables):
+                failed_listbox.insert(tk.END, f"  ✗ {table_name}")
+        else:
+            failed_frame = tk.LabelFrame(main_frame, text="✗ Failed Tables (0)", padx=10, pady=10)
+            failed_frame.pack(fill=tk.X, pady=10)
+            tk.Label(failed_frame, text="No failed tables", fg="green").pack()
+
+        # Status bar
+        status_frame = tk.Frame(root)
+        status_frame.pack(fill=tk.X, padx=10, pady=5)
+        status_label = tk.Label(
+            status_frame,
+            text=f"Total: {summary.get('total', 0)} tables | Successful: {len(successful_tables)} | Failed: {len(failed_tables)}",
+            relief=tk.SUNKEN,
+            anchor=tk.W,
+            padx=10
+        )
+        status_label.pack(fill=tk.X)
+
+        # Close button
+        button_frame = tk.Frame(root)
+        button_frame.pack(pady=10)
+        tk.Button(button_frame, text="Close", command=root.destroy, width=15, height=2).pack()
+
+        root.mainloop()
+
+    def run_all_tests(self):
+        """Run all discovered tests sequentially."""
+        if not self.test_files:
+            self.log("No tests to run.")
+            return
+
+        self.log(f"\n{'='*60}")
+        self.log(f"Running All Tests ({len(self.test_files)} test(s))")
+        self.log(f"{'='*60}\n")
+        self.status_var.set(f"Running all tests...")
+
+        # Run tests in sequence
+        thread = threading.Thread(target=self._run_all_tests_thread, daemon=True)
+        thread.start()
+
+    def _run_all_tests_thread(self):
+        """Run all tests in a separate thread."""
+        passed = 0
+        failed = 0
+
+        for test_name, test_file in self.test_files:
+            self.log(f"\n{'='*60}")
+            self.log(f"Running: {test_name}")
+            self.log(f"{'='*60}\n")
+            self.status_var.set(f"Running: {test_name}... ({passed + failed + 1}/{len(self.test_files)})")
+
+            try:
+                # Get the test filename relative to scripts directory
+                test_filename = test_file.name
+
+                # Run test in Docker container
+                compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{test_filename}"]
+
+                process = subprocess.Popen(
+                    compose_cmd,
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.STDOUT,
+                    text=True,
+                    encoding='utf-8',
+                    errors='replace',  # Replace invalid characters instead of failing
+                    bufsize=1,
+                    universal_newlines=True,
+                    cwd=str(self.scripts_dir.parent.parent)  # Run from project root
+                )
+
+                for line in process.stdout:
+                    self.log(line.rstrip())
+
+                process.wait()
+
+                if process.returncode == 0:
+                    self.log(f"\n✓ {test_name} PASSED\n", "success")
+                    passed += 1
+                else:
+                    self.log(f"\n✗ {test_name} FAILED\n", "error")
+                    failed += 1
+
+            except Exception as e:
+                self.log(f"\n✗ {test_name} ERROR: {e}\n", "error")
+                failed += 1
+
+        # Summary
+        self.log(f"\n{'='*60}")
+        self.log(f"Test Summary: {passed} passed, {failed} failed out of {len(self.test_files)} total")
+        self.log(f"{'='*60}\n")
+
+        if failed == 0:
+            self.status_var.set(f"All tests passed! ({passed}/{len(self.test_files)})")
+        else:
+            self.status_var.set(f"Some tests failed: {passed} passed, {failed} failed")
+
+
+def main():
+    """Main entry point."""
+    # GUI runs locally on Windows desktop
+    # Tests are executed in Docker containers via docker-compose exec
+
+    root = tk.Tk()
+    app = TestRunnerGUI(root)
+
+    # Configure text tags for colors
+    app.output_text.tag_config("success", foreground="green")
+    app.output_text.tag_config("error", foreground="red")
+
+    # Welcome message
+    app.log("Mil-SQL Test Runner")
+    app.log("=" * 60)
+    app.log(f"Found {len(app.test_files)} test(s)")
+    app.log("Tests will run in Docker containers.")
+    app.log("Click a button above to run a test, or 'Run All Tests' to run everything.\n")
+
+    root.mainloop()
+
+
+if __name__ == "__main__":
+    main()
+
diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py
new file mode 100644
index 0000000..efed26a
--- /dev/null
+++ b/src/scripts/test_locations_supplies.py
@@ -0,0 +1,542 @@
+"""
+Test script for locations and supplies data insertion and verification.
+
+This script:
+1. Inserts sample location data
+2. Inserts sample supplies data (linked to locations)
+3. Verifies the data was inserted correctly
+4. Displays table contents in a GUI window
+
+Usage:
+    python src/scripts/test_locations_supplies.py
+"""
+import os
+import sys
+import json
+import mysql.connector
+import mysql.connector.errors
+from helpers import parse_database_url, table_exists
+
+# Try to import tkinter, but don't fail if not available (e.g., in Docker).
+# tkinter.messagebox is imported inside the guard so a headless container
+# does not fail at import time.
+try:
+    import tkinter as tk
+    import tkinter.messagebox
+    from tkinter import ttk
+    HAS_TKINTER = True
+except ImportError:
+    HAS_TKINTER = False
+
+# Get database URL
+DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+
+
+def get_connection():
+    """Get database connection."""
+    db_params = parse_database_url(DATABASE_URL)
+    return mysql.connector.connect(**db_params)
+
+
+def ensure_tables_exist(cur):
+    """Ensure locations and supplies tables exist, create if needed."""
+    from helpers import get_sql_base_path, discover_table_files, topological_sort_tables, execute_sql_file
+
+    SQL_BASE_PATH = get_sql_base_path(__file__)
+    table_files = discover_table_files(SQL_BASE_PATH)
+
+    # Find locations and supplies tables
+    locations_file = None
+    supplies_file = None
+
+    for table_name, sql_file in table_files:
+        if table_name.lower() == 'locations':
+            locations_file = sql_file
+        elif table_name.lower() == 'supplies':
+            supplies_file = sql_file
+
+    # Create tables if they don't exist
+    if not table_exists(cur, 'locations'):
+        if locations_file:
+            print("📋 Creating locations table...")
+            execute_sql_file(cur, locations_file, "locations table")
+        else:
+            print("✗ locations table not found and cannot be created")
+            return False
+
+    if not table_exists(cur, 'supplies'):
+        if supplies_file:
+            print("📋 Creating supplies table...")
+            execute_sql_file(cur, supplies_file, "supplies table")
+        else:
+            print("✗ supplies table not found and cannot be created")
+            return False
+
+    return True
+
+
+def insert_sample_locations(cur):
+    """Insert sample location data - just 3 containers."""
+    print("\n📦 Inserting sample locations...")
+
+    sample_locations = [
+        ('Container A', 100, 100, 200, 200, 'container'),
+        ('Container B', 400, 100, 200, 200, 'container'),
+        ('Container C', 700, 100, 200, 200, 'container'),
+    ]
+
+    inserted = 0
+    skipped = 0
+
+    for name, x, y, width, height, loc_type in sample_locations:
+        try:
+            cur.execute(
+                "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)",
+                (name, x, y, width, height, loc_type)
+            )
+            inserted += 1
+        except mysql.connector.IntegrityError:
+            # Location already exists, skip
+            skipped += 1
+        except Exception as e:
+            print(f"   ✗ Failed to insert {name}: {e}")
+
+    print(f"   ✓ Inserted {inserted} location(s), skipped {skipped} existing")
+    return inserted, skipped
+
+
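+# Note on the idempotent seed above: catching IntegrityError lets existing
+# rows pass silently. A MySQL-specific alternative sketch with the same
+# effect would be a single statement per row:
+#   INSERT IGNORE INTO locations (name, x, y, width, height, type)
+#   VALUES (%s, %s, %s, %s, %s, %s)
+
+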
+def insert_sample_supplies(cur):
+    """Insert sample supplies data - distributed across 3 containers."""
+    print("\n📦 Inserting sample supplies...")
+
+    sample_supplies = [
+        # Container A
+        ('Resistors 1kΩ', 50, '2024-01-15', 'Container A'),
+        ('Capacitors 100µF', 30, '2024-01-20', 'Container A'),
+        ('Arduino Uno', 5, '2024-02-01', 'Container A'),
+        ('Breadboards', 10, '2024-02-10', 'Container A'),
+        # Container B
+        ('Screws M3', 200, '2024-01-10', 'Container B'),
+        ('Bolts M3', 150, '2024-01-10', 'Container B'),
+        ('Nuts M3', 300, '2024-01-10', 'Container B'),
+        ('Wires Red', 100, '2024-01-05', 'Container B'),
+        # Container C
+        ('LEDs Red', 50, '2024-01-12', 'Container C'),
+        ('LEDs Green', 50, '2024-01-12', 'Container C'),
+        ('LEDs Blue', 50, '2024-01-12', 'Container C'),
+        ('Multimeter', 2, '2024-01-08', 'Container C'),
+    ]
+
+    inserted = 0
+    failed = 0
+
+    for name, amount, last_order_date, location in sample_supplies:
+        try:
+            cur.execute(
+                "INSERT INTO supplies (name, amount, last_order_date, location) VALUES (%s, %s, %s, %s)",
+                (name, amount, last_order_date, location)
+            )
+            inserted += 1
+        except Exception as e:
+            print(f"   ✗ Failed to insert {name}: {e}")
+            failed += 1
+
+    print(f"   ✓ Inserted {inserted} supply item(s), {failed} failed")
+    return inserted, failed
+
+
+def display_table_contents(table_name, columns, rows):
+    """Display formatted table contents (console output)."""
+    print(f"\n📊 {table_name.upper()} Table Contents:")
+    print("=" * 80)
+
+    if not rows:
+        print("   (empty)")
+        return
+
+    # Calculate column widths
+    col_widths = [len(col) for col in columns]
+    for row in rows:
+        for i, val in enumerate(row):
+            col_widths[i] = max(col_widths[i], len(str(val)) if val else 0)
+
+    # Print header
+    header = " | ".join(col.ljust(col_widths[i]) for i, col in enumerate(columns))
+    print(f"   {header}")
+    print("   " + "-" * len(header))
+
+    # Print rows
+    for row in rows:
+        row_str = " | ".join(str(val).ljust(col_widths[i]) if val is not None else "NULL".ljust(col_widths[i])
+                             for i, val in enumerate(row))
+        print(f"   {row_str}")
+
+    print(f"\n   Total rows: {len(rows)}")
+
+
+def create_table_viewer(locations_data, supplies_data):
+    """Create an interactive tkinter window to display and move supplies between containers."""
+    root = tk.Tk()
+    root.title("Container Supplies - Move Supplies Between Containers")
+    root.geometry("1000x700")
+
+    # Get container names
+    container_names = [row[0] for row in locations_data if 'Container' in row[0]]
+    if len(container_names) < 3:
+        container_names = ['Container A', 'Container B', 'Container C']
+
+    # Database connection for refreshing data
+    # When running locally (not in Docker), use localhost instead of 'db'
+    def get_db_connection():
+        db_params = parse_database_url(DATABASE_URL)
+        # If host is 'db' and we're not in Docker, use localhost
+        if db_params.get('host') == 'db' and not os.path.exists("/app"):
+            db_params['host'] = 'localhost'
+        # Try to connect - if auth plugin fails, try without specifying it
+        try:
+            return mysql.connector.connect(**db_params)
+        except mysql.connector.errors.DatabaseError as e:
+            if 'auth' in str(e).lower() or 'plugin' in str(e).lower():
+                # Try with different auth plugin
+                db_params['auth_plugin'] = 'caching_sha2_password'
+                try:
+                    return mysql.connector.connect(**db_params)
+                except:
+                    # Last resort: try mysql_native_password
+                    db_params['auth_plugin'] = 'mysql_native_password'
+                    return mysql.connector.connect(**db_params)
+            raise
+
+    def refresh_supplies():
+        """Refresh supplies data from database - only for our 3 containers."""
+        conn = get_db_connection()
+        cur = conn.cursor()
+        placeholders = ','.join(['%s'] * len(container_names))
+        cur.execute(
+            f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name",
+            container_names
+        )
+        rows = cur.fetchall()
+        cur.close()
+        conn.close()
+        return rows
+
+    def update_display(supplies_rows=None):
+        """Update the display with current supplies.
+
+        Args:
+            supplies_rows: Optional list of supply rows. If None, fetches from database.
+        """
+        # Get fresh data if not provided
+        if supplies_rows is None:
+            try:
+                supplies = refresh_supplies()
+            except Exception as e:
+                # If refresh fails, show error but don't crash
+                print(f"Warning: Could not refresh from database: {e}")
+                return
+        else:
+            supplies = supplies_rows
+
+        # Group supplies by location
+        supplies_by_location = {}
+        for row in supplies:
+            location = row[4]  # location is last column
+            if location not in supplies_by_location:
+                supplies_by_location[location] = []
+            supplies_by_location[location].append(row)
+
+        # Update each container display
+        for container_name in container_names:
+            if container_name in container_frames:
+                listbox = container_frames[container_name]
+                listbox.delete(0, tk.END)
+
+                if container_name in supplies_by_location:
+                    total_items = 0
+                    for supply in supplies_by_location[container_name]:
+                        name = supply[1]  # name
+                        amount = supply[2]  # amount
+                        total_items += amount
+                        listbox.insert(tk.END, f"{name}: {amount}")
+
+                    # Update container label
+                    if container_name in container_labels:
+                        container_labels[container_name].config(
+                            text=f"{container_name} ({len(supplies_by_location[container_name])} types, {total_items} total items)"
+                        )
+                else:
+                    listbox.insert(0, "(empty)")
+                    if container_name in container_labels:
+                        container_labels[container_name].config(text=f"{container_name} (empty)")
+
+    def move_supply(from_container, to_container):
+        """Move all supplies from one container to another."""
+        conn = get_db_connection()
+        cur = conn.cursor()
+
+        # Get all supplies in source container
+        cur.execute(
+            "SELECT id, name, amount FROM supplies WHERE location = %s",
+            (from_container,)
+        )
+        supplies_to_move = cur.fetchall()
+
+        if not supplies_to_move:
+            tk.messagebox.showinfo("Info", f"No supplies in {from_container} to move.")
+            cur.close()
+            conn.close()
+            return
+
+        # Move each supply
+        moved_count = 0
+        for supply_id, name, amount in supplies_to_move:
+            try:
+                # Check if supply exists at destination
+                cur.execute(
+                    "SELECT id, amount FROM supplies WHERE name = %s AND location = %s",
+                    (name, to_container)
+                )
+                dest_entry = cur.fetchone()
+
+                if dest_entry:
+                    # Update destination
+                    new_amount = dest_entry[1] + amount
+                    cur.execute(
+                        "UPDATE supplies SET amount = %s WHERE id = %s",
+                        (new_amount, dest_entry[0])
+                    )
+                else:
+                    # Create at destination
+                    cur.execute(
+                        "INSERT INTO supplies (name, amount, location) VALUES (%s, %s, %s)",
+                        (name, amount, to_container)
+                    )
+
+                # Delete from source
+                cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,))
+                moved_count += 1
+            except Exception as e:
+                print(f"Error moving {name}: {e}")
+
+        conn.commit()
+        cur.close()
+        conn.close()
+
+        update_display()
+        tk.messagebox.showinfo("Success", f"Moved {moved_count} supply type(s) from {from_container} to {to_container}.")
+
+    # Main container
+    main_frame = tk.Frame(root, padx=20, pady=20)
+    main_frame.pack(fill=tk.BOTH, expand=True)
+
+    # Title
+    title_label = tk.Label(main_frame, text="Container Supplies", font=("Arial", 16, "bold"))
+    title_label.pack(pady=(0, 20))
+
+    # Container frames (3 columns)
+    containers_frame = tk.Frame(main_frame)
+    containers_frame.pack(fill=tk.BOTH, expand=True)
+
+    container_frames = {}
+    container_labels = {}
+
+    for i, container_name in enumerate(container_names[:3]):
+        # Container column
+        col_frame = tk.Frame(containers_frame)
+        col_frame.grid(row=0, column=i, padx=20, sticky="nsew")
+        containers_frame.columnconfigure(i, weight=1)
+
+        # Container label
+        label = tk.Label(col_frame, text=f"{container_name}", font=("Arial", 12, "bold"))
+        label.pack(pady=(0, 10))
+        container_labels[container_name] = label
+
+        # Supplies listbox
+        listbox = tk.Listbox(col_frame, width=30, height=15, font=("Consolas", 10))
+        listbox.pack(fill=tk.BOTH, expand=True)
+        container_frames[container_name] = listbox
+
+        # Move buttons for this container
+        buttons_frame = tk.Frame(col_frame)
+        buttons_frame.pack(pady=10)
+
+        for other_container in container_names[:3]:
+            if other_container != container_name:
+                btn_text = f"Move to {other_container}"
+                btn = tk.Button(
+                    buttons_frame,
+                    text=btn_text,
+                    command=lambda f=container_name, t=other_container: move_supply(f, t),
+                    width=20,
+                    height=2,
+                    font=("Arial", 10, "bold"),
+                    bg="#4CAF50",
+                    fg="white"
+                )
+                btn.pack(pady=5, fill=tk.X)
+
+    # Refresh button
+    refresh_btn = tk.Button(
+        main_frame,
+        text="Refresh",
+        command=update_display,
+        width=15,
+        height=2
+    )
+    refresh_btn.pack(pady=10)
+
+    # Close button
+    close_btn = tk.Button(
+        main_frame,
+        text="Close",
+        command=root.destroy,
+        width=15,
+        height=2
+    )
+    close_btn.pack()
+
+    # Initial display - use passed data, convert to same format as database rows
+    # supplies_data format: (id, name, amount, last_order_date, location)
+    initial_supplies = []
+    for row in supplies_data:
+        # Convert tuple to list format matching database query result
+        initial_supplies.append(row)
+
+    update_display(initial_supplies)
+
+    root.mainloop()
+
+
+def main():
+    """Main test function."""
+    try:
+        print("🧪 Starting Locations & Supplies Data Test")
+        print("=" * 60)
+
+        # Connect to database
+        print("\n🔌 Connecting to database...")
+        conn = get_connection()
+        cur = conn.cursor()
+
+        # Verify connection
+        cur.execute("SELECT VERSION();")
+        version = cur.fetchone()[0]
+        print(f"✓ Connected to MySQL: {version}")
+
+        # Ensure tables exist
+        print("\n🔍 Checking tables...")
+        if not ensure_tables_exist(cur):
+            print("✗ Required tables not available")
+            conn.rollback()
+            cur.close()
+            conn.close()
+            sys.exit(1)
+        conn.commit()
+
+        # Insert sample locations
+        loc_inserted, loc_skipped = insert_sample_locations(cur)
+        conn.commit()
+
+        # Clear old supplies for our containers to start fresh
+        container_names = ['Container A', 'Container B', 'Container C']
+        print("\n🗑️ Clearing old supplies for test containers...")
+        placeholders = ','.join(['%s'] * len(container_names))
+        cur.execute(
+            f"DELETE FROM supplies WHERE location IN ({placeholders})",
+            container_names
+        )
+        cleared_count = cur.rowcount
+        conn.commit()
+        print(f"   ✓ Cleared {cleared_count} old supply entry/entries")
+
+        # Insert sample supplies
+        sup_inserted, sup_failed = insert_sample_supplies(cur)
+        conn.commit()
+
+        # Get table contents for GUI display - only our 3 containers
+        print("\n📊 Fetching table contents...")
+        placeholders = ','.join(['%s'] * len(container_names))
+        cur.execute(
+            f"SELECT name, x, y, width, height, type FROM locations WHERE name IN ({placeholders}) ORDER BY name",
+            container_names
+        )
+        locations_data = cur.fetchall()
+
+        cur.execute(
+            f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name",
+            container_names
+        )
+        supplies_data = cur.fetchall()
+
+        # Display table contents in console
+        print("\n" + "=" * 60)
+        display_table_contents('locations', ['name', 'x', 'y', 'width', 'height', 'type'], locations_data)
+        display_table_contents('supplies', ['id', 'name', 'amount', 'last_order_date', 'location'], supplies_data)
+
+        # Summary
+        print("\n" + "=" * 60)
print("โœ… TEST SUMMARY:") + print(f" Locations: {loc_inserted} inserted, {loc_skipped} skipped (already existed)") + print(f" Supplies: {sup_inserted} inserted, {sup_failed} failed") + print("=" * 60) + + cur.close() + conn.close() + + # Convert data for JSON output (for test_gui.py to display locally) + # Convert tuples to lists for JSON serialization + locations_json = [list(row) for row in locations_data] + supplies_json = [list(row) for row in supplies_data] + + # Convert date objects to strings + for row in supplies_json: + if row[3] is not None and hasattr(row[3], 'isoformat'): + row[3] = row[3].isoformat() + + table_data = { + 'locations': { + 'columns': ['name', 'x', 'y', 'width', 'height', 'type'], + 'data': locations_json + }, + 'supplies': { + 'columns': ['id', 'name', 'amount', 'last_order_date', 'location'], + 'data': supplies_json + } + } + + # Output JSON with special marker for test_gui.py to detect + print("\n" + "=" * 60) + print("TABLE_DATA_JSON_START") + print(json.dumps(table_data, indent=2)) + print("TABLE_DATA_JSON_END") + print("=" * 60) + + # Try to open GUI window locally if tkinter is available and we're not in Docker + if HAS_TKINTER and not os.path.exists("/app"): + print("\n๐ŸชŸ Opening table viewer window...") + try: + create_table_viewer(locations_data, supplies_data) + except Exception as e: + print(f"โš  Could not open GUI window: {e}") + print(" Table contents displayed above in console output.") + + # Exit with appropriate code + if sup_failed > 0: + print("\nโš  Some supplies failed to insert (may be due to missing locations)") + sys.exit(1) + else: + print("\nโœ… All data inserted successfully!") + sys.exit(0) + + except mysql.connector.Error as e: + print(f"\nโœ— Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"\nโœ— Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + main() + diff --git a/src/scripts/test_tables.py b/src/scripts/test_tables.py index f8faf4c..749cba7 100644 --- a/src/scripts/test_tables.py +++ b/src/scripts/test_tables.py @@ -13,6 +13,7 @@ """ import os import sys +import json import mysql.connector from helpers import ( get_sql_base_path, @@ -223,9 +224,49 @@ def main(): created_count, total_count = initialize_schema(cur, test_db_name) conn.commit() - # Verify tables + # Verify tables and collect status + table_files = discover_table_files(SQL_BASE_PATH) + sorted_tables = topological_sort_tables(table_files) + + # Track which tables were created successfully and which failed + successful_tables = [] + failed_tables = [] + + for table_name, _ in sorted_tables: + if table_exists(cur, table_name, test_db_name): + # Check if it has valid structure + columns = get_table_columns(cur, table_name, test_db_name) + if len(columns) > 0: + successful_tables.append(table_name) + else: + failed_tables.append(table_name) + else: + failed_tables.append(table_name) + + # Also check verify_tables for overall status all_valid = verify_tables(cur, test_db_name) + # Output JSON data for GUI viewer + table_data = { + 'database': test_db_name, + 'successful_tables': successful_tables, + 'failed_tables': failed_tables, + 'summary': { + 'created': created_count, + 'total': total_count, + 'all_valid': all_valid, + 'successful_count': len(successful_tables), + 'failed_count': len(failed_tables) + } + } + + # Output JSON with special marker for test_gui.py to detect + print("\n" + "=" * 60) + print("TABLE_DATA_JSON_START") + print(json.dumps(table_data, indent=2)) + 
print("TABLE_DATA_JSON_END") + print("=" * 60) + # Summary print("\n" + "="*50) if all_valid and created_count == total_count: diff --git a/src/sql/supplies/table_supplies.sql b/src/sql/supplies/table_supplies.sql index a0e3691..0f18b44 100644 --- a/src/sql/supplies/table_supplies.sql +++ b/src/sql/supplies/table_supplies.sql @@ -3,5 +3,6 @@ CREATE TABLE supplies ( name VARCHAR(200) NOT NULL, amount INTEGER NOT NULL DEFAULT 0, last_order_date DATE, - location VARCHAR(50) REFERENCES locations(name) + location VARCHAR(50) REFERENCES locations(name), + UNIQUE KEY unique_supply_location (name, location) ); From fc8277c3a818cf8ef59d763ac02b8a3e201a505c Mon Sep 17 00:00:00 2001 From: willzoo Date: Wed, 12 Nov 2025 13:09:58 -0500 Subject: [PATCH 16/20] Location move test now working --- requirements.txt | 1 + src/scripts/test_locations_supplies.py | 170 ++++++++++++++----------- 2 files changed, 98 insertions(+), 73 deletions(-) diff --git a/requirements.txt b/requirements.txt index b32c22c..c032353 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,5 @@ sqlalchemy==2.0.30 alembic==1.13.2 flask==3.0.0 flask-cors==4.0.0 +requests==2.31.0 diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py index efed26a..e8c28ad 100644 --- a/src/scripts/test_locations_supplies.py +++ b/src/scripts/test_locations_supplies.py @@ -16,6 +16,7 @@ import mysql.connector import mysql.connector.errors import tkinter.messagebox +import requests from helpers import parse_database_url, table_exists # Try to import tkinter, but don't fail if not available (e.g., in Docker) @@ -104,24 +105,27 @@ def insert_sample_locations(cur): def insert_sample_supplies(cur): - """Insert sample supplies data - distributed across 3 containers.""" + """Insert sample supplies data - DISTRIBUTED SUPPLY in all containers plus 9 different supplies.""" print("\n๐Ÿ“ฆ Inserting sample supplies...") + # Same supply name in all containers, different amounts + supply_name = "DISTRIBUTED SUPPLY" sample_supplies = [ - # Container A - ('Resistors 1kฮฉ', 50, '2024-01-15', 'Container A'), - ('Capacitors 100ยตF', 30, '2024-01-20', 'Container A'), + # DISTRIBUTED SUPPLY in all 3 containers + (supply_name, 10, '2024-01-15', 'Container A'), + (supply_name, 25, '2024-01-20', 'Container B'), + (supply_name, 5, '2024-02-01', 'Container C'), + # Container A - 3 additional supplies + ('Resistors 1K', 50, '2024-01-15', 'Container A'), + ('Capacitors 100UF', 30, '2024-01-20', 'Container A'), ('Arduino Uno', 5, '2024-02-01', 'Container A'), - ('Breadboards', 10, '2024-02-10', 'Container A'), - # Container B + # Container B - 3 additional supplies ('Screws M3', 200, '2024-01-10', 'Container B'), ('Bolts M3', 150, '2024-01-10', 'Container B'), ('Nuts M3', 300, '2024-01-10', 'Container B'), - ('Wires Red', 100, '2024-01-05', 'Container B'), - # Container C + # Container C - 3 additional supplies ('LEDs Red', 50, '2024-01-12', 'Container C'), ('LEDs Green', 50, '2024-01-12', 'Container C'), - ('LEDs Blue', 50, '2024-01-12', 'Container C'), ('Multimeter', 2, '2024-01-08', 'Container C'), ] @@ -206,18 +210,37 @@ def get_db_connection(): raise def refresh_supplies(): - """Refresh supplies data from database - only for our 3 containers.""" - conn = get_db_connection() - cur = conn.cursor() - placeholders = ','.join(['%s'] * len(container_names)) - cur.execute( - f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name", - container_names - ) - rows = 
cur.fetchall() - cur.close() - conn.close() - return rows + """Refresh supplies data from API - only for our 3 containers.""" + try: + api_url = "http://localhost:5000/api/supplies" + response = requests.get(api_url) + + if response.status_code != 200: + print(f"Warning: API returned {response.status_code}: {response.text}") + return [] + + all_supplies = response.json() + + # Filter to only our containers and convert to tuple format + filtered_supplies = [] + for supply in all_supplies: + if supply.get('location') in container_names: + # Convert to tuple format: (id, name, amount, last_order_date, location) + filtered_supplies.append(( + supply.get('id'), + supply.get('name'), + supply.get('amount'), + supply.get('last_order_date'), + supply.get('location') + )) + + return filtered_supplies + except requests.exceptions.ConnectionError: + print("Warning: Cannot connect to API, returning empty list") + return [] + except Exception as e: + print(f"Warning: Error fetching supplies from API: {e}") + return [] def update_display(supplies_rows=None): """Update the display with current supplies. @@ -269,60 +292,61 @@ def update_display(supplies_rows=None): container_labels[container_name].config(text=f"{container_name} (empty)") def move_supply(from_container, to_container): - """Move all supplies from one container to another.""" - conn = get_db_connection() - cur = conn.cursor() - - # Get all supplies in source container - cur.execute( - "SELECT id, name, amount FROM supplies WHERE location = %s", - (from_container,) - ) - supplies_to_move = cur.fetchall() - - if not supplies_to_move: - tk.messagebox.showinfo("Info", f"No supplies in {from_container} to move.") - cur.close() - conn.close() - return - - # Move each supply - moved_count = 0 - for supply_id, name, amount in supplies_to_move: - try: - # Check if supply exists at destination - cur.execute( - "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", - (name, to_container) - ) - dest_entry = cur.fetchone() - - if dest_entry: - # Update destination - new_amount = dest_entry[1] + amount - cur.execute( - "UPDATE supplies SET amount = %s WHERE id = %s", - (new_amount, dest_entry[0]) - ) - else: - # Create at destination - cur.execute( - "INSERT INTO supplies (name, amount, location) VALUES (%s, %s, %s)", - (name, amount, to_container) + """Move all supplies from one container to another using the API.""" + try: + # Get all supplies in source container using API + api_url = "http://localhost:5000/api/supplies" + response = requests.get(api_url, params={"location": from_container}) + + if response.status_code != 200: + tk.messagebox.showerror("Error", f"Failed to fetch supplies: {response.text}") + return + + supplies_to_move = response.json() + + if not supplies_to_move: + tk.messagebox.showinfo("Info", f"No supplies in {from_container} to move.") + return + + # Move each supply using the API move endpoint + moved_count = 0 + failed_count = 0 + + for supply in supplies_to_move: + try: + # Move all of this supply - omit amount to move all + move_payload = { + "name": supply["name"], + "from_location": from_container, + "to_location": to_container + } + # Don't include amount - API will move all if amount is not provided + move_response = requests.post( + "http://localhost:5000/api/supplies/move", + json=move_payload ) + + if move_response.status_code == 200: + moved_count += 1 + else: + failed_count += 1 + print(f"Failed to move {supply['name']}: {move_response.text}") + except Exception as e: + failed_count += 1 + print(f"Error 
moving {supply['name']}: {e}") + + # Refresh display + update_display() + + if failed_count == 0: + tk.messagebox.showinfo("Success", f"Moved {moved_count} supply type(s) from {from_container} to {to_container}.") + else: + tk.messagebox.showwarning("Partial Success", f"Moved {moved_count} supply type(s), {failed_count} failed.") - # Delete from source - cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) - moved_count += 1 - except Exception as e: - print(f"Error moving {name}: {e}") - - conn.commit() - cur.close() - conn.close() - - update_display() - tk.messagebox.showinfo("Success", f"Moved {moved_count} supply type(s) from {from_container} to {to_container}.") + except requests.exceptions.ConnectionError: + tk.messagebox.showerror("Error", "Cannot connect to API. Make sure the API is running on http://localhost:5000") + except Exception as e: + tk.messagebox.showerror("Error", f"Failed to move supplies: {str(e)}") # Main container main_frame = tk.Frame(root, padx=20, pady=20) From 9dcfa130ea779b12c47afd3a759bef3c733cf3e8 Mon Sep 17 00:00:00 2001 From: willzoo Date: Wed, 12 Nov 2025 13:22:34 -0500 Subject: [PATCH 17/20] Updated readme --- README.md | 127 +++++++++++++++++++++++-- src/scripts/test_locations_supplies.py | 7 +- 2 files changed, 120 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 51e7e98..e7a738d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# MySQL Service Setup +# Mil-SQL API Service -This project provides a simple Dockerized MySQL database and a Python service for interacting with it. +This project provides a Dockerized MySQL database with a Flask REST API for managing locations and supplies inventory. --- @@ -8,6 +8,7 @@ This project provides a simple Dockerized MySQL database and a Python service fo - Docker (>= 20.10) - Docker Compose (v2 recommended) - Make (optional, for convenience) +- Python 3.11+ (for local test GUI) --- @@ -20,28 +21,88 @@ make up This will: - Start a MySQL 8.0 database container -- Initialize the database schema (idempotent - safe to run multiple times) +- Start a Flask API container on port 5000 +- Automatically initialize the database schema (idempotent - safe to run multiple times) + +### 2. Run tests +```bash +make test +``` + +Opens a GUI test runner that allows you to: +- Validate table creation +- Test locations and supplies data insertion +- Interactively move supplies between containers + +--- + +## API Endpoints + +The Flask API runs on `http://localhost:5000` and provides the following endpoints: + +### Locations +- `GET /api/locations` - Get all locations +- `GET /api/locations/` - Get a specific location +- `POST /api/locations` - Create a new location +- `PUT /api/locations/` - Update a location +- `DELETE /api/locations/` - Delete a location + +### Supplies +- `GET /api/supplies` - Get all supplies (optional `?location=` filter) +- `GET /api/supplies/` - Get a specific supply +- `POST /api/supplies` - Create/update a supply (adds to existing if same name+location) +- `PUT /api/supplies/` - Update supply amount or last_order_date +- `DELETE /api/supplies/` - Delete a supply +- `POST /api/supplies/move` - Move supplies between locations + +### Health Check +- `GET /health` - API health status --- ## Database Initialization -The startup script (`src/scripts/app.py`) handles database schema initialization automatically. It: +The Flask API (`src/api/app.py`) handles database schema initialization automatically on startup. It: 1. 
**Creates tables** in the correct dependency order (idempotent) 2. **Only creates tables that don't exist** - safe to run multiple times 3. **Never drops existing data** - production-safe +4. **Handles dependencies** - automatically sorts tables by foreign key relationships ### Environment Variables | Variable | Default | Description | |----------|---------|-------------| | `DATABASE_URL` | `mysql://mysqluser:mysqlpassword@db:3306/mydb` | MySQL connection string | +| `DB_HOST` | `db` | Database hostname | +| `DB_PORT` | `3306` | Database port | +| `DB_USER` | `mysqluser` | Database username | +| `DB_PASSWORD` | `mysqlpassword` | Database password | +| `DB_NAME` | `mydb` | Database name | +| `PORT` | `5000` | Flask API port | +| `MYSQL_ROOT_PASSWORD` | `rootpassword` | MySQL root password | + +--- + +## Testing + +### Test Scripts -### Separate Scripts +- **`test_tables.py`** - Validates table creation in a test database +- **`test_locations_supplies.py`** - Tests locations and supplies with interactive GUI +- **`test_gui.py`** - GUI test runner that discovers and runs all test scripts -- **Seed data**: Use separate seed scripts (to be added) for inserting test data -- **Database reset**: Use separate reset scripts (to be added) for dropping/recreating tables +### Running Tests + +```bash +make test +``` + +This opens a GUI window where you can: +- Run individual tests +- Run all tests +- View test results in real-time +- See table contents in interactive viewers --- @@ -49,11 +110,57 @@ The startup script (`src/scripts/app.py`) handles database schema initialization The database includes the following tables: - `teams` - Team definitions -- `locations` - Storage locations +- `locations` - Storage locations (with coordinates for frontend positioning) - `members` - Team members - `weekly_reports` - Member progress reports -- `supplies` - Inventory items +- `supplies` - Inventory items (with unique constraint on name+location) - `orders` - Purchase orders - `applicants` - Applicant information -See `src/sql/` for table definitions. \ No newline at end of file +### Key Features + +- **Supplies normalization**: Each supply can exist in multiple locations with different amounts +- **Unique constraint**: `(name, location)` ensures no duplicate supply entries per location +- **Foreign keys**: Supplies reference locations, maintaining referential integrity + +See `src/sql/` for table definitions. 
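+
+### Example: Using the API from Python
+
+A minimal client sketch using `requests`. The location and supply names below are illustrative, and the response field names (`name`, `amount`) are assumed from the test scripts rather than a formal API spec; omitting `amount` in a move request transfers the full stock:
+
+```python
+import requests
+
+API = "http://localhost:5000/api"
+
+# List supplies stored at one location (GET /api/supplies with the ?location= filter)
+for supply in requests.get(f"{API}/supplies", params={"location": "Table A"}).json():
+    print(supply["name"], supply["amount"])
+
+# Move an entire supply entry between locations (POST /api/supplies/move);
+# no "amount" key means the API moves everything under that name.
+requests.post(f"{API}/supplies/move", json={
+    "name": "Screws M3",
+    "from_location": "Table A",
+    "to_location": "Table B",
+}).raise_for_status()
+```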
+
+---
+
+## Available Commands
+
+| Command | Description |
+|---------|-------------|
+| `make up` | Start all services |
+| `make down` | Stop all services |
+| `make build` | Build or rebuild services |
+| `make test` | Run GUI test runner |
+| `make mysql` | Open MySQL shell |
+| `make logs` | View service logs |
+| `make status` | Show service status |
+| `make clean` | Remove containers, volumes, and networks |
+
+---
+
+## Project Structure
+
+```
+mil-sql/
+├── src/
+│   ├── api/                    # Flask API application
+│   │   ├── app.py              # Main Flask app
+│   │   ├── db.py               # Database connection pool
+│   │   ├── models/             # Data models
+│   │   └── routes/             # API route handlers
+│   ├── scripts/                # Utility and test scripts
+│   │   ├── helpers.py          # Shared database helpers
+│   │   ├── test_gui.py         # GUI test runner
+│   │   ├── test_tables.py
+│   │   └── test_locations_supplies.py
+│   └── sql/                    # SQL table definitions
+│       └── */table_*.sql
+├── docker-compose.yaml         # Service definitions
+├── Dockerfile                  # Python API container
+├── requirements.txt            # Python dependencies
+└── Makefile                    # Convenience commands
+```
\ No newline at end of file
diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py
index e8c28ad..d14a971 100644
--- a/src/scripts/test_locations_supplies.py
+++ b/src/scripts/test_locations_supplies.py
@@ -338,10 +338,9 @@ def move_supply(from_container, to_container):
         # Refresh display
         update_display()
 
-        if failed_count == 0:
-            tk.messagebox.showinfo("Success", f"Moved {moved_count} supply type(s) from {from_container} to {to_container}.")
-        else:
-            tk.messagebox.showwarning("Partial Success", f"Moved {moved_count} supply type(s), {failed_count} failed.")
+        # Only show error messages, not success messages
+        if failed_count > 0:
+            tk.messagebox.showerror("Error", f"Failed to move {failed_count} supply type(s). {moved_count} succeeded.")
 
     except requests.exceptions.ConnectionError:
         tk.messagebox.showerror("Error", "Cannot connect to API. Make sure the API is running on http://localhost:5000")

From bcbfccec70010ea38325e07adad975f5b11373ce Mon Sep 17 00:00:00 2001
From: willzoo
Date: Wed, 12 Nov 2025 20:51:44 -0500
Subject: [PATCH 18/20] Commands tab added to the test gui and database seeded
 with box location data

---
 Makefile                                      |  12 +-
 README.md                                     |  29 +-
 docker-compose.yaml                           |  22 +
 .../command_drop_production_database.py       |  99 +++
 src/scripts/helpers.py                        |   8 +
 src/scripts/seed_locations.py                 | 179 +++++
 src/scripts/test_gui.py                       | 147 +++-
 src/scripts/test_locations_supplies.py        | 631 +++++++++++++-----
 8 files changed, 970 insertions(+), 157 deletions(-)
 create mode 100644 src/scripts/command_drop_production_database.py
 create mode 100644 src/scripts/seed_locations.py

diff --git a/Makefile b/Makefile
index f167343..1514901 100644
--- a/Makefile
+++ b/Makefile
@@ -1,10 +1,18 @@
 PROJECT_NAME=mysql_service
 COMPOSE=docker-compose -p $(PROJECT_NAME)
 
-## up: Start the mysql and api containers in the background
+## up: Start the mysql, api (port 5000), and api-test (port 5001) containers and seed default locations
 .PHONY: up
 up:
 	$(COMPOSE) up -d
+	@echo "Waiting for services to be ready..."
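+	# Cross-platform wait: 'timeout /t 5' covers Windows cmd, 'sleep 5' covers POSIX shells, and the trailing '|| true' keeps 'make up' from aborting if neither is available.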
+ @timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true + @$(COMPOSE) exec api python src/scripts/seed_locations.py + +## up-empty: Start the mysql, api (port 5000), and api-test (port 5001) containers without seeding data +.PHONY: up-empty +up-empty: + $(COMPOSE) up -d ## down: Stop and remove containers (keeps volumes) .PHONY: down @@ -26,7 +34,7 @@ logs: mysql: $(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb -## test: Open GUI test runner (starts services if not running) +## test: Open GUI test runner (starts services if not running, including test API on port 5001) .PHONY: test test: @echo "Ensuring services are running..." diff --git a/README.md b/README.md index e7a738d..31a528d 100644 --- a/README.md +++ b/README.md @@ -21,8 +21,9 @@ make up This will: - Start a MySQL 8.0 database container -- Start a Flask API container on port 5000 -- Automatically initialize the database schema (idempotent - safe to run multiple times) +- Start a Flask API container on port 5000 (production database: `mydb`) +- Start a Flask API container on port 5001 (test database: `mydb_test`) +- Automatically initialize the database schema for both APIs (idempotent - safe to run multiple times) ### 2. Run tests ```bash @@ -38,7 +39,11 @@ Opens a GUI test runner that allows you to: ## API Endpoints -The Flask API runs on `http://localhost:5000` and provides the following endpoints: +The Flask API runs on two ports: +- **Production API**: `http://localhost:5000` (database: `mydb`) +- **Test API**: `http://localhost:5001` (database: `mydb_test`) + +Both APIs provide the same endpoints: ### Locations - `GET /api/locations` - Get all locations @@ -86,6 +91,22 @@ The Flask API (`src/api/app.py`) handles database schema initialization automati ## Testing +### Test API + +A separate test API instance runs on port 5001 and connects to the `mydb_test` database. This allows test scripts to: +- Use the API endpoints without affecting production data +- Test API functionality in isolation +- Access the test database through the same API interface as production + +Test scripts can use the `TEST_API_URL` constant from `helpers.py`: +```python +from helpers import TEST_API_URL +import requests + +# Use test API (http://localhost:5001/api) +response = requests.get(f"{TEST_API_URL}/locations") +``` + ### Test Scripts - **`test_tables.py`** - Validates table creation in a test database @@ -104,6 +125,8 @@ This opens a GUI window where you can: - View test results in real-time - See table contents in interactive viewers +**Note**: The test API (`api-test` service) starts automatically with `make up` or `make test` and connects to the `mydb_test` database. + --- ## Database Schema diff --git a/docker-compose.yaml b/docker-compose.yaml index a8b43ac..86bfdd7 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -31,6 +31,7 @@ services: DB_USER: mysqluser DB_PASSWORD: mysqlpassword DB_NAME: mydb + MYSQL_ROOT_PASSWORD: rootpassword PORT: 5000 ports: - "5000:5000" @@ -38,5 +39,26 @@ services: - .:/app command: ["python", "-m", "src.api.app"] + api-test: + build: . 
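+    # Test-API counterpart of the 'api' service: same image and code, but bound to the mydb_test database and exposed on port 5001.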
+    container_name: mysql_api_test
+    depends_on:
+      db:
+        condition: service_healthy
+    environment:
+      DATABASE_URL: mysql://mysqluser:mysqlpassword@db:3306/mydb_test
+      DB_HOST: db
+      DB_PORT: 3306
+      DB_USER: mysqluser
+      DB_PASSWORD: mysqlpassword
+      DB_NAME: mydb_test
+      MYSQL_ROOT_PASSWORD: rootpassword
+      PORT: 5001
+    ports:
+      - "5001:5001"
+    volumes:
+      - .:/app
+    command: ["python", "-m", "src.api.app"]
+
 volumes:
   db_data:
diff --git a/src/scripts/command_drop_production_database.py b/src/scripts/command_drop_production_database.py
new file mode 100644
index 0000000..724ad08
--- /dev/null
+++ b/src/scripts/command_drop_production_database.py
@@ -0,0 +1,99 @@
+"""
+Command script to drop the production database.
+
+⚠️ WARNING: This will permanently delete all data in the production database!
+This action cannot be undone.
+
+Usage:
+    python src/scripts/command_drop_production_database.py
+"""
+import os
+import sys
+import mysql.connector
+from helpers import parse_database_url
+
+# Get database URL
+DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+
+
+def drop_production_database():
+    """Drop the production database."""
+    try:
+        # Parse database URL
+        db_params = parse_database_url(DATABASE_URL)
+        database_name = db_params['database']
+
+        print("⚠️ WARNING: You are about to drop the production database!")
+        print(f"   Database: {database_name}")
+        print(f"   Host: {db_params['host']}:{db_params['port']}")
+        print()
+
+        # Get root password for database operations
+        root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword")
+
+        # Connect as root to drop database
+        root_conn_params = {
+            'host': db_params['host'],
+            'port': db_params['port'],
+            'user': 'root',
+            'password': root_password
+        }
+
+        print("🔌 Connecting to MySQL server as root...")
+        root_conn = mysql.connector.connect(**root_conn_params)
+        root_cur = root_conn.cursor()
+
+        # Verify connection
+        root_cur.execute("SELECT VERSION();")
+        version = root_cur.fetchone()[0]
+        print(f"✓ Connected to MySQL: {version}")
+
+        # Check if database exists
+        root_cur.execute(
+            "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA WHERE SCHEMA_NAME = %s",
+            (database_name,)
+        )
+        db_exists = root_cur.fetchone() is not None
+
+        if not db_exists:
+            print(f"\n⚠️ Database '{database_name}' does not exist. Nothing to drop.")
+            root_cur.close()
+            root_conn.close()
+            return
+
+        # Get list of tables before dropping
+        root_cur.execute(f"USE `{database_name}`")
+        root_cur.execute("SHOW TABLES")
+        tables = root_cur.fetchall()
+        table_count = len(tables)
+
+        print(f"\n📊 Database '{database_name}' contains {table_count} table(s):")
+        if tables:
+            for table in tables[:10]:  # Show first 10
+                print(f"   - {table[0]}")
+            if table_count > 10:
+                print(f"   ... 
and {table_count - 10} more") + + print(f"\n๐Ÿ—‘๏ธ Dropping database '{database_name}'...") + root_cur.execute(f"DROP DATABASE IF EXISTS `{database_name}`") + root_conn.commit() + + print(f"โœ“ Database '{database_name}' has been dropped successfully!") + print(f" All {table_count} table(s) and all data have been permanently deleted.") + + root_cur.close() + root_conn.close() + + except mysql.connector.Error as e: + print(f"\nโœ— Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"\nโœ— Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + drop_production_database() + diff --git a/src/scripts/helpers.py b/src/scripts/helpers.py index db3e90a..9680925 100644 --- a/src/scripts/helpers.py +++ b/src/scripts/helpers.py @@ -8,6 +8,14 @@ import re from urllib.parse import urlparse from collections import defaultdict, deque +import os + +# API URLs for test scripts +# Production API (port 5000, database: mydb) +API_URL = os.getenv("API_URL", "http://localhost:5000/api") + +# Test API (port 5001, database: mydb_test) +TEST_API_URL = os.getenv("TEST_API_URL", "http://localhost:5001/api") def get_sql_base_path(script_file): diff --git a/src/scripts/seed_locations.py b/src/scripts/seed_locations.py new file mode 100644 index 0000000..9ce7b7f --- /dev/null +++ b/src/scripts/seed_locations.py @@ -0,0 +1,179 @@ +""" +Seed default locations into the database. +This script is idempotent - it only inserts if no locations exist. +""" +import os +import sys +from pathlib import Path + +# Add src/scripts to path for imports (so we can import helpers directly) +sys.path.insert(0, str(Path(__file__).parent)) + +import mysql.connector +import time +from helpers import parse_database_url, get_sql_base_path, execute_sql_file, table_exists + +# Default locations from milventory frontend (InventoryContext.js) +DEFAULT_LOCATIONS = [ + {'name': 'Workbench', 'x': 140, 'y': 300, 'width': 150, 'height': 170, 'type': 'workbench'}, + {'name': 'File Cabinet A', 'x': 140, 'y': 700, 'width': 200, 'height': 260, 'type': 'cabinet'}, + {'name': 'File Cabinet B', 'x': 140, 'y': 1000, 'width': 200, 'height': 260, 'type': 'cabinet'}, + {'name': 'Drawer T1', 'x': 200, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T2', 'x': 410, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T3', 'x': 620, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T4', 'x': 830, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T5', 'x': 1040, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer T6', 'x': 1250, 'y': 120, 'width': 190, 'height': 120, 'type': 'drawer'}, + {'name': 'Drawer R1', 'x': 1340, 'y': 320, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R2', 'x': 1340, 'y': 520, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R3', 'x': 1340, 'y': 720, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R4', 'x': 1340, 'y': 920, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Drawer R5', 'x': 1340, 'y': 1120, 'width': 170, 'height': 170, 'type': 'drawer'}, + {'name': 'Table A', 'x': 420, 'y': 520, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table B', 'x': 880, 'y': 520, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table C', 'x': 420, 'y': 940, 'width': 300, 'height': 200, 'type': 'table'}, + {'name': 'Table D', 'x': 880, 'y': 940, 'width': 300, 
'height': 200, 'type': 'table'},
+]
+
+
+def seed_locations():
+    """Seed default locations if none exist."""
+    try:
+        # Get database connection parameters
+        database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+        db_params = parse_database_url(database_url)
+        database_name = db_params['database']
+
+        # Get root password for database creation
+        root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword")
+
+        # First, ensure the database exists (connect as root to create it if needed)
+        root_conn_params = {
+            'host': db_params['host'],
+            'port': db_params['port'],
+            'user': 'root',
+            'password': root_password
+        }
+
+        try:
+            # Try to connect to the database first
+            conn = mysql.connector.connect(**db_params)
+            cur = conn.cursor()
+            cur.close()
+            conn.close()
+        except mysql.connector.Error as e:
+            # If database doesn't exist, create it
+            if 'Unknown database' in str(e) or '1049' in str(e):
+                print(f"📦 Database '{database_name}' does not exist, creating it...")
+                root_conn = mysql.connector.connect(**root_conn_params)
+                root_cur = root_conn.cursor()
+                root_cur.execute(f"CREATE DATABASE IF NOT EXISTS `{database_name}`")
+                root_cur.execute(f"GRANT ALL PRIVILEGES ON `{database_name}`.* TO '{db_params['user']}'@'%'")
+                root_cur.execute("FLUSH PRIVILEGES")
+                root_conn.commit()
+                root_cur.close()
+                root_conn.close()
+                print(f"✓ Database '{database_name}' created")
+                # Small delay to ensure privileges are propagated
+                time.sleep(0.5)
+            else:
+                raise
+
+        print("🌱 Checking for existing locations...")
+
+        # Connect to database
+        conn = mysql.connector.connect(**db_params)
+        cur = conn.cursor()
+
+        # Check if locations table exists, create it if needed
+        if not table_exists(cur, 'locations'):
+            print("⚠ Locations table does not exist. 
Creating it...") + # Get SQL base path and find locations table file + sql_base_path = get_sql_base_path(__file__) + # Try both possible filenames + locations_file = sql_base_path / "location" / "table_locations.sql" + if not locations_file.exists(): + locations_file = sql_base_path / "location" / "table_location.sql" + + if locations_file.exists(): + if execute_sql_file(cur, locations_file, "locations table"): + conn.commit() + print("โœ“ Locations table created") + else: + print("โœ— Failed to create locations table") + cur.close() + conn.close() + sys.exit(1) + else: + print(f"โœ— Locations table SQL file not found at {locations_file}") + print(" Waiting for API to create it...") + # Wait and retry a few times + cur.close() + conn.close() + for attempt in range(5): + time.sleep(2) + try: + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + if table_exists(cur, 'locations'): + print("โœ“ Locations table now exists (created by API)") + break + cur.close() + conn.close() + except: + pass + else: + # Final check + conn = mysql.connector.connect(**db_params) + cur = conn.cursor() + if not table_exists(cur, 'locations'): + print("โœ— Locations table still does not exist after waiting.") + cur.close() + conn.close() + sys.exit(1) + + # Check if any locations exist + cur.execute("SELECT COUNT(*) FROM locations") + count = cur.fetchone()[0] + + if count > 0: + print(f"โœ“ Found {count} existing location(s), skipping seed") + cur.close() + conn.close() + return + + # Insert default locations + print(f"๐Ÿ“ฆ Seeding {len(DEFAULT_LOCATIONS)} default locations...") + + insert_count = 0 + for loc in DEFAULT_LOCATIONS: + try: + cur.execute( + "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)", + (loc['name'], loc['x'], loc['y'], loc['width'], loc['height'], loc['type']) + ) + insert_count += 1 + except mysql.connector.IntegrityError: + # Skip if already exists (shouldn't happen, but be safe) + print(f" โš  {loc['name']} already exists, skipping") + + conn.commit() + print(f"โœ“ Successfully seeded {insert_count} location(s)") + + cur.close() + conn.close() + + except mysql.connector.Error as e: + print(f"โœ— Database error: {e}") + sys.exit(1) + except Exception as e: + print(f"โœ— Unexpected error: {e}") + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + seed_locations() + diff --git a/src/scripts/test_gui.py b/src/scripts/test_gui.py index 6b22169..34e3914 100644 --- a/src/scripts/test_gui.py +++ b/src/scripts/test_gui.py @@ -8,7 +8,7 @@ python src/scripts/test_gui.py """ import tkinter as tk -from tkinter import scrolledtext, ttk +from tkinter import scrolledtext, ttk, messagebox import subprocess import sys import os @@ -29,6 +29,8 @@ def __init__(self, root): # Discover test files self.test_files = self.discover_tests() + # Discover command files + self.command_files = self.discover_commands() # Create UI self.create_ui() @@ -45,12 +47,24 @@ def discover_tests(self): test_files.append((test_name, test_file)) return sorted(test_files) + def discover_commands(self): + """Discover all command_*.py files in the scripts directory.""" + command_files = [] + for command_file in self.scripts_dir.glob("command_*.py"): + # Extract command name from filename: command_.py -> + match = re.match(r"command_(.+)\.py$", command_file.name, re.IGNORECASE) + if match: + command_name = match.group(1).replace("_", " ").title() + command_files.append((command_name, command_file)) + return sorted(command_files) + def 
create_ui(self): """Create the user interface.""" # Top frame for buttons button_frame = tk.Frame(self.root, padx=10, pady=10) button_frame.pack(fill=tk.X) + # Tests section tk.Label(button_frame, text="Available Tests:", font=("Arial", 12, "bold")).pack(anchor=tk.W) # Create buttons for each test @@ -83,6 +97,30 @@ def create_ui(self): fg="white" ).pack(side=tk.LEFT, padx=5, pady=5) + # Commands section (separate from tests) + if self.command_files: + # Separator + separator = tk.Frame(button_frame, height=2, bg="gray", relief=tk.SUNKEN) + separator.pack(fill=tk.X, pady=15) + + tk.Label(button_frame, text="Commands:", font=("Arial", 12, "bold"), fg="#d32f2f").pack(anchor=tk.W, pady=(10, 5)) + + commands_frame = tk.Frame(button_frame) + commands_frame.pack(fill=tk.X, pady=5) + + for command_name, command_file in self.command_files: + btn = tk.Button( + commands_frame, + text=f"โš  {command_name}", + command=lambda cf=command_file, cn=command_name: self.run_command(cf, cn), + width=20, + height=2, + font=("Arial", 10), + bg="#d32f2f", + fg="white" + ) + btn.pack(side=tk.LEFT, padx=5, pady=5) + # Output area output_frame = tk.Frame(self.root, padx=10, pady=10) output_frame.pack(fill=tk.BOTH, expand=True) @@ -142,6 +180,80 @@ def run_test(self, test_file): thread = threading.Thread(target=self._run_test_thread, args=(test_file,), daemon=True) thread.start() + def run_command(self, command_file, command_name): + """Run a command file with a warning dialog.""" + # Show warning dialog + warning_msg = ( + f"โš ๏ธ WARNING: You are about to execute a command!\n\n" + f"Command: {command_name}\n" + f"File: {command_file.name}\n\n" + f"This action may modify or delete data.\n" + f"Are you sure you want to continue?" + ) + + result = messagebox.askyesno( + "Confirm Command Execution", + warning_msg, + icon="warning", + default="no" + ) + + if not result: + self.log(f"\nโš  Command '{command_name}' was cancelled by user.") + return + + # User confirmed - proceed with execution + self.log(f"\n{'='*60}") + self.log(f"Executing Command: {command_name}") + self.log(f"File: {command_file.name}") + self.log(f"{'='*60}\n") + self.status_var.set(f"Executing: {command_name}...") + + # Run command in a separate thread to avoid blocking UI + thread = threading.Thread(target=self._run_command_thread, args=(command_file, command_name), daemon=True) + thread.start() + + def _run_command_thread(self, command_file, command_name): + """Run command in a separate thread and capture output.""" + try: + # Get the command filename relative to scripts directory + command_filename = command_file.name + + # Run command in Docker container + # Use docker-compose exec to run the command in the api container + compose_cmd = ["docker-compose", "-p", "mysql_service", "exec", "-T", "api", "python", f"src/scripts/{command_filename}"] + + # Run the command + process = subprocess.Popen( + compose_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + encoding='utf-8', + errors='replace', # Replace invalid characters instead of failing + bufsize=1, + universal_newlines=True, + cwd=str(self.scripts_dir.parent.parent) # Run from project root + ) + + # Stream output in real-time + for line in process.stdout: + line_stripped = line.rstrip() + self.log(line_stripped) + + process.wait() + + if process.returncode == 0: + self.log(f"\nโœ“ Command completed successfully!", "success") + self.status_var.set("Command completed!") + else: + self.log(f"\nโœ— Command failed with exit code {process.returncode}", "error") + 
self.status_var.set("Command failed!") + + except Exception as e: + self.log(f"\nโœ— Error running command: {e}", "error") + self.status_var.set(f"Error: {str(e)}") + def _run_test_thread(self, test_file): """Run test in a separate thread and capture output.""" try: @@ -219,6 +331,7 @@ def _open_table_viewer(self, table_data): try: # Import the interactive viewer from test_locations_supplies import importlib.util + import os viewer_path = self.scripts_dir / "test_locations_supplies.py" spec = importlib.util.spec_from_file_location("test_locations_supplies", viewer_path) test_module = importlib.util.module_from_spec(spec) @@ -228,8 +341,36 @@ def _open_table_viewer(self, table_data): locations_data = [tuple(row) for row in table_data['locations']['data']] supplies_data = [tuple(row) for row in table_data['supplies']['data']] - # Use the interactive viewer - test_module.create_table_viewer(locations_data, supplies_data) + # Create cleanup function to drop test database + def cleanup_test_db(): + """Clean up test database when viewer closes.""" + try: + import mysql.connector + from helpers import parse_database_url + + database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@localhost:3306/mydb") + base_params = parse_database_url(database_url) + test_db_name = f"{base_params['database']}_test" + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # Connect as root to drop database + root_conn = mysql.connector.connect( + host=base_params['host'] if base_params['host'] != 'db' else 'localhost', + port=base_params['port'], + user='root', + password=root_password + ) + root_cur = root_conn.cursor() + root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + root_conn.commit() + root_cur.close() + root_conn.close() + self.log(f"\nโœ“ Test database '{test_db_name}' cleaned up") + except Exception as e: + self.log(f"\nโš  Could not clean up test database: {e}") + + # Use the interactive viewer with cleanup function + test_module.create_table_viewer(locations_data, supplies_data, cleanup_callback=cleanup_test_db) except Exception as e: # Fallback to simple viewer self.log(f"\nโš  Could not load interactive viewer, using simple viewer: {e}", "error") diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py index d14a971..4d7fb53 100644 --- a/src/scripts/test_locations_supplies.py +++ b/src/scripts/test_locations_supplies.py @@ -13,11 +13,19 @@ import os import sys import json +import time import mysql.connector import mysql.connector.errors import tkinter.messagebox import requests -from helpers import parse_database_url, table_exists +from helpers import ( + parse_database_url, + table_exists, + get_sql_base_path, + discover_table_files, + topological_sort_tables, + execute_sql_file +) # Try to import tkinter, but don't fail if not available (e.g., in Docker) try: @@ -27,51 +35,80 @@ except ImportError: HAS_TKINTER = False -# Get database URL -DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") +# Get base database URL - test will use a separate test database +BASE_DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") +# Base path for SQL files (works in Docker and locally) +SQL_BASE_PATH = get_sql_base_path(__file__) -def get_connection(): - """Get database connection.""" - db_params = parse_database_url(DATABASE_URL) - return mysql.connector.connect(**db_params) +def drop_all_tables(cur, database_name): + """Drop all tables from the test database.""" 
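+    # Approach: read the table list from information_schema, disable foreign key
+    # checks so drop order does not matter, drop each table, then re-enable the checks.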
+ print(f"\n๐Ÿ—‘๏ธ Dropping all existing tables from '{database_name}'...") + + # Get all table names from the database + cur.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = %s", + (database_name,) + ) + tables = cur.fetchall() + + if not tables: + print(" โŠ˜ No tables to drop") + return + + table_names = [table[0] for table in tables] + print(f" Found {len(table_names)} table(s) to drop") + + # Disable foreign key checks to avoid constraint issues + cur.execute("SET FOREIGN_KEY_CHECKS = 0") + + dropped_count = 0 + for table_name in table_names: + try: + cur.execute(f"DROP TABLE IF EXISTS `{table_name}`") + dropped_count += 1 + except Exception as e: + print(f" โš  Warning: Failed to drop table '{table_name}': {e}") + + # Re-enable foreign key checks + cur.execute("SET FOREIGN_KEY_CHECKS = 1") + + print(f" โœ“ Dropped {dropped_count}/{len(table_names)} table(s)") -def ensure_tables_exist(cur): - """Ensure locations and supplies tables exist, create if needed.""" - from helpers import get_sql_base_path, discover_table_files, topological_sort_tables, execute_sql_file + +def initialize_schema(cur, database_name=None): + """Initialize database schema in correct dependency order.""" + print("\n๐Ÿ“‹ Discovering table files...") - SQL_BASE_PATH = get_sql_base_path(__file__) + # Discover all table_*.sql files recursively table_files = discover_table_files(SQL_BASE_PATH) - # Find locations and supplies tables - locations_file = None - supplies_file = None - - for table_name, sql_file in table_files: - if table_name.lower() == 'locations': - locations_file = sql_file - elif table_name.lower() == 'supplies': - supplies_file = sql_file - - # Create tables if they don't exist - if not table_exists(cur, 'locations'): - if locations_file: - print("๐Ÿ“‹ Creating locations table...") - execute_sql_file(cur, locations_file, "locations table") - else: - print("โœ— locations table not found and cannot be created") - return False + if not table_files: + print(f"โš  No table_*.sql files found in {SQL_BASE_PATH}") + return 0, 0 - if not table_exists(cur, 'supplies'): - if supplies_file: - print("๐Ÿ“‹ Creating supplies table...") - execute_sql_file(cur, supplies_file, "supplies table") - else: - print("โœ— supplies table not found and cannot be created") - return False + print(f"โœ“ Found {len(table_files)} table file(s)") - return True + # Sort tables by dependency order + print("๐Ÿ“Š Analyzing dependencies...") + sorted_tables = topological_sort_tables(table_files) + + print("\n๐Ÿ“‹ Initializing database schema...") + + success_count = 0 + for table_name, sql_file in sorted_tables: + description = f"{table_name} table" + # Check if table already exists (for idempotency) + if table_exists(cur, table_name, database_name): + print(f"โŠ˜ {description} already exists, skipping") + continue + + if execute_sql_file(cur, sql_file, description): + success_count += 1 + + print(f"\nโœ“ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)") + return success_count, len(sorted_tables) def insert_sample_locations(cur): @@ -176,70 +213,141 @@ def display_table_contents(table_name, columns, rows): print(f"\n Total rows: {len(rows)}") -def create_table_viewer(locations_data, supplies_data): +def create_table_viewer(locations_data, supplies_data, test_db_name=None, base_params=None, cleanup_callback=None): """Create an interactive tkinter window to display and move supplies between containers.""" root = tk.Tk() root.title("Container Supplies - Move Supplies 
Between Containers") root.geometry("1000x700") + # Set up cleanup when window closes + cleanup_called = {'value': False} + + def on_closing(): + if cleanup_callback and not cleanup_called['value']: + cleanup_called['value'] = True + cleanup_callback() + root.destroy() + + root.protocol("WM_DELETE_WINDOW", on_closing) + # Get container names container_names = [row[0] for row in locations_data if 'Container' in row[0]] if len(container_names) < 3: container_names = ['Container A', 'Container B', 'Container C'] + # Test database connection at startup + db_connection_available = False + try: + # Try to connect to test if connection is available + if test_db_name is None or base_params is None: + base_params_test = parse_database_url(BASE_DATABASE_URL) + test_db_name_test = f"{base_params_test['database']}_test" + else: + base_params_test = base_params + test_db_name_test = test_db_name + + db_params_test = { + 'host': base_params_test['host'] if base_params_test['host'] != 'db' else 'localhost', + 'port': base_params_test['port'], + 'user': base_params_test['user'], + 'password': base_params_test['password'], + 'database': test_db_name_test + } + + # Try to connect with different auth plugins + auth_plugins = [None, 'caching_sha2_password', 'mysql_native_password'] + for auth_plugin in auth_plugins: + try: + test_params = db_params_test.copy() + if auth_plugin: + test_params['auth_plugin'] = auth_plugin + test_conn = mysql.connector.connect(**test_params) + test_conn.close() + db_connection_available = True + break + except: + continue + except: + db_connection_available = False + # Database connection for refreshing data - # When running locally (not in Docker), use localhost instead of 'db' + # Use test database only (passed from main function) def get_db_connection(): - db_params = parse_database_url(DATABASE_URL) + if not db_connection_available: + raise Exception("Database connection not available") + + # Use parameters passed from main function + if test_db_name is None or base_params is None: + # Fallback to parsing from environment + base_params_fallback = parse_database_url(BASE_DATABASE_URL) + test_db_name_fallback = f"{base_params_fallback['database']}_test" + else: + base_params_fallback = base_params + test_db_name_fallback = test_db_name + + db_params = { + 'host': base_params_fallback['host'], + 'port': base_params_fallback['port'], + 'user': base_params_fallback['user'], + 'password': base_params_fallback['password'], + 'database': test_db_name_fallback + } # If host is 'db' and we're not in Docker, use localhost if db_params.get('host') == 'db' and not os.path.exists("/app"): db_params['host'] = 'localhost' - # Try to connect - if auth plugin fails, try without specifying it - try: - return mysql.connector.connect(**db_params) - except mysql.connector.errors.DatabaseError as e: - if 'auth' in str(e).lower() or 'plugin' in str(e).lower(): - # Try with different auth plugin - db_params['auth_plugin'] = 'caching_sha2_password' - try: - return mysql.connector.connect(**db_params) - except: - # Last resort: try mysql_native_password - db_params['auth_plugin'] = 'mysql_native_password' - return mysql.connector.connect(**db_params) - raise + + # Try different authentication plugins in order + auth_plugins = [None, 'caching_sha2_password', 'mysql_native_password'] + last_error = None + + for auth_plugin in auth_plugins: + try: + test_params = db_params.copy() + if auth_plugin: + test_params['auth_plugin'] = auth_plugin + return mysql.connector.connect(**test_params) + except 
mysql.connector.errors.DatabaseError as e: + last_error = e + error_str = str(e).lower() + # If it's an auth/plugin error, try next plugin + if 'auth' in error_str or 'plugin' in error_str: + continue + # If it's a different error, raise it immediately + raise + except Exception as e: + last_error = e + # For non-auth errors, try next plugin anyway + continue + + # If all plugins failed, raise the last error + if last_error: + raise last_error + raise Exception("Failed to connect to database with any authentication plugin") def refresh_supplies(): - """Refresh supplies data from API - only for our 3 containers.""" + """Refresh supplies data from test database - only for our 3 containers.""" + if not db_connection_available: + return [] # Return empty if connection not available + try: - api_url = "http://localhost:5000/api/supplies" - response = requests.get(api_url) - - if response.status_code != 200: - print(f"Warning: API returned {response.status_code}: {response.text}") - return [] + # Read directly from test database, not API + conn = get_db_connection() + cur = conn.cursor() - all_supplies = response.json() + # Get supplies from test database for our containers + placeholders = ','.join(['%s'] * len(container_names)) + cur.execute( + f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name", + container_names + ) + supplies = cur.fetchall() - # Filter to only our containers and convert to tuple format - filtered_supplies = [] - for supply in all_supplies: - if supply.get('location') in container_names: - # Convert to tuple format: (id, name, amount, last_order_date, location) - filtered_supplies.append(( - supply.get('id'), - supply.get('name'), - supply.get('amount'), - supply.get('last_order_date'), - supply.get('location') - )) + cur.close() + conn.close() - return filtered_supplies - except requests.exceptions.ConnectionError: - print("Warning: Cannot connect to API, returning empty list") - return [] + return list(supplies) except Exception as e: - print(f"Warning: Error fetching supplies from API: {e}") + print(f"Warning: Error fetching supplies from test database: {e}") return [] def update_display(supplies_rows=None): @@ -292,48 +400,113 @@ def update_display(supplies_rows=None): container_labels[container_name].config(text=f"{container_name} (empty)") def move_supply(from_container, to_container): - """Move all supplies from one container to another using the API.""" + """Move all supplies from one container to another in test database.""" + if not db_connection_available: + tk.messagebox.showwarning( + "Database Unavailable", + "Cannot move supplies: Database connection is not available.\n\n" + "This may be due to MySQL authentication issues when connecting from Windows to Docker.\n" + "The data is displayed in read-only mode." 
+ ) + return + + conn = None try: - # Get all supplies in source container using API - api_url = "http://localhost:5000/api/supplies" - response = requests.get(api_url, params={"location": from_container}) + # Get all supplies in source container from test database + # Create a fresh connection to avoid any auth issues + conn = get_db_connection() + cur = conn.cursor() - if response.status_code != 200: - tk.messagebox.showerror("Error", f"Failed to fetch supplies: {response.text}") - return - - supplies_to_move = response.json() + cur.execute( + "SELECT id, name, amount, last_order_date, location FROM supplies WHERE location = %s", + (from_container,) + ) + supplies_to_move = cur.fetchall() if not supplies_to_move: tk.messagebox.showinfo("Info", f"No supplies in {from_container} to move.") + cur.close() + if conn: + conn.close() return - # Move each supply using the API move endpoint + # Move each supply in the test database moved_count = 0 failed_count = 0 for supply in supplies_to_move: + supply_id, name, amount, last_order_date, location = supply try: - # Move all of this supply - omit amount to move all - move_payload = { - "name": supply["name"], - "from_location": from_container, - "to_location": to_container - } - # Don't include amount - API will move all if amount is not provided - move_response = requests.post( - "http://localhost:5000/api/supplies/move", - json=move_payload + # Check if supply already exists in target location + cur.execute( + "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", + (name, to_container) ) + existing = cur.fetchone() + + if existing: + # Merge: add amounts + existing_id, existing_amount = existing + new_amount = existing_amount + amount + cur.execute( + "UPDATE supplies SET amount = %s WHERE id = %s", + (new_amount, existing_id) + ) + # Delete from source + cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) + else: + # Move: update location + cur.execute( + "UPDATE supplies SET location = %s WHERE id = %s", + (to_container, supply_id) + ) - if move_response.status_code == 200: - moved_count += 1 + moved_count += 1 + except mysql.connector.errors.DatabaseError as e: + failed_count += 1 + error_str = str(e).lower() + # If it's an auth/packet error, try to reconnect + if 'malformed packet' in error_str or 'auth' in error_str or 'plugin' in error_str: + print(f"Connection error moving {name}, will retry with fresh connection: {e}") + # Close current connection and try again with fresh one + try: + cur.close() + if conn: + conn.close() + except: + pass + # Retry with fresh connection + try: + conn = get_db_connection() + cur = conn.cursor() + # Re-check if supply exists in target (need to re-query) + cur.execute( + "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", + (name, to_container) + ) + existing_retry = cur.fetchone() + # Retry the operation + if existing_retry: + existing_id, existing_amount = existing_retry + new_amount = existing_amount + amount + cur.execute("UPDATE supplies SET amount = %s WHERE id = %s", (new_amount, existing_id)) + cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) + else: + cur.execute("UPDATE supplies SET location = %s WHERE id = %s", (to_container, supply_id)) + moved_count += 1 + failed_count -= 1 # Adjust count since retry succeeded + except Exception as retry_e: + print(f"Retry also failed for {name}: {retry_e}") else: - failed_count += 1 - print(f"Failed to move {supply['name']}: {move_response.text}") + print(f"Error moving {name}: {e}") except Exception 
as e: failed_count += 1 - print(f"Error moving {supply['name']}: {e}") + print(f"Error moving {name}: {e}") + + if conn: + conn.commit() + cur.close() + conn.close() # Refresh display update_display() @@ -342,15 +515,49 @@ def move_supply(from_container, to_container): if failed_count > 0: tk.messagebox.showerror("Error", f"Failed to move {failed_count} supply type(s). {moved_count} succeeded.") - except requests.exceptions.ConnectionError: - tk.messagebox.showerror("Error", "Cannot connect to API. Make sure the API is running on http://localhost:5000") + except mysql.connector.errors.DatabaseError as e: + error_str = str(e).lower() + if 'malformed packet' in error_str or 'auth' in error_str: + # Try one more time with a completely fresh connection + try: + if conn: + try: + conn.close() + except: + pass + conn = get_db_connection() + # If we can get a connection, show a more helpful message + conn.close() + tk.messagebox.showerror("Error", f"Database connection issue. Please try again. Error: {str(e)}") + except Exception as cleanup_e: + tk.messagebox.showerror("Error", f"Failed to connect to database. Make sure MySQL is running. Original error: {str(e)}, Cleanup error: {str(cleanup_e)}") + else: + tk.messagebox.showerror("Error", f"Failed to move supplies: {str(e)}") except Exception as e: tk.messagebox.showerror("Error", f"Failed to move supplies: {str(e)}") + finally: + # Ensure connection is closed + if conn: + try: + conn.close() + except: + pass # Main container main_frame = tk.Frame(root, padx=20, pady=20) main_frame.pack(fill=tk.BOTH, expand=True) + # Show warning if database connection unavailable + if not db_connection_available: + warning_label = tk.Label( + main_frame, + text="โš  Database connection unavailable - Move operations disabled\n(Data is read-only)", + font=("Arial", 10), + fg="orange", + bg="yellow" + ) + warning_label.pack(pady=10) + # Title title_label = tk.Label(main_frame, text="Container Supplies", font=("Arial", 16, "bold")) title_label.pack(pady=(0, 20)) @@ -392,18 +599,20 @@ def move_supply(from_container, to_container): width=20, height=2, font=("Arial", 10, "bold"), - bg="#4CAF50", - fg="white" + bg="#4CAF50" if db_connection_available else "#cccccc", + fg="white", + state=tk.NORMAL if db_connection_available else tk.DISABLED ) btn.pack(pady=5, fill=tk.X) - # Refresh button + # Refresh button (only enabled if DB connection available) refresh_btn = tk.Button( main_frame, text="Refresh", command=update_display, width=15, - height=2 + height=2, + state=tk.NORMAL if db_connection_available else tk.DISABLED ) refresh_btn.pack(pady=10) @@ -430,52 +639,118 @@ def move_supply(from_container, to_container): def main(): - """Main test function.""" + """Main test function - uses test database only, deletes and recreates it.""" + root_conn = None + conn = None try: + # Parse base database URL + base_params = parse_database_url(BASE_DATABASE_URL) + test_db_name = f"{base_params['database']}_test" + print("๐Ÿงช Starting Locations & Supplies Data Test") + print(f"๐Ÿ“Š Test database: {test_db_name}") print("=" * 60) + print(f"๐Ÿ”Œ Connecting to MySQL server...") - # Connect to database - print("\n๐Ÿ”Œ Connecting to database...") - conn = get_connection() - cur = conn.cursor() + # Use root credentials for database operations + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + + # Connect as root + root_conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': 'root', + 'password': root_password + } + + root_conn = 
mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() # Verify connection - cur.execute("SELECT VERSION();") - version = cur.fetchone()[0] + root_cur.execute("SELECT VERSION();") + version = root_cur.fetchone()[0] print(f"โœ“ Connected to MySQL: {version}") - # Ensure tables exist - print("\n๐Ÿ” Checking tables...") - if not ensure_tables_exist(cur): - print("โœ— Required tables not available") - conn.rollback() - cur.close() - conn.close() - sys.exit(1) + # STEP 1: Drop test database if it exists (clean start) + print(f"\n๐Ÿ—‘๏ธ Dropping test database '{test_db_name}' if it exists...") + root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + root_conn.commit() + print(f"โœ“ Test database dropped (if it existed)") + + # Small delay to ensure database is fully dropped + time.sleep(0.5) + + # STEP 2: Create fresh test database + print(f"\n๐Ÿ“ฆ Creating fresh test database '{test_db_name}'...") + root_cur.execute(f"CREATE DATABASE `{test_db_name}`") + + # Grant permissions to mysqluser on the test database + username = base_params['user'] + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + print(f"โœ“ Test database created and permissions granted") + + root_cur.close() + root_conn.close() + root_conn = None + + # Small delay to ensure privileges are propagated + time.sleep(0.5) + + # STEP 3: Connect as mysqluser to test database + print(f"\n๐Ÿ”Œ Connecting as '{base_params['user']}' to test database...") + conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': base_params['user'], + 'password': base_params['password'], + 'database': test_db_name + } + + try: + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"โœ“ Connected successfully to test database") + except mysql.connector.Error as e: + print(f"โœ— Failed to connect as {base_params['user']}: {e}") + print(f" Attempting to verify privileges...") + # Try to reconnect as root to check if user exists + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"SELECT User, Host FROM mysql.user WHERE User = '{username}'") + users = root_cur.fetchall() + if not users: + print(f" โš  User '{username}' does not exist. 
Creating user...") + root_cur.execute(f"CREATE USER IF NOT EXISTS '{username}'@'%' IDENTIFIED BY '{base_params['password']}'") + root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") + root_cur.execute("FLUSH PRIVILEGES") + root_conn.commit() + root_cur.close() + root_conn.close() + root_conn = None + time.sleep(0.5) + # Retry connection + conn = mysql.connector.connect(**conn_params) + cur = conn.cursor() + print(f"โœ“ Connected successfully after creating user") + else: + raise + + # STEP 4: Initialize schema in test database + created_count, total_count = initialize_schema(cur, test_db_name) conn.commit() - # Insert sample locations + # STEP 5: Insert sample locations loc_inserted, loc_skipped = insert_sample_locations(cur) conn.commit() - # Clear old supplies for our containers to start fresh + # STEP 6: Insert sample supplies container_names = ['Container A', 'Container B', 'Container C'] - print("\n๐Ÿ—‘๏ธ Clearing old supplies for test containers...") - placeholders = ','.join(['%s'] * len(container_names)) - cur.execute( - f"DELETE FROM supplies WHERE location IN ({placeholders})", - container_names - ) - cleared_count = cur.rowcount - conn.commit() - print(f" โœ“ Cleared {cleared_count} old supply entry/entries") - - # Insert sample supplies sup_inserted, sup_failed = insert_sample_supplies(cur) conn.commit() - # Get table contents for GUI display - only our 3 containers + # STEP 7: Get table contents for GUI display - only our 3 containers print("\n๐Ÿ“Š Fetching table contents...") placeholders = ','.join(['%s'] * len(container_names)) cur.execute( @@ -502,11 +777,13 @@ def main(): print(f" Supplies: {sup_inserted} inserted, {sup_failed} failed") print("=" * 60) + # Close user connection - database stays alive for GUI cur.close() conn.close() + conn = None + # IMPORTANT: Database must remain alive for GUI to use it! # Convert data for JSON output (for test_gui.py to display locally) - # Convert tuples to lists for JSON serialization locations_json = [list(row) for row in locations_data] supplies_json = [list(row) for row in supplies_data] @@ -533,31 +810,87 @@ def main(): print("TABLE_DATA_JSON_END") print("=" * 60) - # Try to open GUI window locally if tkinter is available and we're not in Docker - if HAS_TKINTER and not os.path.exists("/app"): - print("\n๐ŸชŸ Opening table viewer window...") - try: - create_table_viewer(locations_data, supplies_data) - except Exception as e: - print(f"โš  Could not open GUI window: {e}") - print(" Table contents displayed above in console output.") + # IMPORTANT: Database MUST stay alive - do NOT drop it here! 
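The auth-plugin fallback that this patch threads through get_db_connection() is easier to read as a standalone helper. A minimal sketch, assuming mysql-connector-python; the helper name and parameter plumbing are illustrative, not the patch's exact code:

    import mysql.connector
    from mysql.connector import errors

    AUTH_PLUGINS = (None, "caching_sha2_password", "mysql_native_password")

    def connect_with_fallback(**conn_params):
        """Try each auth plugin in turn; re-raise non-auth errors immediately."""
        last_error = None
        for plugin in AUTH_PLUGINS:
            attempt = dict(conn_params)
            if plugin is not None:
                attempt["auth_plugin"] = plugin  # None means driver default
            try:
                return mysql.connector.connect(**attempt)
            except errors.DatabaseError as e:
                last_error = e
                if "auth" in str(e).lower() or "plugin" in str(e).lower():
                    continue  # authentication mismatch: try the next plugin
                raise  # unrelated failure (bad host, unknown db): surface it now
        raise last_error or RuntimeError("no auth plugin accepted the connection")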
+ # The GUI will be opened by test_gui.py locally, and cleanup will happen when GUI closes + # If running directly (not through test_gui.py), we'll handle cleanup differently - # Exit with appropriate code + # Check if we're being run through test_gui.py (which will open GUI locally) + # test_gui.py will detect the JSON output and open the viewer + # So we should NOT drop the database here - let test_gui.py handle it + + # Exit with appropriate code (but don't drop database - let GUI handle cleanup) if sup_failed > 0: print("\nโš  Some supplies failed to insert (may be due to missing locations)") + # Drop database on failure + print(f"\n๐Ÿ—‘๏ธ Cleaning up: Dropping test database '{test_db_name}'...") + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + root_conn.commit() + root_cur.close() + root_conn.close() + print(f"โœ“ Test database '{test_db_name}' dropped") sys.exit(1) else: print("\nโœ… All data inserted successfully!") + # DO NOT drop database here - test_gui.py will open GUI and handle cleanup + # The database must stay alive for the GUI to use it + print(f" (Test database '{test_db_name}' will be cleaned up when GUI closes)") sys.exit(0) except mysql.connector.Error as e: print(f"\nโœ— Database error: {e}") + # Try to clean up test database on error + try: + if root_conn is None: + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + root_conn.commit() + root_cur.close() + root_conn.close() + print(f"โœ“ Cleaned up test database '{test_db_name}'") + except: + pass sys.exit(1) except Exception as e: print(f"\nโœ— Unexpected error: {e}") import traceback traceback.print_exc() + # Try to clean up test database on error + try: + base_params = parse_database_url(BASE_DATABASE_URL) + test_db_name = f"{base_params['database']}_test" + root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") + root_conn_params = { + 'host': base_params['host'], + 'port': base_params['port'], + 'user': 'root', + 'password': root_password + } + if root_conn is None: + root_conn = mysql.connector.connect(**root_conn_params) + root_cur = root_conn.cursor() + root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + root_conn.commit() + root_cur.close() + root_conn.close() + print(f"โœ“ Cleaned up test database '{test_db_name}'") + except: + pass sys.exit(1) + finally: + # Ensure connections are closed + if conn: + try: + conn.close() + except: + pass + if root_conn: + try: + root_conn.close() + except: + pass if __name__ == "__main__": From 29a31812a9c0a252577936347dc12fb90a2fc569 Mon Sep 17 00:00:00 2001 From: willzoo Date: Wed, 12 Nov 2025 21:12:23 -0500 Subject: [PATCH 19/20] fixed location supply test --- src/api/db.py | 2 +- src/scripts/helpers.py | 9 +- src/scripts/test_gui.py | 32 +- src/scripts/test_locations_supplies.py | 1163 ++++++++---------------- 4 files changed, 389 insertions(+), 817 deletions(-) diff --git a/src/api/db.py b/src/api/db.py index 958f5ee..deb998d 100644 --- a/src/api/db.py +++ b/src/api/db.py @@ -13,7 +13,7 @@ 'password': os.getenv('DB_PASSWORD', 'mysqlpassword'), 'database': os.getenv('DB_NAME', 'mydb'), 'pool_name': 'mil_sql_pool', - 'pool_size': 5, + 'pool_size': int(os.getenv('DB_POOL_SIZE', 10)), # Increased default to 10, configurable via env 'pool_reset_session': True } diff --git a/src/scripts/helpers.py b/src/scripts/helpers.py index 9680925..bcf8532 
100644 --- a/src/scripts/helpers.py +++ b/src/scripts/helpers.py @@ -15,7 +15,14 @@ API_URL = os.getenv("API_URL", "http://localhost:5000/api") # Test API (port 5001, database: mydb_test) -TEST_API_URL = os.getenv("TEST_API_URL", "http://localhost:5001/api") +# Detect if running in Docker (check for /app path or container name) +# When in Docker, use service name 'api-test', otherwise use 'localhost' +if os.path.exists("/app") or os.getenv("HOSTNAME", "").startswith("mysql_api"): + # Running in Docker container - use service name + TEST_API_URL = os.getenv("TEST_API_URL", "http://api-test:5001/api") +else: + # Running locally - use localhost + TEST_API_URL = os.getenv("TEST_API_URL", "http://localhost:5001/api") def get_sql_base_path(script_file): diff --git a/src/scripts/test_gui.py b/src/scripts/test_gui.py index 34e3914..60265e0 100644 --- a/src/scripts/test_gui.py +++ b/src/scripts/test_gui.py @@ -341,36 +341,8 @@ def _open_table_viewer(self, table_data): locations_data = [tuple(row) for row in table_data['locations']['data']] supplies_data = [tuple(row) for row in table_data['supplies']['data']] - # Create cleanup function to drop test database - def cleanup_test_db(): - """Clean up test database when viewer closes.""" - try: - import mysql.connector - from helpers import parse_database_url - - database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@localhost:3306/mydb") - base_params = parse_database_url(database_url) - test_db_name = f"{base_params['database']}_test" - root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") - - # Connect as root to drop database - root_conn = mysql.connector.connect( - host=base_params['host'] if base_params['host'] != 'db' else 'localhost', - port=base_params['port'], - user='root', - password=root_password - ) - root_cur = root_conn.cursor() - root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") - root_conn.commit() - root_cur.close() - root_conn.close() - self.log(f"\nโœ“ Test database '{test_db_name}' cleaned up") - except Exception as e: - self.log(f"\nโš  Could not clean up test database: {e}") - - # Use the interactive viewer with cleanup function - test_module.create_table_viewer(locations_data, supplies_data, cleanup_callback=cleanup_test_db) + # Use the interactive viewer (no cleanup needed - API manages the test database) + test_module.create_table_viewer(locations_data, supplies_data, cleanup_callback=None) except Exception as e: # Fallback to simple viewer self.log(f"\nโš  Could not load interactive viewer, using simple viewer: {e}", "error") diff --git a/src/scripts/test_locations_supplies.py b/src/scripts/test_locations_supplies.py index 4d7fb53..33e652b 100644 --- a/src/scripts/test_locations_supplies.py +++ b/src/scripts/test_locations_supplies.py @@ -1,11 +1,11 @@ """ -Test script for locations and supplies data insertion and verification. +Test script for locations and supplies data using the test API. This script: -1. Inserts sample location data -2. Inserts sample supplies data (linked to locations) -3. Verifies the data was inserted correctly -4. Displays table contents in a GUI window +1. Creates sample locations (3 containers) via API +2. Creates sample supplies via API +3. Displays data in an interactive GUI window +4. 
Allows moving supplies between containers via API Usage: python src/scripts/test_locations_supplies.py @@ -14,369 +14,301 @@ import sys import json import time -import mysql.connector -import mysql.connector.errors -import tkinter.messagebox import requests -from helpers import ( - parse_database_url, - table_exists, - get_sql_base_path, - discover_table_files, - topological_sort_tables, - execute_sql_file -) +import tkinter as tk +from tkinter import messagebox +from pathlib import Path -# Try to import tkinter, but don't fail if not available (e.g., in Docker) -try: - import tkinter as tk - from tkinter import ttk - HAS_TKINTER = True -except ImportError: - HAS_TKINTER = False +# Add src/scripts to path for imports +sys.path.insert(0, str(Path(__file__).parent)) -# Get base database URL - test will use a separate test database -BASE_DATABASE_URL = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb") +from helpers import TEST_API_URL -# Base path for SQL files (works in Docker and locally) -SQL_BASE_PATH = get_sql_base_path(__file__) +# Container names for this test +CONTAINER_NAMES = ['Container A', 'Container B', 'Container C'] +# Sample supplies (9 distinct supplies, 3 per container) +SAMPLE_SUPPLIES = [ + # Container A + {'name': 'Supply A1', 'amount': 10, 'location': 'Container A'}, + {'name': 'Supply A2', 'amount': 15, 'location': 'Container A'}, + {'name': 'Supply A3', 'amount': 20, 'location': 'Container A'}, + # Container B + {'name': 'Supply B1', 'amount': 12, 'location': 'Container B'}, + {'name': 'Supply B2', 'amount': 18, 'location': 'Container B'}, + {'name': 'Supply B3', 'amount': 25, 'location': 'Container B'}, + # Container C + {'name': 'Supply C1', 'amount': 8, 'location': 'Container C'}, + {'name': 'Supply C2', 'amount': 14, 'location': 'Container C'}, + {'name': 'Supply C3', 'amount': 22, 'location': 'Container C'}, +] -def drop_all_tables(cur, database_name): - """Drop all tables from the test database.""" - print(f"\n๐Ÿ—‘๏ธ Dropping all existing tables from '{database_name}'...") - - # Get all table names from the database - cur.execute( - "SELECT table_name FROM information_schema.tables WHERE table_schema = %s", - (database_name,) - ) - tables = cur.fetchall() - - if not tables: - print(" โŠ˜ No tables to drop") - return - - table_names = [table[0] for table in tables] - print(f" Found {len(table_names)} table(s) to drop") - - # Disable foreign key checks to avoid constraint issues - cur.execute("SET FOREIGN_KEY_CHECKS = 0") - - dropped_count = 0 - for table_name in table_names: - try: - cur.execute(f"DROP TABLE IF EXISTS `{table_name}`") - dropped_count += 1 - except Exception as e: - print(f" โš  Warning: Failed to drop table '{table_name}': {e}") - - # Re-enable foreign key checks - cur.execute("SET FOREIGN_KEY_CHECKS = 1") - - print(f" โœ“ Dropped {dropped_count}/{len(table_names)} table(s)") +# DISTRIBUTED SUPPLY amounts per container +DISTRIBUTED_SUPPLY_AMOUNTS = { + 'Container A': 5, + 'Container B': 10, + 'Container C': 15, +} -def initialize_schema(cur, database_name=None): - """Initialize database schema in correct dependency order.""" - print("\n๐Ÿ“‹ Discovering table files...") - - # Discover all table_*.sql files recursively - table_files = discover_table_files(SQL_BASE_PATH) - - if not table_files: - print(f"โš  No table_*.sql files found in {SQL_BASE_PATH}") - return 0, 0 - - print(f"โœ“ Found {len(table_files)} table file(s)") - - # Sort tables by dependency order - print("๐Ÿ“Š Analyzing dependencies...") - sorted_tables = 
topological_sort_tables(table_files) - - print("\n๐Ÿ“‹ Initializing database schema...") - - success_count = 0 - for table_name, sql_file in sorted_tables: - description = f"{table_name} table" - # Check if table already exists (for idempotency) - if table_exists(cur, table_name, database_name): - print(f"โŠ˜ {description} already exists, skipping") - continue - - if execute_sql_file(cur, sql_file, description): - success_count += 1 - - print(f"\nโœ“ Schema initialization complete ({success_count}/{len(sorted_tables)} tables created)") - return success_count, len(sorted_tables) +def check_api_available(): + """Check if the test API is available.""" + try: + # Get base URL (remove trailing /api if present) + base_url = TEST_API_URL + if base_url.endswith('/api'): + base_url = base_url[:-4] # Remove '/api' + elif base_url.endswith('/api/'): + base_url = base_url[:-5] # Remove '/api/' + + # Ensure base URL ends with / + if not base_url.endswith('/'): + base_url += '/' + + response = requests.get(f"{base_url}health", timeout=2) + return response.status_code == 200 + except Exception as e: + print(f" Debug: API check failed: {e}") + return False -def insert_sample_locations(cur): - """Insert sample location data - just 3 containers.""" - print("\n๐Ÿ“ฆ Inserting sample locations...") - - sample_locations = [ - ('Container A', 100, 100, 200, 200, 'container'), - ('Container B', 400, 100, 200, 200, 'container'), - ('Container C', 700, 100, 200, 200, 'container'), - ] - - inserted = 0 - skipped = 0 - - for name, x, y, width, height, loc_type in sample_locations: +def create_test_locations(): + """Create test container locations via API.""" + print("๐Ÿ“ฆ Creating test locations...") + + locations_created = 0 + locations_skipped = 0 + + for i, container_name in enumerate(CONTAINER_NAMES): + location_data = { + 'name': container_name, + 'x': 200 + i * 300, + 'y': 200, + 'width': 200, + 'height': 200, + 'type': 'cabinet' + } + try: - cur.execute( - "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)", - (name, x, y, width, height, loc_type) - ) - inserted += 1 - except mysql.connector.IntegrityError: - # Location already exists, skip - skipped += 1 - except Exception as e: - print(f" โœ— Failed to insert {name}: {e}") - - print(f" โœ“ Inserted {inserted} location(s), skipped {skipped} existing") - return inserted, skipped + response = requests.post(f"{TEST_API_URL}/locations", json=location_data, timeout=5) + if response.status_code == 201: + locations_created += 1 + print(f" โœ“ Created {container_name}") + elif response.status_code == 409: + locations_skipped += 1 + print(f" โŠ˜ {container_name} already exists, skipping") + else: + print(f" โœ— Failed to create {container_name}: {response.status_code} - {response.text}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + print(f" โœ— Error creating {container_name}: {e}") + time.sleep(0.1) + + return locations_created, locations_skipped -def insert_sample_supplies(cur): - """Insert sample supplies data - DISTRIBUTED SUPPLY in all containers plus 9 different supplies.""" - print("\n๐Ÿ“ฆ Inserting sample supplies...") - - # Same supply name in all containers, different amounts - supply_name = "DISTRIBUTED SUPPLY" - sample_supplies = [ - # DISTRIBUTED SUPPLY in all 3 containers - (supply_name, 10, '2024-01-15', 'Container A'), - (supply_name, 25, '2024-01-20', 'Container B'), - (supply_name, 5, '2024-02-01', 'Container C'), - # Container A - 3 
additional supplies - ('Resistors 1K', 50, '2024-01-15', 'Container A'), - ('Capacitors 100UF', 30, '2024-01-20', 'Container A'), - ('Arduino Uno', 5, '2024-02-01', 'Container A'), - # Container B - 3 additional supplies - ('Screws M3', 200, '2024-01-10', 'Container B'), - ('Bolts M3', 150, '2024-01-10', 'Container B'), - ('Nuts M3', 300, '2024-01-10', 'Container B'), - # Container C - 3 additional supplies - ('LEDs Red', 50, '2024-01-12', 'Container C'), - ('LEDs Green', 50, '2024-01-12', 'Container C'), - ('Multimeter', 2, '2024-01-08', 'Container C'), - ] - - inserted = 0 - failed = 0 - - for name, amount, last_order_date, location in sample_supplies: +def create_test_supplies(): + """Create test supplies via API.""" + print("๐Ÿ“ฆ Creating test supplies...") + + supplies_created = 0 + supplies_failed = 0 + + # First, create DISTRIBUTED SUPPLY in all containers + for container_name in CONTAINER_NAMES: + supply_data = { + 'name': 'DISTRIBUTED SUPPLY', + 'amount': DISTRIBUTED_SUPPLY_AMOUNTS[container_name], + 'location': container_name + } + + try: + response = requests.post(f"{TEST_API_URL}/supplies", json=supply_data, timeout=5) + if response.status_code in [201, 200]: + supplies_created += 1 + print(f" โœ“ Created DISTRIBUTED SUPPLY in {container_name}") + else: + supplies_failed += 1 + print(f" โœ— Failed to create DISTRIBUTED SUPPLY in {container_name}: {response.status_code}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + supplies_failed += 1 + print(f" โœ— Error creating DISTRIBUTED SUPPLY in {container_name}: {e}") + time.sleep(0.1) + + # Then create the 9 distinct supplies + for supply in SAMPLE_SUPPLIES: try: - cur.execute( - "INSERT INTO supplies (name, amount, last_order_date, location) VALUES (%s, %s, %s, %s)", - (name, amount, last_order_date, location) + response = requests.post(f"{TEST_API_URL}/supplies", json=supply, timeout=5) + if response.status_code in [201, 200]: + supplies_created += 1 + else: + supplies_failed += 1 + print(f" โœ— Failed to create {supply['name']}: {response.status_code}") + # Small delay to avoid exhausting connection pool + time.sleep(0.1) + except requests.exceptions.RequestException as e: + supplies_failed += 1 + print(f" โœ— Error creating {supply['name']}: {e}") + time.sleep(0.1) + + return supplies_created, supplies_failed + + +def get_supplies_from_api(): + """Fetch supplies from the test API, filtered to our containers.""" + try: + # Get all supplies and filter to our containers + response = requests.get(f"{TEST_API_URL}/supplies", timeout=5) + if response.status_code == 200: + all_supplies = response.json() + # Filter to only our containers + filtered = [s for s in all_supplies if s['location'] in CONTAINER_NAMES] + return filtered + else: + print(f"Warning: API returned status {response.status_code}") + return [] + except requests.exceptions.RequestException as e: + print(f"Warning: Error fetching supplies from API: {e}") + return [] + + +def move_supplies_via_api(from_container, to_container): + """Move all supplies from one container to another via API.""" + try: + # Get all supplies in source container + response = requests.get(f"{TEST_API_URL}/supplies?location={from_container}", timeout=5) + if response.status_code != 200: + messagebox.showerror("Error", f"Failed to fetch supplies from {from_container}") + return False + + supplies = response.json() + if not supplies: + messagebox.showinfo("Info", f"No supplies in {from_container} to move.") + return False + 
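+        # (Editorial sketch, not part of the original patch) the loop below
+        # assumes each row carries 'name' and 'amount'; a defensive filter
+        # keeps one malformed API row from aborting the whole move:
+        supplies = [s for s in supplies if s.get('name') and s.get('amount') is not None]
+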
+ # Move each supply + moved_count = 0 + failed_count = 0 + + for supply in supplies: + move_data = { + 'name': supply['name'], + 'from_location': from_container, + 'to_location': to_container, + 'amount': supply['amount'] # Move all + } + + try: + move_response = requests.post(f"{TEST_API_URL}/supplies/move", json=move_data, timeout=5) + if move_response.status_code == 200: + moved_count += 1 + else: + failed_count += 1 + error_msg = move_response.json().get('error', 'Unknown error') + print(f"Failed to move {supply['name']}: {error_msg}") + except requests.exceptions.RequestException as e: + failed_count += 1 + print(f"Error moving {supply['name']}: {e}") + + if failed_count > 0: + messagebox.showwarning( + "Partial Success", + f"Moved {moved_count} supply type(s), {failed_count} failed." ) - inserted += 1 - except Exception as e: - print(f" โœ— Failed to insert {name}: {e}") - failed += 1 - - print(f" โœ“ Inserted {inserted} supply item(s), {failed} failed") - return inserted, failed + else: + # Success - no popup, just refresh + pass + + return moved_count > 0 + + except requests.exceptions.RequestException as e: + messagebox.showerror("Error", f"Failed to move supplies: {e}") + return False -def display_table_contents(table_name, columns, rows): - """Display formatted table contents (console output).""" - print(f"\n๐Ÿ“Š {table_name.upper()} Table Contents:") - print("=" * 80) - - if not rows: - print(f" (empty)") +def create_table_viewer(locations_data=None, supplies_data=None, cleanup_callback=None): + """ + Create and display the interactive table viewer window. + + Args: + locations_data: Optional list of location tuples (for test_gui.py integration) + supplies_data: Optional list of supply tuples (for test_gui.py integration) + cleanup_callback: Optional callback function to call when window closes + """ + try: + import tkinter as tk + except ImportError: + print("โœ— Tkinter not available. 
Cannot display GUI.") return - # Calculate column widths - col_widths = [len(col) for col in columns] - for row in rows: - for i, val in enumerate(row): - col_widths[i] = max(col_widths[i], len(str(val)) if val else 0) - - # Print header - header = " | ".join(col.ljust(col_widths[i]) for i, col in enumerate(columns)) - print(f" {header}") - print(" " + "-" * len(header)) - - # Print rows - for row in rows: - row_str = " | ".join(str(val).ljust(col_widths[i]) if val is not None else "NULL".ljust(col_widths[i]) - for i, val in enumerate(row)) - print(f" {row_str}") - - print(f"\n Total rows: {len(rows)}") - - -def create_table_viewer(locations_data, supplies_data, test_db_name=None, base_params=None, cleanup_callback=None): - """Create an interactive tkinter window to display and move supplies between containers.""" root = tk.Tk() - root.title("Container Supplies - Move Supplies Between Containers") - root.geometry("1000x700") - - # Set up cleanup when window closes - cleanup_called = {'value': False} + root.title("Container Supplies - Test API") + root.geometry("900x600") - def on_closing(): - if cleanup_callback and not cleanup_called['value']: - cleanup_called['value'] = True + # Set up cleanup callback if provided + if cleanup_callback: + def on_closing(): cleanup_callback() - root.destroy() + root.destroy() + root.protocol("WM_DELETE_WINDOW", on_closing) - root.protocol("WM_DELETE_WINDOW", on_closing) + # Check API availability + api_available = check_api_available() + if not api_available: + warning_label = tk.Label( + root, + text="โš  Test API not available!\nMake sure 'api-test' service is running on port 5001.", + fg="red", + font=("Arial", 12, "bold"), + justify=tk.CENTER + ) + warning_label.pack(pady=20) + root.mainloop() + return - # Get container names - container_names = [row[0] for row in locations_data if 'Container' in row[0]] - if len(container_names) < 3: - container_names = ['Container A', 'Container B', 'Container C'] + # Main frame + main_frame = tk.Frame(root, padx=20, pady=20) + main_frame.pack(fill=tk.BOTH, expand=True) - # Test database connection at startup - db_connection_available = False - try: - # Try to connect to test if connection is available - if test_db_name is None or base_params is None: - base_params_test = parse_database_url(BASE_DATABASE_URL) - test_db_name_test = f"{base_params_test['database']}_test" - else: - base_params_test = base_params - test_db_name_test = test_db_name - - db_params_test = { - 'host': base_params_test['host'] if base_params_test['host'] != 'db' else 'localhost', - 'port': base_params_test['port'], - 'user': base_params_test['user'], - 'password': base_params_test['password'], - 'database': test_db_name_test - } - - # Try to connect with different auth plugins - auth_plugins = [None, 'caching_sha2_password', 'mysql_native_password'] - for auth_plugin in auth_plugins: - try: - test_params = db_params_test.copy() - if auth_plugin: - test_params['auth_plugin'] = auth_plugin - test_conn = mysql.connector.connect(**test_params) - test_conn.close() - db_connection_available = True - break - except: - continue - except: - db_connection_available = False - - # Database connection for refreshing data - # Use test database only (passed from main function) - def get_db_connection(): - if not db_connection_available: - raise Exception("Database connection not available") - - # Use parameters passed from main function - if test_db_name is None or base_params is None: - # Fallback to parsing from environment - base_params_fallback = 
parse_database_url(BASE_DATABASE_URL) - test_db_name_fallback = f"{base_params_fallback['database']}_test" - else: - base_params_fallback = base_params - test_db_name_fallback = test_db_name - - db_params = { - 'host': base_params_fallback['host'], - 'port': base_params_fallback['port'], - 'user': base_params_fallback['user'], - 'password': base_params_fallback['password'], - 'database': test_db_name_fallback - } - # If host is 'db' and we're not in Docker, use localhost - if db_params.get('host') == 'db' and not os.path.exists("/app"): - db_params['host'] = 'localhost' - - # Try different authentication plugins in order - auth_plugins = [None, 'caching_sha2_password', 'mysql_native_password'] - last_error = None - - for auth_plugin in auth_plugins: - try: - test_params = db_params.copy() - if auth_plugin: - test_params['auth_plugin'] = auth_plugin - return mysql.connector.connect(**test_params) - except mysql.connector.errors.DatabaseError as e: - last_error = e - error_str = str(e).lower() - # If it's an auth/plugin error, try next plugin - if 'auth' in error_str or 'plugin' in error_str: - continue - # If it's a different error, raise it immediately - raise - except Exception as e: - last_error = e - # For non-auth errors, try next plugin anyway - continue - - # If all plugins failed, raise the last error - if last_error: - raise last_error - raise Exception("Failed to connect to database with any authentication plugin") - - def refresh_supplies(): - """Refresh supplies data from test database - only for our 3 containers.""" - if not db_connection_available: - return [] # Return empty if connection not available - - try: - # Read directly from test database, not API - conn = get_db_connection() - cur = conn.cursor() - - # Get supplies from test database for our containers - placeholders = ','.join(['%s'] * len(container_names)) - cur.execute( - f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name", - container_names - ) - supplies = cur.fetchall() - - cur.close() - conn.close() - - return list(supplies) - except Exception as e: - print(f"Warning: Error fetching supplies from test database: {e}") - return [] + # Title + title_label = tk.Label(main_frame, text="Container Supplies (Test API)", font=("Arial", 16, "bold")) + title_label.pack(pady=(0, 20)) - def update_display(supplies_rows=None): - """Update the display with current supplies. - - Args: - supplies_rows: Optional list of supply rows. If None, fetches from database. 
- """ - # Get fresh data if not provided - if supplies_rows is None: - try: - supplies = refresh_supplies() - except Exception as e: - # If refresh fails, show error but don't crash - print(f"Warning: Could not refresh from database: {e}") - return - else: - supplies = supplies_rows + # Container frames (3 columns) + containers_frame = tk.Frame(main_frame) + containers_frame.pack(fill=tk.BOTH, expand=True) + + container_frames = {} + container_labels = {} + + # Define functions before creating buttons (to avoid closure issues) + def update_display(supplies_data=None): + """Update the display with current supplies.""" + if supplies_data is None: + supplies_data = get_supplies_from_api() # Group supplies by location supplies_by_location = {} - for row in supplies: - location = row[4] # location is last column + for supply in supplies_data: + # Handle both dict (from API) and tuple (from test_gui.py) formats + if isinstance(supply, dict): + location = supply['location'] + name = supply['name'] + amount = supply['amount'] + else: + # Tuple format: (id, name, amount, last_order_date, location) + supply_id, name, amount, last_order_date, location = supply + supply = {'id': supply_id, 'name': name, 'amount': amount, 'location': location} + if location not in supplies_by_location: supplies_by_location[location] = [] - supplies_by_location[location].append(row) + supplies_by_location[location].append(supply) # Update each container display - for container_name in container_names: + for container_name in CONTAINER_NAMES: if container_name in container_frames: listbox = container_frames[container_name] listbox.delete(0, tk.END) @@ -384,8 +316,11 @@ def update_display(supplies_rows=None): if container_name in supplies_by_location: total_items = 0 for supply in supplies_by_location[container_name]: - name = supply[1] # name - amount = supply[2] # amount + if isinstance(supply, dict): + name = supply['name'] + amount = supply['amount'] + else: + _, name, amount, _, _ = supply total_items += amount listbox.insert(tk.END, f"{name}: {amount}") @@ -399,177 +334,18 @@ def update_display(supplies_rows=None): if container_name in container_labels: container_labels[container_name].config(text=f"{container_name} (empty)") - def move_supply(from_container, to_container): - """Move all supplies from one container to another in test database.""" - if not db_connection_available: - tk.messagebox.showwarning( - "Database Unavailable", - "Cannot move supplies: Database connection is not available.\n\n" - "This may be due to MySQL authentication issues when connecting from Windows to Docker.\n" - "The data is displayed in read-only mode." 
- ) - return - - conn = None - try: - # Get all supplies in source container from test database - # Create a fresh connection to avoid any auth issues - conn = get_db_connection() - cur = conn.cursor() - - cur.execute( - "SELECT id, name, amount, last_order_date, location FROM supplies WHERE location = %s", - (from_container,) - ) - supplies_to_move = cur.fetchall() - - if not supplies_to_move: - tk.messagebox.showinfo("Info", f"No supplies in {from_container} to move.") - cur.close() - if conn: - conn.close() - return - - # Move each supply in the test database - moved_count = 0 - failed_count = 0 - - for supply in supplies_to_move: - supply_id, name, amount, last_order_date, location = supply - try: - # Check if supply already exists in target location - cur.execute( - "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", - (name, to_container) - ) - existing = cur.fetchone() - - if existing: - # Merge: add amounts - existing_id, existing_amount = existing - new_amount = existing_amount + amount - cur.execute( - "UPDATE supplies SET amount = %s WHERE id = %s", - (new_amount, existing_id) - ) - # Delete from source - cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) - else: - # Move: update location - cur.execute( - "UPDATE supplies SET location = %s WHERE id = %s", - (to_container, supply_id) - ) - - moved_count += 1 - except mysql.connector.errors.DatabaseError as e: - failed_count += 1 - error_str = str(e).lower() - # If it's an auth/packet error, try to reconnect - if 'malformed packet' in error_str or 'auth' in error_str or 'plugin' in error_str: - print(f"Connection error moving {name}, will retry with fresh connection: {e}") - # Close current connection and try again with fresh one - try: - cur.close() - if conn: - conn.close() - except: - pass - # Retry with fresh connection - try: - conn = get_db_connection() - cur = conn.cursor() - # Re-check if supply exists in target (need to re-query) - cur.execute( - "SELECT id, amount FROM supplies WHERE name = %s AND location = %s", - (name, to_container) - ) - existing_retry = cur.fetchone() - # Retry the operation - if existing_retry: - existing_id, existing_amount = existing_retry - new_amount = existing_amount + amount - cur.execute("UPDATE supplies SET amount = %s WHERE id = %s", (new_amount, existing_id)) - cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,)) - else: - cur.execute("UPDATE supplies SET location = %s WHERE id = %s", (to_container, supply_id)) - moved_count += 1 - failed_count -= 1 # Adjust count since retry succeeded - except Exception as retry_e: - print(f"Retry also failed for {name}: {retry_e}") - else: - print(f"Error moving {name}: {e}") - except Exception as e: - failed_count += 1 - print(f"Error moving {name}: {e}") - - if conn: - conn.commit() - cur.close() - conn.close() - - # Refresh display - update_display() - - # Only show error messages, not success messages - if failed_count > 0: - tk.messagebox.showerror("Error", f"Failed to move {failed_count} supply type(s). {moved_count} succeeded.") - - except mysql.connector.errors.DatabaseError as e: - error_str = str(e).lower() - if 'malformed packet' in error_str or 'auth' in error_str: - # Try one more time with a completely fresh connection - try: - if conn: - try: - conn.close() - except: - pass - conn = get_db_connection() - # If we can get a connection, show a more helpful message - conn.close() - tk.messagebox.showerror("Error", f"Database connection issue. Please try again. 
Error: {str(e)}") - except Exception as cleanup_e: - tk.messagebox.showerror("Error", f"Failed to connect to database. Make sure MySQL is running. Original error: {str(e)}, Cleanup error: {str(cleanup_e)}") - else: - tk.messagebox.showerror("Error", f"Failed to move supplies: {str(e)}") - except Exception as e: - tk.messagebox.showerror("Error", f"Failed to move supplies: {str(e)}") - finally: - # Ensure connection is closed - if conn: - try: - conn.close() - except: - pass - - # Main container - main_frame = tk.Frame(root, padx=20, pady=20) - main_frame.pack(fill=tk.BOTH, expand=True) - - # Show warning if database connection unavailable - if not db_connection_available: - warning_label = tk.Label( - main_frame, - text="โš  Database connection unavailable - Move operations disabled\n(Data is read-only)", - font=("Arial", 10), - fg="orange", - bg="yellow" - ) - warning_label.pack(pady=10) + def refresh_display(): + """Refresh the display from API.""" + supplies = get_supplies_from_api() + update_display(supplies) - # Title - title_label = tk.Label(main_frame, text="Container Supplies", font=("Arial", 16, "bold")) - title_label.pack(pady=(0, 20)) + def move_and_refresh(from_container, to_container): + """Move supplies and refresh display.""" + if move_supplies_via_api(from_container, to_container): + refresh_display() - # Container frames (3 columns) - containers_frame = tk.Frame(main_frame) - containers_frame.pack(fill=tk.BOTH, expand=True) - - container_frames = {} - container_labels = {} - - for i, container_name in enumerate(container_names[:3]): + # Now create the UI elements + for i, container_name in enumerate(CONTAINER_NAMES): # Container column col_frame = tk.Frame(containers_frame) col_frame.grid(row=0, column=i, padx=20, sticky="nsew") @@ -589,308 +365,125 @@ def move_supply(from_container, to_container): buttons_frame = tk.Frame(col_frame) buttons_frame.pack(pady=10) - for other_container in container_names[:3]: + for other_container in CONTAINER_NAMES: if other_container != container_name: btn_text = f"Move to {other_container}" btn = tk.Button( buttons_frame, text=btn_text, - command=lambda f=container_name, t=other_container: move_supply(f, t), + command=lambda f=container_name, t=other_container: move_and_refresh(f, t), width=20, height=2, font=("Arial", 10, "bold"), - bg="#4CAF50" if db_connection_available else "#cccccc", - fg="white", - state=tk.NORMAL if db_connection_available else tk.DISABLED + bg="#4CAF50", + fg="white" ) btn.pack(pady=5, fill=tk.X) - # Refresh button (only enabled if DB connection available) + # Refresh button refresh_btn = tk.Button( main_frame, - text="Refresh", - command=update_display, - width=15, + text="๐Ÿ”„ Refresh", + command=refresh_display, + width=20, height=2, - state=tk.NORMAL if db_connection_available else tk.DISABLED + font=("Arial", 10, "bold"), + bg="#2196F3", + fg="white" ) refresh_btn.pack(pady=10) - # Close button - close_btn = tk.Button( - main_frame, - text="Close", - command=root.destroy, - width=15, - height=2 - ) - close_btn.pack() - - # Initial display - use passed data, convert to same format as database rows - # supplies_data format: (id, name, amount, last_order_date, location) - initial_supplies = [] - for row in supplies_data: - # Convert tuple to list format matching database query result - initial_supplies.append(row) - - update_display(initial_supplies) + # Initial display + if locations_data and supplies_data: + # Use provided data (from test_gui.py) + update_display(supplies_data) + else: + # Fetch from API 
(standalone mode) + refresh_display() root.mainloop() def main(): - """Main test function - uses test database only, deletes and recreates it.""" - root_conn = None - conn = None - try: - # Parse base database URL - base_params = parse_database_url(BASE_DATABASE_URL) - test_db_name = f"{base_params['database']}_test" - - print("๐Ÿงช Starting Locations & Supplies Data Test") - print(f"๐Ÿ“Š Test database: {test_db_name}") - print("=" * 60) - print(f"๐Ÿ”Œ Connecting to MySQL server...") - - # Use root credentials for database operations - root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") - - # Connect as root - root_conn_params = { - 'host': base_params['host'], - 'port': base_params['port'], - 'user': 'root', - 'password': root_password - } - - root_conn = mysql.connector.connect(**root_conn_params) - root_cur = root_conn.cursor() - - # Verify connection - root_cur.execute("SELECT VERSION();") - version = root_cur.fetchone()[0] - print(f"โœ“ Connected to MySQL: {version}") - - # STEP 1: Drop test database if it exists (clean start) - print(f"\n๐Ÿ—‘๏ธ Dropping test database '{test_db_name}' if it exists...") - root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") - root_conn.commit() - print(f"โœ“ Test database dropped (if it existed)") - - # Small delay to ensure database is fully dropped - time.sleep(0.5) - - # STEP 2: Create fresh test database - print(f"\n๐Ÿ“ฆ Creating fresh test database '{test_db_name}'...") - root_cur.execute(f"CREATE DATABASE `{test_db_name}`") - - # Grant permissions to mysqluser on the test database - username = base_params['user'] - root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") - root_cur.execute("FLUSH PRIVILEGES") - root_conn.commit() - print(f"โœ“ Test database created and permissions granted") - - root_cur.close() - root_conn.close() - root_conn = None - - # Small delay to ensure privileges are propagated - time.sleep(0.5) - - # STEP 3: Connect as mysqluser to test database - print(f"\n๐Ÿ”Œ Connecting as '{base_params['user']}' to test database...") - conn_params = { - 'host': base_params['host'], - 'port': base_params['port'], - 'user': base_params['user'], - 'password': base_params['password'], - 'database': test_db_name - } - - try: - conn = mysql.connector.connect(**conn_params) - cur = conn.cursor() - print(f"โœ“ Connected successfully to test database") - except mysql.connector.Error as e: - print(f"โœ— Failed to connect as {base_params['user']}: {e}") - print(f" Attempting to verify privileges...") - # Try to reconnect as root to check if user exists - root_conn = mysql.connector.connect(**root_conn_params) - root_cur = root_conn.cursor() - root_cur.execute(f"SELECT User, Host FROM mysql.user WHERE User = '{username}'") - users = root_cur.fetchall() - if not users: - print(f" โš  User '{username}' does not exist. 
Creating user...") - root_cur.execute(f"CREATE USER IF NOT EXISTS '{username}'@'%' IDENTIFIED BY '{base_params['password']}'") - root_cur.execute(f"GRANT ALL PRIVILEGES ON `{test_db_name}`.* TO '{username}'@'%'") - root_cur.execute("FLUSH PRIVILEGES") - root_conn.commit() - root_cur.close() - root_conn.close() - root_conn = None - time.sleep(0.5) - # Retry connection - conn = mysql.connector.connect(**conn_params) - cur = conn.cursor() - print(f"โœ“ Connected successfully after creating user") - else: - raise - - # STEP 4: Initialize schema in test database - created_count, total_count = initialize_schema(cur, test_db_name) - conn.commit() - - # STEP 5: Insert sample locations - loc_inserted, loc_skipped = insert_sample_locations(cur) - conn.commit() - - # STEP 6: Insert sample supplies - container_names = ['Container A', 'Container B', 'Container C'] - sup_inserted, sup_failed = insert_sample_supplies(cur) - conn.commit() - - # STEP 7: Get table contents for GUI display - only our 3 containers - print("\n๐Ÿ“Š Fetching table contents...") - placeholders = ','.join(['%s'] * len(container_names)) - cur.execute( - f"SELECT name, x, y, width, height, type FROM locations WHERE name IN ({placeholders}) ORDER BY name", - container_names - ) - locations_data = cur.fetchall() - - cur.execute( - f"SELECT id, name, amount, last_order_date, location FROM supplies WHERE location IN ({placeholders}) ORDER BY location, name", - container_names - ) - supplies_data = cur.fetchall() - - # Display table contents in console - print("\n" + "=" * 60) - display_table_contents('locations', ['name', 'x', 'y', 'width', 'height', 'type'], locations_data) - display_table_contents('supplies', ['id', 'name', 'amount', 'last_order_date', 'location'], supplies_data) - - # Summary - print("\n" + "=" * 60) - print("โœ… TEST SUMMARY:") - print(f" Locations: {loc_inserted} inserted, {loc_skipped} skipped (already existed)") - print(f" Supplies: {sup_inserted} inserted, {sup_failed} failed") - print("=" * 60) - - # Close user connection - database stays alive for GUI - cur.close() - conn.close() - conn = None - # IMPORTANT: Database must remain alive for GUI to use it! - - # Convert data for JSON output (for test_gui.py to display locally) - locations_json = [list(row) for row in locations_data] - supplies_json = [list(row) for row in supplies_data] - - # Convert date objects to strings - for row in supplies_json: - if row[3] is not None and hasattr(row[3], 'isoformat'): - row[3] = row[3].isoformat() - - table_data = { - 'locations': { - 'columns': ['name', 'x', 'y', 'width', 'height', 'type'], - 'data': locations_json - }, - 'supplies': { - 'columns': ['id', 'name', 'amount', 'last_order_date', 'location'], - 'data': supplies_json - } - } - - # Output JSON with special marker for test_gui.py to detect - print("\n" + "=" * 60) - print("TABLE_DATA_JSON_START") - print(json.dumps(table_data, indent=2)) - print("TABLE_DATA_JSON_END") - print("=" * 60) - - # IMPORTANT: Database MUST stay alive - do NOT drop it here! 
- # The GUI will be opened by test_gui.py locally, and cleanup will happen when GUI closes - # If running directly (not through test_gui.py), we'll handle cleanup differently - - # Check if we're being run through test_gui.py (which will open GUI locally) - # test_gui.py will detect the JSON output and open the viewer - # So we should NOT drop the database here - let test_gui.py handle it - - # Exit with appropriate code (but don't drop database - let GUI handle cleanup) - if sup_failed > 0: - print("\nโš  Some supplies failed to insert (may be due to missing locations)") - # Drop database on failure - print(f"\n๐Ÿ—‘๏ธ Cleaning up: Dropping test database '{test_db_name}'...") - root_conn = mysql.connector.connect(**root_conn_params) - root_cur = root_conn.cursor() - root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") - root_conn.commit() - root_cur.close() - root_conn.close() - print(f"โœ“ Test database '{test_db_name}' dropped") - sys.exit(1) - else: - print("\nโœ… All data inserted successfully!") - # DO NOT drop database here - test_gui.py will open GUI and handle cleanup - # The database must stay alive for the GUI to use it - print(f" (Test database '{test_db_name}' will be cleaned up when GUI closes)") - sys.exit(0) - - except mysql.connector.Error as e: - print(f"\nโœ— Database error: {e}") - # Try to clean up test database on error - try: - if root_conn is None: - root_conn = mysql.connector.connect(**root_conn_params) - root_cur = root_conn.cursor() - root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") - root_conn.commit() - root_cur.close() - root_conn.close() - print(f"โœ“ Cleaned up test database '{test_db_name}'") - except: - pass + """Main test function.""" + print("๐Ÿงช Starting Locations & Supplies Data Test (via API)") + print(f"๐ŸŒ Test API: {TEST_API_URL}") + print("=" * 60) + + # Check if API is available + print("๐Ÿ”Œ Checking test API availability...") + print(f" Testing connection to: {TEST_API_URL.replace('/api', '')}/health") + if not check_api_available(): + print("โœ— Test API is not available!") + print(f" Make sure the 'api-test' service is running on port 5001") + print(f" Run 'make up' or 'make test' to start services") + print(f" Expected URL: {TEST_API_URL}") sys.exit(1) - except Exception as e: - print(f"\nโœ— Unexpected error: {e}") - import traceback - traceback.print_exc() - # Try to clean up test database on error - try: - base_params = parse_database_url(BASE_DATABASE_URL) - test_db_name = f"{base_params['database']}_test" - root_password = os.getenv("MYSQL_ROOT_PASSWORD", "rootpassword") - root_conn_params = { - 'host': base_params['host'], - 'port': base_params['port'], - 'user': 'root', - 'password': root_password - } - if root_conn is None: - root_conn = mysql.connector.connect(**root_conn_params) - root_cur = root_conn.cursor() - root_cur.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") - root_conn.commit() - root_cur.close() - root_conn.close() - print(f"โœ“ Cleaned up test database '{test_db_name}'") - except: - pass - sys.exit(1) - finally: - # Ensure connections are closed - if conn: - try: - conn.close() - except: - pass - if root_conn: - try: - root_conn.close() - except: - pass + + print("โœ“ Test API is available") + + # Create test locations + print("\n" + "=" * 60) + loc_created, loc_skipped = create_test_locations() + + # Create test supplies + print("\n" + "=" * 60) + sup_created, sup_failed = create_test_supplies() + + # Summary + print("\n" + "=" * 60) + print("โœ… TEST SUMMARY:") + print(f" Locations: 
{loc_created} created, {loc_skipped} skipped (already existed)") + print(f" Supplies: {sup_created} created, {sup_failed} failed") + print("=" * 60) + + # Get current data for display + print("\n๐Ÿ“Š Fetching current data from API...") + supplies_data = get_supplies_from_api() + + # Output JSON data for test_gui.py to display + table_data = { + 'locations': { + 'columns': ['name', 'x', 'y', 'width', 'height', 'type'], + 'data': [ + [name, 200 + i * 300, 200, 200, 200, 'cabinet'] + for i, name in enumerate(CONTAINER_NAMES) + ] + }, + 'supplies': { + 'columns': ['id', 'name', 'amount', 'last_order_date', 'location'], + 'data': [ + [ + s.get('id', 0), + s['name'], + s['amount'], + s.get('last_order_date'), + s['location'] + ] + for s in supplies_data + ] + } + } + + print("\n" + "=" * 60) + print("TABLE_DATA_JSON_START") + print(json.dumps(table_data, indent=2)) + print("TABLE_DATA_JSON_END") + print("=" * 60) + + # Only open GUI viewer if running locally (not in Docker) + # When running in Docker, test_gui.py will open the viewer locally + is_docker = os.path.exists("/app") or os.getenv("HOSTNAME", "").startswith("mysql_api") + + if not is_docker: + # Running locally - open GUI viewer + print("\n๐ŸชŸ Opening table viewer window...") + create_table_viewer() + else: + # Running in Docker - test_gui.py will open viewer locally + print("\n๐Ÿ“Š Test data ready (GUI will open locally via test_gui.py)") + + print("\nโœ… Test completed successfully!") if __name__ == "__main__": From e987c232362c6230a397869035887de0b5c4ceeb Mon Sep 17 00:00:00 2001 From: willzoo Date: Thu, 13 Nov 2025 13:10:42 -0500 Subject: [PATCH 20/20] Docker warning message --- Makefile | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index 1514901..134445c 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,14 @@ PROJECT_NAME=mysql_service COMPOSE=docker-compose -p $(PROJECT_NAME) +# Check if Docker is running +.PHONY: check-docker +check-docker: + @docker info >nul 2>&1 || (echo. && echo ERROR: Docker is not running. Please start Docker Desktop and try again. && echo. && exit /b 1) + ## up: Start the mysql, api (port 5000), and api-test (port 5001) containers and seed default locations .PHONY: up -up: +up: check-docker $(COMPOSE) up -d @echo "Waiting for services to be ready..." @timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true @@ -11,32 +16,32 @@ up: ## up-empty: Start the mysql, api (port 5000), and api-test (port 5001) containers without seeding data .PHONY: up-empty -up-empty: +up-empty: check-docker $(COMPOSE) up -d ## down: Stop and remove containers (keeps volumes) .PHONY: down -down: +down: check-docker $(COMPOSE) down ## status: Show status of all services .PHONY: status -status: +status: check-docker $(COMPOSE) ps ## logs: Follow logs from all services .PHONY: logs -logs: +logs: check-docker $(COMPOSE) logs -f ## mysql: Open a mysql shell into the mysql container .PHONY: mysql -mysql: +mysql: check-docker $(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb ## test: Open GUI test runner (starts services if not running, including test API on port 5001) .PHONY: test -test: +test: check-docker @echo "Ensuring services are running..." @$(COMPOSE) up -d @echo "Waiting for services to be ready..." 
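The check-docker guard above is written for cmd.exe (">nul", "echo.", "exit /b 1"), matching the Windows-first recipes elsewhere in this Makefile; on a POSIX shell the same "docker info" probe would need ">/dev/null". For a script that wants the same fail-fast behavior, the equivalent check sketched in Python (helper name is illustrative):

    import subprocess

    def docker_is_running(timeout_s: float = 10.0) -> bool:
        """`docker info` exits non-zero whenever the daemon is unreachable."""
        try:
            probe = subprocess.run(
                ["docker", "info"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                timeout=timeout_s,
            )
            return probe.returncode == 0
        except (FileNotFoundError, subprocess.TimeoutExpired):
            return False  # docker CLI missing, or the daemon hung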
@@ -45,12 +50,12 @@ test: ## build: Build or rebuild services .PHONY: build -build: +build: check-docker $(COMPOSE) build ## clean: Remove containers, networks, volumes, and orphans .PHONY: clean -clean: +clean: check-docker $(COMPOSE) down -v --remove-orphans docker volume prune -f
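Taken together, patch 19's client-side contract with the test API condenses to two calls: a health probe against the server root and the per-supply move endpoint. A self-contained sketch under those assumptions; the endpoint paths and payload keys come from the diffs above, while the helper names and example values are illustrative:

    import requests

    TEST_API_URL = "http://localhost:5001/api"  # local default from helpers.py

    def api_healthy(base: str = TEST_API_URL) -> bool:
        """GET <server-root>/health, stripping the trailing /api as check_api_available() does."""
        root = base[:-4] if base.endswith("/api") else base.rstrip("/")
        try:
            return requests.get(f"{root}/health", timeout=2).status_code == 200
        except requests.RequestException:
            return False

    def move_all(name: str, src: str, dst: str, amount: int) -> bool:
        """POST the same payload shape move_supplies_via_api() sends for each supply."""
        payload = {"name": name, "from_location": src, "to_location": dst, "amount": amount}
        resp = requests.post(f"{TEST_API_URL}/supplies/move", json=payload, timeout=5)
        return resp.status_code == 200

    if api_healthy():
        move_all("DISTRIBUTED SUPPLY", "Container A", "Container B", 5)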