diff --git a/ada-project-docs/wave_02.md b/ada-project-docs/wave_02.md
index acc1dc0a4..1f3560e3c 100644
--- a/ada-project-docs/wave_02.md
+++ b/ada-project-docs/wave_02.md
@@ -10,7 +10,7 @@ The following are required routes for wave 2. Feel free to implement the routes
 
 ### Tips
 
-- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any mispellings.
+- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any misspellings.
 - Use the tests in `tests/test_wave_02.py` to guide your implementation.
 - You may feel that there are missing tests and missing edge cases considered in this wave. This is intentional.
 - You have fulfilled wave 2 requirements if all of the wave 2 tests pass.
diff --git a/app/__init__.py b/app/__init__.py
index 2764c4cc8..088fc5129 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -30,5 +30,10 @@ def create_app(test_config=None):
     migrate.init_app(app, db)
 
     # Register Blueprints here
+    from .routes import tasks_bp
+    app.register_blueprint(tasks_bp)
+
+    from .routes import goals_bp
+    app.register_blueprint(goals_bp)
 
     return app
diff --git a/app/models/goal.py b/app/models/goal.py
index 8cad278f8..8c95c70b7 100644
--- a/app/models/goal.py
+++ b/app/models/goal.py
@@ -1,6 +1,23 @@
-from flask import current_app
+from flask import current_app, jsonify
 from app import db
 
 
 class Goal(db.Model):
     goal_id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String)
+    tasks = db.relationship("Task", backref="goal")
+
+    def goal_dict(self):
+        return {
+            "id": self.goal_id,
+            "title": self.title
+        }
+
+    @classmethod
+    def goal_arguments(cls, title_from_url):
+        # Exact title match first; fall back to a substring match, then to all goals.
+        if title_from_url:
+            goals = Goal.query.filter_by(title=title_from_url).all()
+            if not goals:
+                goals = Goal.query.filter(Goal.title.contains(title_from_url)).all()
+        else:
+            goals = Goal.query.all()
+        return goals
\ No newline at end of file
diff --git a/app/models/task.py b/app/models/task.py
index 39c89cd16..a95d4eaf9 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -1,6 +1,25 @@
-from flask import current_app
+from flask import current_app, request
 from app import db
+from dotenv import load_dotenv
+from sqlalchemy import desc, asc
 
 
 class Task(db.Model):
-    task_id = db.Column(db.Integer, primary_key=True)
+    task_id = db.Column(db.Integer, primary_key=True)  # autoincrement=True
+    title = db.Column(db.String)
+    description = db.Column(db.String)
+    completed_at = db.Column(db.DateTime, nullable=True)
+    goal_id = db.Column(db.Integer, db.ForeignKey('goal.goal_id'))
+
+    def task_dict(self):
+        task_dict = {
+            "id": self.task_id,
+            "title": self.title,
+            "description": self.description,
+            "is_complete": False if self.completed_at is None else True
+        }
+
+        if self.goal_id:
+            task_dict["goal_id"] = self.goal_id
+
+        return task_dict
\ No newline at end of file
diff --git a/app/routes.py b/app/routes.py
index 8e9dfe684..052db22b8 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -1,2 +1,255 @@
-from flask import Blueprint
+from flask import Blueprint, jsonify, make_response, request
+from app.models.task import Task
+from app.models.goal import Goal
+from sqlalchemy import asc, desc
+from app import db
+from datetime import datetime
+
+tasks_bp = Blueprint("tasks", __name__, url_prefix="/tasks")
+goals_bp = Blueprint("goals", __name__, url_prefix="/goals")
+
+@goals_bp.route("", methods=["POST", "GET"])
+def handle_goal():
+    if request.method == "POST":
+        request_body = request.get_json()
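+        # Guard clause: a POST body without a "title" key is rejected as invalid data.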
+        if "title" not in request_body:
+            return {
+                "details": "Invalid data"
+            }, 400
+
+        new_goal = Goal(
+            title=request_body["title"]
+        )
+
+        db.session.add(new_goal)
+        db.session.commit()
+
+        return {
+            "goal": {
+                "id": new_goal.goal_id,
+                "title": new_goal.title
+            }
+        }, 201
+
+    elif request.method == "GET":
+        sorting_goals = request.args.get('sort')
+        goal_list = None
+        if sorting_goals == "desc":
+            goal_list = Goal.query.order_by(Goal.title.desc())
+        elif sorting_goals == "asc":
+            goal_list = Goal.query.order_by(Goal.title.asc())
+        else:
+            goal_list = Goal.query.all()
+        goals_response = []
+        for goal in goal_list:
+            goals_response.append({
+                "id": goal.goal_id,
+                "title": goal.title,
+            })
+
+        return jsonify(goals_response)
+
+@goals_bp.route("/<goal_id>", methods=["GET", "PUT", "DELETE"])
+def handle_goal_get(goal_id):
+    goal = Goal.query.get(goal_id)
+    if goal is None:
+        return ("", 404)
+
+    if request.method == "GET":
+        return {
+            "goal": {
+                "id": goal.goal_id,
+                "title": goal.title,
+            }
+        }
+    if request.method == "PUT":
+        form_data = request.get_json()
+
+        goal.title = form_data["title"]
+
+        db.session.commit()
+
+        return jsonify({
+            "goal": {
+                "id": goal.goal_id,
+                "title": goal.title,
+            }
+        }), 200
+
+    elif request.method == "DELETE":
+        db.session.delete(goal)
+        db.session.commit()
+
+        return jsonify({
+            "details": f'Goal {goal.goal_id} "{goal.title}" successfully deleted'
+        }), 200
+
+
+@goals_bp.route("/<goal_id>/tasks", methods=["POST", "GET"])
+def post_tasked_goal(goal_id):
+
+    goal = Goal.query.get(goal_id)
+
+    if goal is None:
+        return ("", 404)
+
+    if request.method == "POST":
+        request_body = request.get_json()
+
+        tasks_instances = []
+        for task_id in request_body["task_ids"]:
+            tasks_instances.append(Task.query.get(task_id))
+
+        goal.tasks = tasks_instances
+
+        db.session.commit()
+
+        task_ids = []
+        for task in goal.tasks:
+            task_ids.append(task.task_id)
+
+        response_body = {
+            "id": goal.goal_id,
+            "task_ids": task_ids
+        }
+
+        return jsonify(response_body), 200
+
+    if request.method == "GET":
+        tasks_response = []
+        for task in goal.tasks:
+            tasks_response.append({
+                "id": task.task_id,
+                "goal_id": task.goal_id,
+                "title": task.title,
+                "description": task.description,
+                "is_complete": bool(task.completed_at)
+            })
+        response_body = {
+            "id": goal.goal_id,
+            "title": goal.title,
+            "tasks": tasks_response
+        }
+        return jsonify(response_body), 200
+
+
+@tasks_bp.route("", methods=["GET", "POST"])
+def handle_task():
+    if request.method == "GET":
+
+        sorting_task = request.args.get('sort')
+        task_list = None
+        if sorting_task == "desc":
+            task_list = Task.query.order_by(Task.title.desc())  # descending by title
+        elif sorting_task == "asc":
+            task_list = Task.query.order_by(Task.title.asc())  # ascending by title
+        else:
+            task_list = Task.query.all()
+        tasks_response = []
+        for task in task_list:
+            tasks_response.append({
+                "id": task.task_id,
+                "title": task.title,
+                "description": task.description,
+                "is_complete": False if task.completed_at is None else True
+            })
+
+        return jsonify(tasks_response)
+
+    elif request.method == "POST":
+        request_body = request.get_json()
+        if "title" not in request_body or "description" not in request_body or "completed_at" not in request_body:
+            return {
+                "details": "Invalid data"
+            }, 400
+
+        new_task = Task(
+            title=request_body["title"],
+            description=request_body["description"],
+            completed_at=request_body["completed_at"]
+        )
+
+        db.session.add(new_task)
+        db.session.commit()
+
+        return {
+            "task": new_task.task_dict()
+        }, 201
+
+def update_completion(task_id, value):
+    task = Task.query.get(task_id)
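+    # Query.get returns None when no task matches the id, which triggers the 404 below.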
+
+    if not task:
+        return ("Task not found", 404)
+
+    task.completed_at = value
+    db.session.commit()
+
+    return {
+        "task": task.task_dict()
+    }, 200
+
+@tasks_bp.route("/<task_id>/mark_complete", methods=["PATCH"])
+def mark_complete(task_id):
+    return update_completion(task_id, datetime.now())
+
+@tasks_bp.route("/<task_id>/mark_incomplete", methods=["PATCH"])
+def mark_incomplete(task_id):
+    return update_completion(task_id, None)
+
+
+@tasks_bp.route("/<task_id>", methods=["GET", "PUT", "DELETE"])
+def handle_task_get(task_id):
+    task = Task.query.get(task_id)
+    if task is None:
+        return ("", 404)
+
+    if request.method == "GET":
+        response_body = {}
+        response_body["task"] = task.task_dict()
+
+        return jsonify(response_body)
+
+    if request.method == "PUT":
+        form_data = request.get_json()
+
+        task.title = form_data["title"]
+        task.description = form_data["description"]
+
+        db.session.commit()
+
+        return jsonify({
+            "task": {
+                "id": task.task_id,
+                "title": task.title,
+                "description": task.description,
+                "is_complete": False if task.completed_at is None else True
+            }
+        }), 200
+
+    elif request.method == "DELETE":
+        db.session.delete(task)
+        db.session.commit()
+
+        return jsonify({
+            "details": f'Task {task.task_id} "{task.title}" successfully deleted'
+        }), 200
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 000000000..98e4f9c44
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..f8ed4801f
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,45 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 000000000..8b3fb3353
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,96 @@
+from __future__ import with_statement
+
+import logging
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from flask import current_app
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
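+# fileConfig() reads the [loggers], [handlers] and [formatters] sections of alembic.ini.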
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option(
+    'sqlalchemy.url',
+    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
+target_metadata = current_app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix='sqlalchemy.',
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            process_revision_directives=process_revision_directives,
+            **current_app.extensions['migrate'].configure_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 000000000..2c0156303
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/1b6ffde0dc2d_.py b/migrations/versions/1b6ffde0dc2d_.py
new file mode 100644
index 000000000..a7f966d91
--- /dev/null
+++ b/migrations/versions/1b6ffde0dc2d_.py
@@ -0,0 +1,39 @@
+"""empty message
+
+Revision ID: 1b6ffde0dc2d
+Revises:
+Create Date: 2021-11-05 18:17:18.453055
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '1b6ffde0dc2d'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('goal',
+    sa.Column('goal_id', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('goal_id')
+    )
+    op.create_table('task',
+    sa.Column('task_id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('title', sa.String(), nullable=True),
+    sa.Column('description', sa.String(), nullable=True),
+    sa.Column('completed_at', sa.DateTime(), nullable=True),
+    sa.PrimaryKeyConstraint('task_id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('task')
+    op.drop_table('goal')
+    # ### end Alembic commands ###