diff --git a/app/__init__.py b/app/__init__.py
index 2764c4cc8..cac5d268b 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -4,12 +4,10 @@ import os
 from dotenv import load_dotenv
 
-
 db = SQLAlchemy()
 migrate = Migrate()
 load_dotenv()
 
-
 def create_app(test_config=None):
     app = Flask(__name__)
     app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
@@ -29,6 +27,10 @@ def create_app(test_config=None):
     db.init_app(app)
     migrate.init_app(app, db)
 
-    # Register Blueprints here
+    from .routes import tasks_bp
+    app.register_blueprint(tasks_bp)
+
+    from .routes import goals_bp
+    app.register_blueprint(goals_bp)
 
     return app
diff --git a/app/models/goal.py b/app/models/goal.py
index 8cad278f8..640f5ea0a 100644
--- a/app/models/goal.py
+++ b/app/models/goal.py
@@ -1,6 +1,11 @@
 from flask import current_app
+from sqlalchemy.orm import backref
 from app import db
+from sqlalchemy import ForeignKey, update
+from sqlalchemy.orm import relationship
 
 
 class Goal(db.Model):
     goal_id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String)
+    tasks = db.relationship('Task', backref='goal', lazy=True)
\ No newline at end of file
diff --git a/app/models/task.py b/app/models/task.py
index 39c89cd16..768e24345 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -1,6 +1,13 @@
 from flask import current_app
 from app import db
+from sqlalchemy import ForeignKey, update
+from sqlalchemy.orm import relationship
 
 
 class Task(db.Model):
-    task_id = db.Column(db.Integer, primary_key=True)
+    task_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+    title = db.Column(db.String)
+    description = db.Column(db.String)
+    completed_at = db.Column(db.DateTime, nullable=True, default=None)
+    is_complete = db.Column(db.Boolean, default=False)
+    goal_id = db.Column(db.Integer, db.ForeignKey('goal.goal_id'), nullable=True)
\ No newline at end of file
diff --git a/app/routes.py b/app/routes.py
index 8e9dfe684..864180562 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -1,2 +1,314 @@
-from flask import Blueprint
+import re
+from app.models.task import Task
+from app.models.goal import Goal
+from app import db
+from flask import request, Blueprint, make_response, jsonify
+from datetime import datetime
+import os
+import requests
+from dotenv import load_dotenv
+tasks_bp = Blueprint("tasks", __name__, url_prefix="/tasks")
+goals_bp = Blueprint("goals", __name__, url_prefix="/goals")
+
+load_dotenv()
+
+def post_message_to_slack(text):
+    SLACK_TOKEN = os.environ.get('SLACKBOT_TOKEN')
+    slack_path = "https://slack.com/api/chat.postMessage"
+    query_params = {
+        'channel': 'task-notifications',
+        'text': text
+    }
+    headers = {'Authorization': f"Bearer {SLACK_TOKEN}"}
+    requests.post(slack_path, params=query_params, headers=headers)
+
+@goals_bp.route("", methods=["GET", "POST"])
+def handle_goals():
+    if request.method == "POST":
+        request_body = request.get_json()
+        title = request_body.get("title")
+
+        if "title" not in request_body:
+            return jsonify({"details": "Invalid data"}), 400
+
+        new_goal = Goal(title=request_body["title"])
+
+        db.session.add(new_goal)
+        db.session.commit()
+
+        committed_goal = {
+            "goal": {
+                "id": new_goal.goal_id,
+                "title": new_goal.title
+            }
+        }
+
+        return jsonify(committed_goal), 201
+
+    elif request.method == "GET":
+        goals = Goal.query.all()
+        goals_response = []
+
+        for goal in goals:
+            goals_response.append({
+                "title": goal.title,
+                "id": goal.goal_id
+            })
+
+        return jsonify(goals_response), 200
+
+@goals_bp.route("/<goal_id>", methods=["GET", "PUT", "DELETE"])
+def handle_goal(goal_id):
+    goal = Goal.query.get_or_404(goal_id)
+
+    if request.method == "GET":
+        if goal == None:
+            return make_response("No matching goal found"), 404
+
+        tasks = []
+
+        for item in goal.tasks:
+            tasks.append(item.task_id)  # append IDs so the response stays JSON-serializable
+
+        selected_goal = {"goal":
+            {"id": goal.goal_id,
+             "title": goal.title,
+             "tasks": tasks
+             }}
+        return selected_goal
+
+    elif request.method == "PUT":
+        form_data = request.get_json()
+        goal.title = form_data["title"]
+
+        db.session.commit()
+
+        committed_goal = {"goal":
+            {"id": goal.goal_id,
+             "title": goal.title,
+             }}
+        return jsonify(committed_goal), 200
+
+    elif request.method == "DELETE":
+        db.session.delete(goal)
+        db.session.commit()
+        goal_response_body = {
+            "details":
+                f'Goal {goal.goal_id} successfully deleted'
+        }
+        return jsonify(goal_response_body)
+
+@goals_bp.route("/<goal_id>/tasks", methods=["GET", "POST"])
+def handle_goal_tasks(goal_id):
+    goal = Goal.query.get_or_404(goal_id)
+
+    if request.method == "GET":
+        tasks = goal.tasks
+        list_of_tasks = []
+
+        for task in tasks:
+            if task.completed_at == None:
+                completed_at = False
+            else:
+                completed_at = True
+
+            individual_task = {
+                "id": task.task_id,
+                "title": task.title,
+                "description": task.description,
+                "is_complete": completed_at,
+                "goal_id": goal.goal_id
+            }
+            list_of_tasks.append(individual_task)
+
+        return make_response({
+            "id": goal.goal_id,
+            "title": goal.title,
+            "tasks": list_of_tasks
+        })
+
+    if request.method == "POST":
+        goal = Goal.query.get(goal_id)
+        request_body = request.get_json()
+
+        for ids_per_task in request_body["task_ids"]:
+            task = Task.query.get(ids_per_task)
+            goal.tasks.append(task)
+        db.session.add(goal)
+        db.session.commit()
+
+        return make_response({
+            "id": goal.goal_id,
+            "task_ids": request_body["task_ids"]
+        })
+
+@tasks_bp.route("", methods=["GET", "POST"])
+def handle_tasks():
+    if request.method == "POST":
+        request_body = request.get_json()
+        title = request_body.get("title")
+        description = request_body.get("description")
+
+        if not title or not description or "completed_at" not in request_body:
+            return make_response(jsonify({"details": "Invalid data"}), 400)
+
+        new_task = Task(
+            title=request_body["title"],
+            description=request_body["description"],
+            completed_at=request_body["completed_at"])
+
+        if new_task.completed_at == None:
+            completed_at = False
+        else:
+            completed_at = True
+
+        db.session.add(new_task)
+        db.session.commit()
+
+        return make_response({
+            "task": {
+                "id": new_task.task_id,
+                "title": new_task.title,
+                "description": new_task.description,
+                "is_complete": completed_at
+            }}, 201)
+
+    elif request.method == "GET":
+        # Chain the optional title filter and sort order onto one query so a
+        # ?title= filter is not discarded when ?sort= is (or is not) supplied.
+        query = Task.query
+
+        url_title = request.args.get("title")
+        if url_title:
+            query = query.filter_by(title=url_title)
+
+        sort = request.args.get("sort")
+        if sort == "asc":
+            query = query.order_by(Task.title.asc())
+        elif sort == "desc":
+            query = query.order_by(Task.title.desc())
+
+        tasks = query.all()
+
+        tasks_response = []
+
+        for task in tasks:
+            if task.completed_at == None:
+                completed_at = False
+            else:
+                completed_at = True
+
+            tasks_response.append({
+                "id": task.task_id,
+                "title": task.title,
+                "description": task.description,
+                "is_complete": completed_at
+            })
+        return jsonify(tasks_response), 200
+
+@tasks_bp.route("/<task_id>", methods=["GET", "PUT", "DELETE"])
+def handle_task(task_id):
+    task = Task.query.get(task_id)
+    if task is None:
+        return make_response("No matching task found", 404)
+ + if request.method == "GET": + + if task.completed_at == None: + completed_at = False + else: + completed_at = True + + selected_task = { + "task": { + "id": task.task_id, + "title": task.title, + "description": task.description, + "is_complete": completed_at + } + } + + if task.goal_id == None: + return make_response(selected_task) + else: + return make_response({ + "task": { + "id": task.task_id, + "title": task.title, + "description": task.description, + "is_complete": completed_at, + "goal_id": task.goal_id + } + }) + + elif request.method == "PUT": + request_body = request.get_json() + + task.title = request_body["title"] + task.description = request_body["description"] + task.completed_at = request_body["completed_at"] + + updated_task = {"task": + {"id": task.task_id, + "title": task.title, + "description": task.description, + "is_complete": bool(task.completed_at) + }} + db.session.add(task) + db.session.commit() + + return make_response(jsonify(updated_task)), 200 + + elif request.method == "DELETE": + db.session.delete(task) + db.session.commit() + task_response_body = { + "details": + f'Task {task.task_id} \"{task.title}\" successfully deleted' + } + return jsonify(task_response_body), 200 + +@tasks_bp.route("//mark_complete", methods=["PATCH"]) +def mark_task_complete(task_id): + task = Task.query.get_or_404(task_id) + + task.completed_at = datetime.now() + + db.session.commit() + + slack_message = f"A user just completed task: {task.title}" + post_message_to_slack(slack_message) + + if task.completed_at == None: + completed_at = False + else: + completed_at = True + + completed_task = { + "task": { + "id": task.task_id, + "title": task.title, + "description": task.description, + "is_complete": completed_at + }} + return jsonify(completed_task), 200 + +@tasks_bp.route("//mark_incomplete", methods=["PATCH"]) +def mark_task_incomplete(task_id): + task = Task.query.get_or_404(task_id) + + task.completed_at = None + db.session.commit() + incompleted_task = {"task": + + {"id": task.task_id, + "title": task.title, + "description": task.description, + "is_complete": bool(task.completed_at) + }} + return jsonify(incompleted_task), 200 + + +# https://github.com/OhCloud/task-list-api +# https://github.com/Ada-C15A/task-list-api/pull/3 \ No newline at end of file diff --git a/migrations/README b/migrations/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 000000000..f8ed4801f --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,45 @@ +# A generic, single database configuration. 
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 000000000..98e4f9c44
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..f8ed4801f
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,45 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 000000000..8b3fb3353
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,96 @@
+from __future__ import with_statement
+
+import logging
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from flask import current_app
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option(
+    'sqlalchemy.url',
+    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
+target_metadata = current_app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/29b91b61ddcf_.py b/migrations/versions/29b91b61ddcf_.py new file mode 100644 index 000000000..48be023de --- /dev/null +++ b/migrations/versions/29b91b61ddcf_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: 29b91b61ddcf +Revises: 86487318a410 +Create Date: 2021-06-15 11:37:16.113615 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '29b91b61ddcf' +down_revision = '86487318a410' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('goal', sa.Column('title', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('goal', 'title') + # ### end Alembic commands ### diff --git a/migrations/versions/4d5d03b10019_.py b/migrations/versions/4d5d03b10019_.py new file mode 100644 index 000000000..77f384c50 --- /dev/null +++ b/migrations/versions/4d5d03b10019_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: 4d5d03b10019 +Revises: 5d3ef5e8db55 +Create Date: 2021-06-15 12:50:20.967617 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4d5d03b10019' +down_revision = '5d3ef5e8db55' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_foreign_key(None, 'task', 'goal', ['goal_id'], ['goal_id']) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
+    op.drop_constraint(None, 'task', type_='foreignkey')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/5d3ef5e8db55_.py b/migrations/versions/5d3ef5e8db55_.py
new file mode 100644
index 000000000..bca4af3cb
--- /dev/null
+++ b/migrations/versions/5d3ef5e8db55_.py
@@ -0,0 +1,30 @@
+"""empty message
+
+Revision ID: 5d3ef5e8db55
+Revises: 29b91b61ddcf
+Create Date: 2021-06-15 12:39:42.033091
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '5d3ef5e8db55'
+down_revision = '29b91b61ddcf'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('task', sa.Column('goal_id', sa.Integer(), nullable=True))
+    op.add_column('task', sa.Column('is_complete', sa.Boolean(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('task', 'is_complete')
+    op.drop_column('task', 'goal_id')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/86487318a410_.py b/migrations/versions/86487318a410_.py
new file mode 100644
index 000000000..6eb978e56
--- /dev/null
+++ b/migrations/versions/86487318a410_.py
@@ -0,0 +1,33 @@
+"""empty message
+
+Revision ID: 86487318a410
+Revises: 
+Create Date: 2021-06-09 12:18:41.790660
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '86487318a410'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('goal',
+    sa.Column('goal_id', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('goal_id')
+    )
+    op.drop_column('task', 'is_complete')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('task', sa.Column('is_complete', sa.BOOLEAN(), autoincrement=False, nullable=True))
+    op.drop_table('goal')
+    # ### end Alembic commands ###
diff --git a/requirements.txt b/requirements.txt
index cfdf74050..5207bdfd4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -26,6 +26,7 @@ python-dotenv==0.15.0
 python-editor==1.0.4
 requests==2.25.1
 six==1.15.0
+slackclient==2.9.3
 SQLAlchemy==1.3.23
 toml==0.10.2
 urllib3==1.26.4
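To close, a minimal sketch (again illustrative, not part of the patch) of the one-to-many link that the Goal and Task models now carry. It assumes create_app() can reach a database configured through the project's .env file and that the migrations above have been applied; the titles used here are made up.

# sketch_models.py -- illustrative only; assumes a database configured via .env
from app import create_app, db
from app.models.goal import Goal
from app.models.task import Task

app = create_app()

with app.app_context():
    goal = Goal(title="Spring cleaning")
    # Task.goal_id is a nullable foreign key, so a task can start out unattached...
    task = Task(title="Donate old books", description="two boxes", completed_at=None)
    # ...and appending it to Goal.tasks sets goal_id through the relationship.
    goal.tasks.append(task)

    db.session.add(goal)
    db.session.commit()

    # The 'goal' backref walks the relationship in the other direction.
    print(task.goal.title)                 # -> "Spring cleaning"
    print([t.title for t in goal.tasks])   # -> ["Donate old books"]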