From 5936b2e8eff378b039d1bd2d90e95e10e7164711 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:27:24 -0400 Subject: [PATCH 01/37] Created 2 api endpoints; one to get a workflow from a workflow spec, and one to list all standalone workflow specs. Also added `standalone` argument to workflow_spec schema properties --- crc/api.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/crc/api.yml b/crc/api.yml index 6dc1afa6..bf1a573d 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -411,6 +411,18 @@ paths: application/json: schema: $ref: "#/components/schemas/WorkflowSpec" + post: + operationId: crc.api.workflow.get_workflow_from_spec + summary: Creates a workflow from a workflow spec and returns the workflow + tags: + - Workflow Specifications + responses: + '200': + description: Workflow generated successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" put: operationId: crc.api.workflow.update_workflow_specification security: @@ -440,6 +452,21 @@ paths: responses: '204': description: The workflow specification has been removed. + /workflow-specification/standalone: + get: + operationId: crc.api.workflow.standalone_workflow_specs + summary: Provides a list of workflow specifications that can be run outside a study. 
+ tags: + - Workflow Specifications + responses: + '200': + description: A list of workflow specifications + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/WorkflowSpec" /workflow-specification/{spec_id}/validate: parameters: - name: spec_id @@ -1472,6 +1499,9 @@ components: category_id: type: integer nullable: true + standalone: + type: boolean + example: false workflow_spec_category: $ref: "#/components/schemas/WorkflowSpecCategory" is_status: From fa818bd751d029188e8777c2e56b7565b3797c88 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:31:12 -0400 Subject: [PATCH 02/37] Code for the 2 new api endpoints Also modified `update_task` so that it no longer requires a study_id --- crc/api/workflow.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/crc/api/workflow.py b/crc/api/workflow.py index 46398904..4bdc9185 100644 --- a/crc/api/workflow.py +++ b/crc/api/workflow.py @@ -101,6 +101,24 @@ def delete_workflow_specification(spec_id): session.commit() +def get_workflow_from_spec(spec_id): + workflow_model = WorkflowService.get_workflow_from_spec(spec_id, g.user) + processor = WorkflowProcessor(workflow_model) + + processor.do_engine_steps() + processor.save() + WorkflowService.update_task_assignments(processor) + + workflow_api_model = WorkflowService.processor_to_workflow_api(processor) + return WorkflowApiSchema().dump(workflow_api_model) + + +def standalone_workflow_specs(): + schema = WorkflowSpecModelSchema(many=True) + specs = WorkflowService.get_standalone_workflow_specs() + return schema.dump(specs) + + def get_workflow(workflow_id, do_engine_steps=True): """Retrieve workflow based on workflow_id, and return it in the last saved State. If do_engine_steps is False, return the workflow without running any engine tasks or logging any events. 
""" @@ -184,9 +202,6 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals if workflow_model is None: raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404) - elif workflow_model.study is None: - raise ApiError("invalid_study", "There is no study associated with the given workflow.", status_code=404) - processor = WorkflowProcessor(workflow_model) task_id = uuid.UUID(task_id) spiff_task = processor.bpmn_workflow.get_task(task_id) From c0655c9d03aeb2c9e659ab554a5376322ab85b33 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:33:55 -0400 Subject: [PATCH 03/37] Added `standalone` column to workflow_spec Added `user_id` column to workflow --- crc/models/workflow.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crc/models/workflow.py b/crc/models/workflow.py index 0da32aec..10805a9a 100644 --- a/crc/models/workflow.py +++ b/crc/models/workflow.py @@ -33,6 +33,7 @@ class WorkflowSpecModel(db.Model): category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True) category = db.relationship("WorkflowSpecCategoryModel") is_master_spec = db.Column(db.Boolean, default=False) + standalone = db.Column(db.Boolean, default=False) class WorkflowSpecModelSchema(SQLAlchemyAutoSchema): @@ -88,6 +89,7 @@ class WorkflowModel(db.Model): total_tasks = db.Column(db.Integer, default=0) completed_tasks = db.Column(db.Integer, default=0) last_updated = db.Column(db.DateTime) + user_id = db.Column(db.String, default=None) # Order By is important or generating hashes on reviews. 
dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan", order_by="WorkflowSpecDependencyFile.file_data_id") From 22432aaf2c611c0d1ed2d3f63d7b1e9e2ee7e281 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:36:09 -0400 Subject: [PATCH 04/37] study_id is no longer required for task events --- crc/models/task_event.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crc/models/task_event.py b/crc/models/task_event.py index 21a239ee..70bb1be5 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -10,7 +10,7 @@ from crc.services.ldap_service import LdapService class TaskEventModel(db.Model): __tablename__ = 'task_event' id = db.Column(db.Integer, primary_key=True) - study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=False) + study_id = db.Column(db.Integer, db.ForeignKey('study.id')) user_uid = db.Column(db.String, nullable=False) # In some cases the unique user id may not exist in the db yet. workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False) workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id')) From 1f50bdcb8107cc9b9ce3c57ad7531aa9c07b4e26 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:36:56 -0400 Subject: [PATCH 05/37] Migration script for database changes --- migrations/versions/8b976945a54e_.py | 30 ++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 migrations/versions/8b976945a54e_.py diff --git a/migrations/versions/8b976945a54e_.py b/migrations/versions/8b976945a54e_.py new file mode 100644 index 00000000..7805e31b --- /dev/null +++ b/migrations/versions/8b976945a54e_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: 8b976945a54e +Revises: c872232ebdcb +Create Date: 2021-04-18 11:42:41.894378 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '8b976945a54e' +down_revision = 'c872232ebdcb' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('workflow', sa.Column('user_id', sa.String(), nullable=True)) + op.add_column('workflow_spec', sa.Column('standalone', sa.Boolean(), default=False)) + op.execute("UPDATE workflow_spec SET standalone=False WHERE standalone is null;") + op.execute("ALTER TABLE task_event ALTER COLUMN study_id DROP NOT NULL") + + +def downgrade(): + op.execute("UPDATE workflow SET user_id=NULL WHERE user_id is not NULL") + op.drop_column('workflow', 'user_id') + op.drop_column('workflow_spec', 'standalone') + op.execute("ALTER TABLE task_event ALTER COLUMN study_id SET NOT NULL ") From b6f500168764d686caecdb90a655742bb1d2bfb8 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:43:14 -0400 Subject: [PATCH 06/37] Added methods for the 2 new api endpoints --- crc/services/workflow_service.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 265b61e7..a08087a1 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -783,3 +783,19 @@ class WorkflowService(object): for workflow in workflows: if workflow.status == WorkflowStatus.user_input_required or workflow.status == WorkflowStatus.waiting: WorkflowProcessor.reset(workflow, clear_data=False) + + @staticmethod + def get_workflow_from_spec(workflow_spec_id, user): + workflow_model = WorkflowModel(status=WorkflowStatus.not_started, + study=None, + user_id=user.uid, + workflow_spec_id=workflow_spec_id, + last_updated=datetime.now()) + db.session.add(workflow_model) + db.session.commit() + return workflow_model + + @staticmethod + def get_standalone_workflow_specs(): + specs = db.session.query(WorkflowSpecModel).filter_by(standalone=True).all() + return specs From f17a9dc0f44e4f9db35bde8847d30860bfb1ec2f Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr
2021 08:45:08 -0400 Subject: [PATCH 07/37] Modified `get_users_assigned_to_task`. If we are running a standalone workflow, only return the current user. --- crc/services/workflow_service.py | 51 +++++++++++++++++++------------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index a08087a1..9edaf856 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -667,30 +667,39 @@ class WorkflowService(object): @staticmethod def get_users_assigned_to_task(processor, spiff_task) -> List[str]: - if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None: - associated = StudyService.get_study_associates(processor.workflow_model.study.id) - return [user['uid'] for user in associated if user['access']] - if spiff_task.task_spec.lane not in spiff_task.data: - return [] # No users are assignable to the task at this moment - lane_users = spiff_task.data[spiff_task.task_spec.lane] - if not isinstance(lane_users, list): - lane_users = [lane_users] + if processor.workflow_model.study_id is None and processor.workflow_model.user_id is None: + raise ApiError.from_task(code='invalid_workflow', + message='A workflow must have either a study_id or a user_id.', + task=spiff_task) + # Standalone workflow - we only care about the current user + elif processor.workflow_model.study_id is None and processor.workflow_model.user_id is not None: + return [processor.workflow_model.user_id] + # Workflow associated with a study - get all the users + else: + if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None: + associated = StudyService.get_study_associates(processor.workflow_model.study.id) + return [user['uid'] for user in associated if user['access']] + if spiff_task.task_spec.lane not in spiff_task.data: + return [] # No users are assignable to the task at this moment + lane_users = spiff_task.data[spiff_task.task_spec.lane] + if 
not isinstance(lane_users, list): + lane_users = [lane_users] - lane_uids = [] - for user in lane_users: - if isinstance(user, dict): - if 'value' in user and user['value'] is not None: - lane_uids.append(user['value']) + lane_uids = [] + for user in lane_users: + if isinstance(user, dict): + if 'value' in user and user['value'] is not None: + lane_uids.append(user['value']) + else: + raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." % + spiff_task.task_spec.name, task=spiff_task) + elif isinstance(user, str): + lane_uids.append(user) else: - raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." % - spiff_task.task_spec.name, task=spiff_task) - elif isinstance(user, str): - lane_uids.append(user) - else: - raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user is not a string or dict" % - spiff_task.task_spec.name, task=spiff_task) + raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user is not a string or dict" % + spiff_task.task_spec.name, task=spiff_task) - return lane_uids + return lane_uids @staticmethod def log_task_action(user_uid, processor, spiff_task, action): From 077b68c1e2ef11d0bb4abafa1b6fb09699be1203 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:46:19 -0400 Subject: [PATCH 08/37] When creating a workflow for a study, set the user_id to None --- crc/services/study_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/crc/services/study_service.py b/crc/services/study_service.py index 749a4e14..9bc5a598 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -495,6 +495,7 @@ class StudyService(object): def _create_workflow_model(study: StudyModel, spec): workflow_model = WorkflowModel(status=WorkflowStatus.not_started, study=study, + 
user_id=None, workflow_spec_id=spec.id, last_updated=datetime.now()) session.add(workflow_model) From 6c98e8a2f40245e0492cf545ffe60ead66bff9e1 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:47:03 -0400 Subject: [PATCH 09/37] removed duplicate code --- tests/base_test.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/base_test.py b/tests/base_test.py index a9fcfce1..d50831c9 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -175,11 +175,6 @@ class BaseTest(unittest.TestCase): specs = session.query(WorkflowSpecModel).all() self.assertIsNotNone(specs) - for spec in specs: - files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all() - self.assertIsNotNone(files) - self.assertGreater(len(files), 0) - for spec in specs: files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all() self.assertIsNotNone(files) From a17e1bfacaa50d53a55ff3b0b42bf22fc0c53377 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:50:46 -0400 Subject: [PATCH 10/37] When completing a form in a test, check whether workflow is standalone. Do not set study_id when workflow is standalone. *** Note the comment about passing in workflow_spec. We should be passing in a workflow, not a workflow_spec. 
--- tests/base_test.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/base_test.py b/tests/base_test.py index d50831c9..a7f5d25b 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -373,6 +373,10 @@ class BaseTest(unittest.TestCase): def complete_form(self, workflow_in, task_in, dict_data, update_all=False, error_code=None, terminate_loop=None, user_uid="dhf8r"): + # workflow_in should be a workflow, not a workflow_api + # we were passing in workflow_api in many of our tests, and + # this caused problems testing standalone workflows + standalone = getattr(workflow_in.workflow_spec, 'standalone', False) prev_completed_task_count = workflow_in.completed_tasks if isinstance(task_in, dict): task_id = task_in["id"] @@ -415,7 +419,8 @@ class BaseTest(unittest.TestCase): .order_by(TaskEventModel.date.desc()).all() self.assertGreater(len(task_events), 0) event = task_events[0] - self.assertIsNotNone(event.study_id) + if not standalone: + self.assertIsNotNone(event.study_id) self.assertEqual(user_uid, event.user_uid) self.assertEqual(workflow.id, event.workflow_id) self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id) From e836242f10a593ca15981f7e623b132585861e34 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:52:12 -0400 Subject: [PATCH 11/37] Added standalone argument when adding a workflow_spec --- example_data.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/example_data.py b/example_data.py index f3918846..bc7c438c 100644 --- a/example_data.py +++ b/example_data.py @@ -266,7 +266,7 @@ class ExampleDataLoader: from_tests=True) def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False, - category_id=None, display_order=None, from_tests=False): + category_id=None, display_order=None, from_tests=False, standalone=False): """Assumes that a directory exists in static/bpmn with the same name as the given id. 
further assumes that the [id].bpmn is the primary file for the workflow. returns an array of data models to be added to the database.""" @@ -278,7 +278,8 @@ class ExampleDataLoader: description=description, is_master_spec=master_spec, category_id=category_id, - display_order=display_order) + display_order=display_order, + standalone=standalone) db.session.add(spec) db.session.commit() if not filepath and not from_tests: From 54ab7bd2f68eecbc40afebae6eed94c724e7d71e Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:53:25 -0400 Subject: [PATCH 12/37] Test and workflow for launching a standalone workflow --- tests/data/hello_world/hello_world.bpmn | 58 +++++++++++++++++++++ tests/test_launch_workflow_outside_study.py | 23 ++++++++ 2 files changed, 81 insertions(+) create mode 100644 tests/data/hello_world/hello_world.bpmn create mode 100644 tests/test_launch_workflow_outside_study.py diff --git a/tests/data/hello_world/hello_world.bpmn b/tests/data/hello_world/hello_world.bpmn new file mode 100644 index 00000000..168aee1a --- /dev/null +++ b/tests/data/hello_world/hello_world.bpmn @@ -0,0 +1,58 @@ + + + + This workflow asks for a name and says hello + + SequenceFlow_0qyd2b7 + + + + Hello + + + + + + SequenceFlow_0qyd2b7 + SequenceFlow_1h46b40 + + + + Hello {{name}} + SequenceFlow_1h46b40 + SequenceFlow_0lqrc6e + + + SequenceFlow_0lqrc6e + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/test_launch_workflow_outside_study.py b/tests/test_launch_workflow_outside_study.py new file mode 100644 index 00000000..fbf51cc9 --- /dev/null +++ b/tests/test_launch_workflow_outside_study.py @@ -0,0 +1,23 @@ +from tests.base_test import BaseTest + +from crc import session +from crc.models.user import UserModel +from crc.services.workflow_service import WorkflowService + +from example_data import ExampleDataLoader + + +class TestNoStudyWorkflow(BaseTest): + + def test_no_study_workflow(self): + self.load_example_data() + spec = 
ExampleDataLoader().create_spec('hello_world', 'Hello World', standalone=True, from_tests=True) + user = session.query(UserModel).first() + self.assertIsNotNone(user) + workflow_model = WorkflowService.get_workflow_from_spec(spec.id, user) + workflow_api = self.get_workflow_api(workflow_model) + first_task = workflow_api.next_task + self.complete_form(workflow_model, first_task, {'name': 'Big Guy'}) + workflow_api = self.get_workflow_api(workflow_model) + second_task = workflow_api.next_task + self.assertEqual(second_task.documentation, 'Hello Big Guy') From c002ef1e76a1368fe4fb79dfa650de5d88e1a139 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:56:21 -0400 Subject: [PATCH 13/37] Added test for new api endpoint to get all standalone workflow_specs. Modified test_add_new_workflow_specification to include new standalone argument --- tests/workflow/test_workflow_spec_api.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/tests/workflow/test_workflow_spec_api.py b/tests/workflow/test_workflow_spec_api.py index d54fbbf1..25c9ef6a 100644 --- a/tests/workflow/test_workflow_spec_api.py +++ b/tests/workflow/test_workflow_spec_api.py @@ -5,6 +5,8 @@ from crc import session from crc.models.file import FileModel from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel +from example_data import ExampleDataLoader + class TestWorkflowSpec(BaseTest): @@ -28,7 +30,8 @@ class TestWorkflowSpec(BaseTest): category_id = session.query(WorkflowSpecCategoryModel).first().id category_count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count() spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies', - description='Om nom nom delicious cookies', category_id=category_id) + description='Om nom nom delicious cookies', category_id=category_id, + standalone=False) rv = self.app.post('/v1.0/workflow-specification', 
headers=self.logged_in_headers(), content_type="application/json", @@ -101,3 +104,16 @@ class TestWorkflowSpec(BaseTest): num_workflows_after = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count() self.assertEqual(num_files_after + num_workflows_after, 0) + def test_get_standalone_workflow_specs(self): + self.load_example_data() + category = session.query(WorkflowSpecCategoryModel).first() + ExampleDataLoader().create_spec('hello_world', 'Hello World', category_id=category.id, + standalone=True, from_tests=True) + rv = self.app.get('/v1.0/workflow-specification/standalone', headers=self.logged_in_headers()) + self.assertEqual(1, len(rv.json)) + + ExampleDataLoader().create_spec('email_script', 'Email Script', category_id=category.id, + standalone=True, from_tests=True) + + rv = self.app.get('/v1.0/workflow-specification/standalone', headers=self.logged_in_headers()) + self.assertEqual(2, len(rv.json)) From 7cca559747bef3bedf6eadb8247c8679910bb5ab Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 08:58:09 -0400 Subject: [PATCH 14/37] Fixed issue where we were passing in a workflow_api instead of a workflow when completing a form in tests. 
--- tests/study/test_study_cancellations.py | 12 ++++---- tests/test_email_script.py | 3 -- tests/test_message_event.py | 6 ++-- tests/test_multi_instance_tasks_api.py | 10 +++---- tests/test_tasks_api.py | 22 +++++++------- .../workflow/test_workflow_boolean_default.py | 2 +- ..._workflow_enum_default_value_expression.py | 16 +++++----- .../workflow/test_workflow_form_field_name.py | 2 +- .../test_workflow_hidden_required_field.py | 5 ++-- tests/workflow/test_workflow_restart.py | 29 +++++++++---------- .../test_workflow_value_expression.py | 4 +-- 11 files changed, 52 insertions(+), 59 deletions(-) diff --git a/tests/study/test_study_cancellations.py b/tests/study/test_study_cancellations.py index 4b0d61f9..df0ba2a2 100644 --- a/tests/study/test_study_cancellations.py +++ b/tests/study/test_study_cancellations.py @@ -72,7 +72,7 @@ class TestStudyCancellations(BaseTest): workflow, study_id = self.load_workflow() workflow_api, first_task = self.get_first_task(workflow) - self.complete_form(workflow_api, first_task, {}) + self.complete_form(workflow, first_task, {}) study_result = self.put_study_on_hold(study_id) self.assertEqual('New Title', study_result.title) @@ -82,10 +82,10 @@ class TestStudyCancellations(BaseTest): workflow, study_id = self.load_workflow() workflow_api, first_task = self.get_first_task(workflow) - self.complete_form(workflow_api, first_task, {}) + self.complete_form(workflow, first_task, {}) workflow_api, next_task = self.get_second_task(workflow) - self.complete_form(workflow_api, next_task, {'how_many': 3}) + self.complete_form(workflow, next_task, {'how_many': 3}) study_result = self.put_study_on_hold(study_id) self.assertEqual('Second Title', study_result.title) @@ -95,13 +95,13 @@ class TestStudyCancellations(BaseTest): workflow, study_id = self.load_workflow() workflow_api, first_task = self.get_first_task(workflow) - self.complete_form(workflow_api, first_task, {}) + self.complete_form(workflow, first_task, {}) workflow_api, second_task 
= self.get_second_task(workflow) - self.complete_form(workflow_api, second_task, {'how_many': 3}) + self.complete_form(workflow, second_task, {'how_many': 3}) workflow_api, third_task = self.get_third_task(workflow) - self.complete_form(workflow_api, third_task, {}) + self.complete_form(workflow, third_task, {}) study_result = self.put_study_on_hold(study_id) self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title) diff --git a/tests/test_email_script.py b/tests/test_email_script.py index 980d0c06..7786b045 100644 --- a/tests/test_email_script.py +++ b/tests/test_email_script.py @@ -23,7 +23,6 @@ class TestEmailScript(BaseTest): first_task = self.get_workflow_api(workflow).next_task - workflow = self.get_workflow_api(workflow) self.complete_form(workflow, first_task, {'subject': 'My Email Subject', 'recipients': 'test@example.com'}) self.assertEqual(1, len(outbox)) @@ -49,7 +48,6 @@ class TestEmailScript(BaseTest): def test_bad_email_address_1(self): workflow = self.create_workflow('email_script') first_task = self.get_workflow_api(workflow).next_task - workflow = self.get_workflow_api(workflow) with self.assertRaises(AssertionError): self.complete_form(workflow, first_task, {'recipients': 'test@example'}) @@ -57,7 +55,6 @@ class TestEmailScript(BaseTest): def test_bad_email_address_2(self): workflow = self.create_workflow('email_script') first_task = self.get_workflow_api(workflow).next_task - workflow = self.get_workflow_api(workflow) with self.assertRaises(AssertionError): self.complete_form(workflow, first_task, {'recipients': 'test'}) diff --git a/tests/test_message_event.py b/tests/test_message_event.py index f761871b..3d769095 100644 --- a/tests/test_message_event.py +++ b/tests/test_message_event.py @@ -13,10 +13,10 @@ class TestMessageEvent(BaseTest): # Start the workflow. 
first_task = self.get_workflow_api(workflow).next_task self.assertEqual('Activity_GetData', first_task.name) - workflow = self.get_workflow_api(workflow) + self.complete_form(workflow, first_task, {'formdata': 'asdf'}) - workflow = self.get_workflow_api(workflow) - self.assertEqual('Activity_HowMany', workflow.next_task.name) + workflow_api = self.get_workflow_api(workflow) + self.assertEqual('Activity_HowMany', workflow_api.next_task.name) # reset the workflow # this ultimately calls crc.api.workflow.set_current_task diff --git a/tests/test_multi_instance_tasks_api.py b/tests/test_multi_instance_tasks_api.py index d4f26297..12036e2d 100644 --- a/tests/test_multi_instance_tasks_api.py +++ b/tests/test_multi_instance_tasks_api.py @@ -67,14 +67,14 @@ class TestMultiinstanceTasksApi(BaseTest): content_type="application/json") self.assert_success(rv) json_data = json.loads(rv.get_data(as_text=True)) - workflow = WorkflowApiSchema().load(json_data) - data = workflow.next_task.data + workflow_api = WorkflowApiSchema().load(json_data) + data = workflow_api.next_task.data data['investigator']['email'] = "dhf8r@virginia.edu" - self.complete_form(workflow, workflow.next_task, data) + self.complete_form(workflow, workflow_api.next_task, data) #tasks = self.get_workflow_api(workflow).user_tasks - workflow = self.get_workflow_api(workflow) - self.assertEqual(WorkflowStatus.complete, workflow.status) + workflow_api = self.get_workflow_api(workflow) + self.assertEqual(WorkflowStatus.complete, workflow_api.status) @patch('crc.services.protocol_builder.requests.get') diff --git a/tests/test_tasks_api.py b/tests/test_tasks_api.py index f2742960..9c6f0b41 100644 --- a/tests/test_tasks_api.py +++ b/tests/test_tasks_api.py @@ -386,15 +386,15 @@ class TestTasksApi(BaseTest): # Start the workflow. 
first_task = self.get_workflow_api(workflow).next_task self.complete_form(workflow, first_task, {"has_bananas": True}) - workflow = self.get_workflow_api(workflow) - self.assertEqual('Task_Num_Bananas', workflow.next_task.name) + workflow_api = self.get_workflow_api(workflow) + self.assertEqual('Task_Num_Bananas', workflow_api.next_task.name) # Trying to re-submit the initial task, and answer differently, should result in an error. self.complete_form(workflow, first_task, {"has_bananas": False}, error_code="invalid_state") # Go ahead and set the number of bananas. - workflow = self.get_workflow_api(workflow) - task = workflow.next_task + workflow_api = self.get_workflow_api(workflow) + task = workflow_api.next_task self.complete_form(workflow, task, {"num_bananas": 4}) # We are now at the end of the workflow. @@ -405,19 +405,19 @@ class TestTasksApi(BaseTest): content_type="application/json") self.assert_success(rv) json_data = json.loads(rv.get_data(as_text=True)) - workflow = WorkflowApiSchema().load(json_data) + workflow_api = WorkflowApiSchema().load(json_data) # Assure the Next Task is the one we just reset the token to be on. - self.assertEqual("Task_Has_Bananas", workflow.next_task.name) + self.assertEqual("Task_Has_Bananas", workflow_api.next_task.name) # Go ahead and get that workflow one more time, it should still be right. - workflow = self.get_workflow_api(workflow) + workflow_api = self.get_workflow_api(workflow) # Assure the Next Task is the one we just reset the token to be on. - self.assertEqual("Task_Has_Bananas", workflow.next_task.name) + self.assertEqual("Task_Has_Bananas", workflow_api.next_task.name) # The next task should be a different value. 
- self.complete_form(workflow, workflow.next_task, {"has_bananas": False}) - workflow = self.get_workflow_api(workflow) - self.assertEqual('Task_Why_No_Bananas', workflow.next_task.name) + self.complete_form(workflow, workflow_api.next_task, {"has_bananas": False}) + workflow_api = self.get_workflow_api(workflow) + self.assertEqual('Task_Why_No_Bananas', workflow_api.next_task.name) diff --git a/tests/workflow/test_workflow_boolean_default.py b/tests/workflow/test_workflow_boolean_default.py index 1773295a..a2b04228 100644 --- a/tests/workflow/test_workflow_boolean_default.py +++ b/tests/workflow/test_workflow_boolean_default.py @@ -7,7 +7,7 @@ class TestBooleanDefault(BaseTest): workflow = self.create_workflow('boolean_default_value') workflow_api = self.get_workflow_api(workflow) set_default_task = workflow_api.next_task - result = self.complete_form(workflow_api, set_default_task, {'yes_no': yes_no}) + result = self.complete_form(workflow, set_default_task, {'yes_no': yes_no}) return result def test_boolean_true_string(self): diff --git a/tests/workflow/test_workflow_enum_default_value_expression.py b/tests/workflow/test_workflow_enum_default_value_expression.py index 00c2abf6..644d52f8 100644 --- a/tests/workflow/test_workflow_enum_default_value_expression.py +++ b/tests/workflow/test_workflow_enum_default_value_expression.py @@ -7,35 +7,35 @@ class TestWorkflowEnumDefault(BaseTest): def test_enum_default_from_value_expression(self): workflow = self.create_workflow('enum_value_expression') - first_task = self.get_workflow_api(workflow).next_task - self.assertEqual('Activity_UserInput', first_task.name) workflow_api = self.get_workflow_api(workflow) + first_task = workflow_api.next_task + self.assertEqual('Activity_UserInput', first_task.name) - result = self.complete_form(workflow_api, first_task, {'user_input': True}) + result = self.complete_form(workflow, first_task, {'user_input': True}) self.assertIn('user_input', result.next_task.data) 
self.assertEqual(True, result.next_task.data['user_input']) self.assertIn('lookup_output', result.next_task.data) self.assertEqual('black', result.next_task.data['lookup_output']) workflow_api = self.get_workflow_api(workflow) - self.assertEqual('Activity_PickColor', self.get_workflow_api(workflow_api).next_task.name) + self.assertEqual('Activity_PickColor', workflow_api.next_task.name) self.assertEqual({'value': 'black', 'label': 'Black'}, workflow_api.next_task.data['color_select']) # workflow = self.create_workflow('enum_value_expression') - first_task = self.get_workflow_api(workflow).next_task - self.assertEqual('Activity_UserInput', first_task.name) workflow_api = self.get_workflow_api(workflow) + first_task = workflow_api.next_task + self.assertEqual('Activity_UserInput', first_task.name) - result = self.complete_form(workflow_api, first_task, {'user_input': False}) + result = self.complete_form(workflow, first_task, {'user_input': False}) self.assertIn('user_input', result.next_task.data) self.assertEqual(False, result.next_task.data['user_input']) self.assertIn('lookup_output', result.next_task.data) self.assertEqual('white', result.next_task.data['lookup_output']) workflow_api = self.get_workflow_api(workflow) - self.assertEqual('Activity_PickColor', self.get_workflow_api(workflow_api).next_task.name) + self.assertEqual('Activity_PickColor', workflow_api.next_task.name) self.assertEqual({'value': 'white', 'label': 'White'}, workflow_api.next_task.data['color_select']) def test_enum_value_expression_and_default(self): diff --git a/tests/workflow/test_workflow_form_field_name.py b/tests/workflow/test_workflow_form_field_name.py index 8a6aead2..6e38a816 100644 --- a/tests/workflow/test_workflow_form_field_name.py +++ b/tests/workflow/test_workflow_form_field_name.py @@ -18,7 +18,7 @@ class TestFormFieldName(BaseTest): workflow_api = self.get_workflow_api(workflow) first_task = workflow_api.next_task - self.complete_form(workflow_api, first_task, {}) + 
self.complete_form(workflow, first_task, {}) workflow_api = self.get_workflow_api(workflow) second_task = workflow_api.next_task diff --git a/tests/workflow/test_workflow_hidden_required_field.py b/tests/workflow/test_workflow_hidden_required_field.py index b77dbb24..610c917d 100644 --- a/tests/workflow/test_workflow_hidden_required_field.py +++ b/tests/workflow/test_workflow_hidden_required_field.py @@ -34,14 +34,13 @@ class TestWorkflowHiddenRequiredField(BaseTest): first_task = workflow_api.next_task self.assertEqual('Activity_Hello', first_task.name) - workflow_api = self.get_workflow_api(workflow) - self.complete_form(workflow_api, first_task, {}) + self.complete_form(workflow, first_task, {}) workflow_api = self.get_workflow_api(workflow) second_task = workflow_api.next_task self.assertEqual('Activity_HiddenField', second_task.name) - self.complete_form(workflow_api, second_task, {}) + self.complete_form(workflow, second_task, {}) workflow_api = self.get_workflow_api(workflow) # The color field is hidden and required. 
Make sure we use the default value diff --git a/tests/workflow/test_workflow_restart.py b/tests/workflow/test_workflow_restart.py index 98688964..0aafb492 100644 --- a/tests/workflow/test_workflow_restart.py +++ b/tests/workflow/test_workflow_restart.py @@ -9,20 +9,20 @@ class TestWorkflowRestart(BaseTest): workflow = self.create_workflow('message_event') - first_task = self.get_workflow_api(workflow).next_task - self.assertEqual('Activity_GetData', first_task.name) workflow_api = self.get_workflow_api(workflow) + first_task = workflow_api.next_task + self.assertEqual('Activity_GetData', first_task.name) - result = self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) + result = self.complete_form(workflow, first_task, {'formdata': 'asdf'}) self.assertIn('formdata', result.next_task.data) self.assertEqual('asdf', result.next_task.data['formdata']) workflow_api = self.get_workflow_api(workflow) - self.assertEqual('Activity_HowMany', self.get_workflow_api(workflow_api).next_task.name) + self.assertEqual('Activity_HowMany', workflow_api.next_task.name) # restart with data. should land at beginning with data workflow_api = self.restart_workflow_api(result) - first_task = self.get_workflow_api(workflow_api).next_task + first_task = workflow_api.next_task self.assertEqual('Activity_GetData', first_task.name) self.assertIn('formdata', workflow_api.next_task.data) self.assertEqual('asdf', workflow_api.next_task.data['formdata']) @@ -33,21 +33,19 @@ class TestWorkflowRestart(BaseTest): self.assertEqual('Activity_GetData', first_task.name) self.assertNotIn('formdata', workflow_api.next_task.data) - print('Nice Test') - def test_workflow_restart_on_cancel_notify(self): workflow = self.create_workflow('message_event') study_id = workflow.study_id # Start the workflow. 
- first_task = self.get_workflow_api(workflow).next_task - self.assertEqual('Activity_GetData', first_task.name) workflow_api = self.get_workflow_api(workflow) - self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) + first_task = workflow_api.next_task + self.assertEqual('Activity_GetData', first_task.name) + self.complete_form(workflow, first_task, {'formdata': 'asdf'}) workflow_api = self.get_workflow_api(workflow) self.assertEqual('Activity_HowMany', workflow_api.next_task.name) - workflow_api = self.restart_workflow_api(workflow) + self.restart_workflow_api(workflow) study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first() self.assertEqual('New Title', study_result.title) @@ -66,17 +64,16 @@ class TestWorkflowRestart(BaseTest): study_id = workflow.study_id # Start the workflow. - first_task = self.get_workflow_api(workflow).next_task - self.assertEqual('Activity_GetData', first_task.name) workflow_api = self.get_workflow_api(workflow) - self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) + first_task = workflow_api.next_task + self.assertEqual('Activity_GetData', first_task.name) + self.complete_form(workflow, first_task, {'formdata': 'asdf'}) workflow_api = self.get_workflow_api(workflow) next_task = workflow_api.next_task self.assertEqual('Activity_HowMany', next_task.name) - self.complete_form(workflow_api, next_task, {'how_many': 3}) + self.complete_form(workflow, next_task, {'how_many': 3}) - workflow_api = self.restart_workflow_api(workflow) study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first() self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title) diff --git a/tests/workflow/test_workflow_value_expression.py b/tests/workflow/test_workflow_value_expression.py index d527c694..47751b1c 100644 --- a/tests/workflow/test_workflow_value_expression.py +++ b/tests/workflow/test_workflow_value_expression.py @@ -9,7 +9,7 @@ class 
TestValueExpression(BaseTest): workflow_api = self.get_workflow_api(workflow) first_task = workflow_api.next_task - self.complete_form(workflow_api, first_task, {'value_expression_value': ''}) + self.complete_form(workflow, first_task, {'value_expression_value': ''}) workflow_api = self.get_workflow_api(workflow) second_task = workflow_api.next_task @@ -26,7 +26,7 @@ class TestValueExpression(BaseTest): workflow_api = self.get_workflow_api(workflow) first_task = workflow_api.next_task - self.complete_form(workflow_api, first_task, {'value_expression_value': 'black'}) + self.complete_form(workflow, first_task, {'value_expression_value': 'black'}) workflow_api = self.get_workflow_api(workflow) second_task = workflow_api.next_task From 1a44f50de6f7e0d0f039681565b22e0f73598a01 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 11:21:35 -0400 Subject: [PATCH 15/37] Added `standalone` argument to fix failing test. --- tests/test_auto_set_primary_bpmn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_auto_set_primary_bpmn.py b/tests/test_auto_set_primary_bpmn.py index d6280b69..6179d2f4 100644 --- a/tests/test_auto_set_primary_bpmn.py +++ b/tests/test_auto_set_primary_bpmn.py @@ -13,7 +13,8 @@ class TestAutoSetPrimaryBPMN(BaseTest): category_id = session.query(WorkflowSpecCategoryModel).first().id # Add a workflow spec spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies', - description='Om nom nom delicious cookies', category_id=category_id) + description='Om nom nom delicious cookies', category_id=category_id, + standalone=False) rv = self.app.post('/v1.0/workflow-specification', headers=self.logged_in_headers(), content_type="application/json", From b73c2ff1cb0150bb4fc8567880ef0e316dfddf29 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 12:00:06 -0400 Subject: [PATCH 16/37] Added missing test for `get_workflow_from_workflow_spec` --- 
tests/workflow/test_workflow_spec_api.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/workflow/test_workflow_spec_api.py b/tests/workflow/test_workflow_spec_api.py index 25c9ef6a..2cb7e153 100644 --- a/tests/workflow/test_workflow_spec_api.py +++ b/tests/workflow/test_workflow_spec_api.py @@ -117,3 +117,11 @@ class TestWorkflowSpec(BaseTest): rv = self.app.get('/v1.0/workflow-specification/standalone', headers=self.logged_in_headers()) self.assertEqual(2, len(rv.json)) + + def test_get_workflow_from_workflow_spec(self): + self.load_example_data() + spec = ExampleDataLoader().create_spec('hello_world', 'Hello World', standalone=True, from_tests=True) + rv = self.app.post(f'/v1.0/workflow-specification/{spec.id}', headers=self.logged_in_headers()) + self.assert_success(rv) + self.assertEqual('hello_world', rv.json['workflow_spec_id']) + self.assertEqual('Task_GetName', rv.json['next_task']['name']) From 0aa3e8f586becdca0e5a57d8b0bd2fc2bb3ba63a Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 12:41:50 -0400 Subject: [PATCH 17/37] added missing test for add_workflow_spec_category --- tests/workflow/test_workflow_spec_api.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/tests/workflow/test_workflow_spec_api.py b/tests/workflow/test_workflow_spec_api.py index 2cb7e153..3d93ffc1 100644 --- a/tests/workflow/test_workflow_spec_api.py +++ b/tests/workflow/test_workflow_spec_api.py @@ -3,7 +3,7 @@ import json from tests.base_test import BaseTest from crc import session from crc.models.file import FileModel -from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel +from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel, WorkflowSpecCategoryModelSchema from example_data import ExampleDataLoader @@ -125,3 +125,22 @@ class TestWorkflowSpec(BaseTest): self.assert_success(rv) 
self.assertEqual('hello_world', rv.json['workflow_spec_id']) self.assertEqual('Task_GetName', rv.json['next_task']['name']) + + def test_add_workflow_spec_category(self): + self.load_example_data() + count = session.query(WorkflowSpecCategoryModel).count() + category = WorkflowSpecCategoryModel( + id=count, + name='another_test_category', + display_name='Another Test Category', + display_order=0 + ) + rv = self.app.post(f'/v1.0/workflow-specification-category', + headers=self.logged_in_headers(), + content_type="application/json", + data=json.dumps(WorkflowSpecCategoryModelSchema().dump(category)) + ) + self.assert_success(rv) + result = session.query(WorkflowSpecCategoryModel).filter(WorkflowSpecCategoryModel.name=='another_test_category').first() + self.assertEqual('Another Test Category', result.display_name) + self.assertEqual(count, result.id) From 09d8c8bcbabaa1ad1595ebc910592a1169f268c9 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 26 Apr 2021 12:57:08 -0400 Subject: [PATCH 18/37] added missing test for update_workflow_spec_category --- tests/workflow/test_workflow_spec_api.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/workflow/test_workflow_spec_api.py b/tests/workflow/test_workflow_spec_api.py index 3d93ffc1..498ea37c 100644 --- a/tests/workflow/test_workflow_spec_api.py +++ b/tests/workflow/test_workflow_spec_api.py @@ -144,3 +144,20 @@ class TestWorkflowSpec(BaseTest): result = session.query(WorkflowSpecCategoryModel).filter(WorkflowSpecCategoryModel.name=='another_test_category').first() self.assertEqual('Another Test Category', result.display_name) self.assertEqual(count, result.id) + + def test_update_workflow_spec_category(self): + self.load_example_data() + category = session.query(WorkflowSpecCategoryModel).first() + category_name_before = category.name + new_category_name = category_name_before + '_asdf' + self.assertNotEqual(category_name_before, new_category_name) + + category.name = new_category_name + 
+ rv = self.app.put(f'/v1.0/workflow-specification-category/{category.id}', + content_type="application/json", + headers=self.logged_in_headers(), + data=json.dumps(WorkflowSpecCategoryModelSchema().dump(category))) + self.assert_success(rv) + json_data = json.loads(rv.get_data(as_text=True)) + self.assertEqual(new_category_name, json_data['name']) From c029dad688cca2df45542e133fd1f7ac622273b5 Mon Sep 17 00:00:00 2001 From: Kelly McDonald Date: Wed, 28 Apr 2021 10:00:22 -0400 Subject: [PATCH 19/37] Convert some datetime columns to include timezone --- crc/models/task_event.py | 3 +- crc/models/workflow.py | 3 +- ...547305_update_type_on_task_events_table.py | 28 +++++++++++++++++++ 3 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 migrations/versions/abeffe547305_update_type_on_task_events_table.py diff --git a/crc/models/task_event.py b/crc/models/task_event.py index 21a239ee..40c8b1fd 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -5,6 +5,7 @@ from crc import db, ma from crc.models.study import StudyModel, StudySchema, WorkflowMetadataSchema, WorkflowMetadata from crc.models.workflow import WorkflowModel from crc.services.ldap_service import LdapService +from sqlalchemy import func class TaskEventModel(db.Model): @@ -27,7 +28,7 @@ class TaskEventModel(db.Model): mi_count = db.Column(db.Integer) mi_index = db.Column(db.Integer) process_name = db.Column(db.String) - date = db.Column(db.DateTime) + date = db.Column(db.DateTime(timezone=True),default=func.now()) class TaskEventModelSchema(SQLAlchemyAutoSchema): diff --git a/crc/models/workflow.py b/crc/models/workflow.py index 0da32aec..c39fee84 100644 --- a/crc/models/workflow.py +++ b/crc/models/workflow.py @@ -3,6 +3,7 @@ import enum import marshmallow from marshmallow import EXCLUDE from marshmallow_sqlalchemy import SQLAlchemyAutoSchema +from sqlalchemy import func from crc import db from crc.models.file import FileModel, FileDataModel @@ -87,7 +88,7 @@ class 
WorkflowModel(db.Model): workflow_spec = db.relationship("WorkflowSpecModel") total_tasks = db.Column(db.Integer, default=0) completed_tasks = db.Column(db.Integer, default=0) - last_updated = db.Column(db.DateTime) + last_updated = db.Column(db.DateTime(timezone=True),default=func.now()) # Order By is important or generating hashes on reviews. dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan", order_by="WorkflowSpecDependencyFile.file_data_id") diff --git a/migrations/versions/abeffe547305_update_type_on_task_events_table.py b/migrations/versions/abeffe547305_update_type_on_task_events_table.py new file mode 100644 index 00000000..297bac80 --- /dev/null +++ b/migrations/versions/abeffe547305_update_type_on_task_events_table.py @@ -0,0 +1,28 @@ +"""update type on task_events table and workflow table + +Revision ID: abeffe547305 +Revises: 665624ac29f1 +Create Date: 2021-04-28 08:51:16.220260 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'abeffe547305' +down_revision = '665624ac29f1' +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("alter table task_event alter column date type timestamp with time zone") + op.execute("alter table workflow alter column last_updated type timestamp with time zone") + pass + + +def downgrade(): + op.execute("alter table task_event alter column date type timestamp without time zone") + op.execute("alter table workflow alter column last_updated type timestamp without time zone") + pass From dc6f1cc80d0a5d2e513d40b10d43f77287069160 Mon Sep 17 00:00:00 2001 From: Kelly McDonald Date: Wed, 28 Apr 2021 10:03:52 -0400 Subject: [PATCH 20/37] Emulate checks on keyword arguments and then return. Validate only should have no side effects so the previous behavior was a bug. 
--- crc/scripts/file_data_get.py | 4 +++- crc/scripts/file_data_set.py | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crc/scripts/file_data_get.py b/crc/scripts/file_data_get.py index 4fa2ca5b..edd060be 100644 --- a/crc/scripts/file_data_get.py +++ b/crc/scripts/file_data_get.py @@ -10,7 +10,9 @@ class FileDataGet(Script, DataStoreBase): return """Gets user data from the data store - takes only two keyword arguments arguments: 'file_id' and 'key' """ def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): - self.do_task(task, study_id, workflow_id, *args, **kwargs) + if self.validate_kw_args(**kwargs): + myargs = [kwargs['key']] + return True def validate_kw_args(self,**kwargs): if kwargs.get('key',None) is None: diff --git a/crc/scripts/file_data_set.py b/crc/scripts/file_data_set.py index 8c5d0a49..99cbdd45 100644 --- a/crc/scripts/file_data_set.py +++ b/crc/scripts/file_data_set.py @@ -10,7 +10,11 @@ class FileDataSet(Script, DataStoreBase): return """Sets data the data store - takes three keyword arguments arguments: 'file_id' and 'key' and 'value'""" def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): - self.do_task(task, study_id, workflow_id, *args, **kwargs) + if self.validate_kw_args(**kwargs): + myargs = [kwargs['key'],kwargs['value']] + fileid = kwargs['file_id'] + del(kwargs['file_id']) + return True def validate_kw_args(self,**kwargs): if kwargs.get('key',None) is None: From 2b9cee6b89ae1d1efc781b861e4af9506c6cc4d3 Mon Sep 17 00:00:00 2001 From: Kelly McDonald Date: Thu, 29 Apr 2021 10:25:28 -0400 Subject: [PATCH 21/37] Update database to include timezone and change all points where we set the time on an event to be utc time. 
If we get something in the database with a timezone, it will display properly on the front end, but by default everything will be put in the database in UTC --- crc/api/data_store.py | 4 ++-- crc/api/study.py | 4 ++-- crc/scripts/data_store_base.py | 2 +- crc/services/file_service.py | 2 +- crc/services/study_service.py | 2 +- crc/services/workflow_processor.py | 2 +- crc/services/workflow_service.py | 4 ++-- tests/base_test.py | 4 ++-- tests/study/test_study_api.py | 2 +- tests/study/test_study_service.py | 2 +- tests/test_authentication.py | 2 +- tests/test_workflow_sync.py | 10 +++++----- 12 files changed, 20 insertions(+), 20 deletions(-) diff --git a/crc/api/data_store.py b/crc/api/data_store.py index 6299b77c..a87ff938 100644 --- a/crc/api/data_store.py +++ b/crc/api/data_store.py @@ -55,7 +55,7 @@ def update_datastore(id, body): raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.') DataStoreSchema().load(body, instance=item, session=session) - item.last_updated = datetime.now() + item.last_updated = datetime.utcnow() session.add(item) session.commit() return DataStoreSchema().dump(item) @@ -87,7 +87,7 @@ def add_datastore(body): 'but not more than one of these') item = DataStoreSchema().load(body) - item.last_updated = datetime.now() + item.last_updated = datetime.utcnow() session.add(item) session.commit() return DataStoreSchema().dump(item) diff --git a/crc/api/study.py b/crc/api/study.py index d91827b6..d0359438 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -23,7 +23,7 @@ def add_study(body): study_model = StudyModel(user_uid=UserService.current_user().uid, title=body['title'], primary_investigator_id=body['primary_investigator_id'], - last_updated=datetime.now(), + last_updated=datetime.utcnow(), status=StudyStatus.in_progress) session.add(study_model) StudyService.add_study_update_event(study_model, @@ -51,7 +51,7 @@ def update_study(study_id, body): study: Study = StudyForUpdateSchema().load(body) status = 
StudyStatus(study.status) - study_model.last_updated = datetime.now() + study_model.last_updated = datetime.utcnow() if study_model.status != status: study_model.status = status diff --git a/crc/scripts/data_store_base.py b/crc/scripts/data_store_base.py index 4a81988d..f9694fde 100644 --- a/crc/scripts/data_store_base.py +++ b/crc/scripts/data_store_base.py @@ -76,7 +76,7 @@ class DataStoreBase(object): workflow_id=workflow_id, spec_id=workflow_spec_id) study.value = args[1] - study.last_updated = datetime.now() + study.last_updated = datetime.utcnow() overwritten = self.overwritten(study.value, prev_value) session.add(study) session.commit() diff --git a/crc/services/file_service.py b/crc/services/file_service.py index 14e1b20e..222daddd 100644 --- a/crc/services/file_service.py +++ b/crc/services/file_service.py @@ -210,7 +210,7 @@ class FileService(object): new_file_data_model = FileDataModel( data=binary_data, file_model_id=file_model.id, file_model=file_model, - version=version, md5_hash=md5_checksum, date_created=datetime.now() + version=version, md5_hash=md5_checksum, date_created=datetime.utcnow() ) session.add_all([file_model, new_file_data_model]) session.commit() diff --git a/crc/services/study_service.py b/crc/services/study_service.py index 749a4e14..0f338032 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -496,7 +496,7 @@ class StudyService(object): workflow_model = WorkflowModel(status=WorkflowStatus.not_started, study=study, workflow_spec_id=spec.id, - last_updated=datetime.now()) + last_updated=datetime.utcnow()) session.add(workflow_model) session.commit() return workflow_model diff --git a/crc/services/workflow_processor.py b/crc/services/workflow_processor.py index c2ede980..5912086c 100644 --- a/crc/services/workflow_processor.py +++ b/crc/services/workflow_processor.py @@ -219,7 +219,7 @@ class WorkflowProcessor(object): self.workflow_model.status = self.get_status() self.workflow_model.total_tasks = 
len(tasks) self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states) - self.workflow_model.last_updated = datetime.now() + self.workflow_model.last_updated = datetime.utcnow() self.update_dependencies(self.spec_data_files) session.add(self.workflow_model) session.commit() diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 265b61e7..f9bea931 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -63,7 +63,7 @@ class WorkflowService(object): db.session.commit() workflow_model = WorkflowModel(status=WorkflowStatus.not_started, workflow_spec_id=spec_id, - last_updated=datetime.now(), + last_updated=datetime.utcnow(), study=study) return workflow_model @@ -714,7 +714,7 @@ class WorkflowService(object): mi_count=task.multi_instance_count, # This is the number of times the task could repeat. mi_index=task.multi_instance_index, # And the index of the currently repeating task. process_name=task.process_name, - date=datetime.now(), + date=datetime.utcnow(), ) db.session.add(task_event) db.session.commit() diff --git a/tests/base_test.py b/tests/base_test.py index 4663ac61..91b7999a 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase): { 'id': 0, 'title': 'The impact of fried pickles on beer consumption in bipedal software developers.', - 'last_updated': datetime.datetime.now(), + 'last_updated': datetime.datetime.utcnow(), 'status': StudyStatus.in_progress, 'primary_investigator_id': 'dhf8r', 'sponsor': 'Sartography Pharmaceuticals', @@ -80,7 +80,7 @@ class BaseTest(unittest.TestCase): { 'id': 1, 'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels', - 'last_updated': datetime.datetime.now(), + 'last_updated': datetime.datetime.utcnow(), 'status': StudyStatus.in_progress, 'primary_investigator_id': 'dhf8r', 'sponsor': 'Makerspace & Co.', diff --git a/tests/study/test_study_api.py 
b/tests/study/test_study_api.py index 79284f9a..409cc5c5 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -25,7 +25,7 @@ class TestStudyApi(BaseTest): TEST_STUDY = { "title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents " "for Interstellar Spacecraft", - "last_updated": datetime.now(tz=timezone.utc), + "last_updated": datetime.utcnow(), "primary_investigator_id": "tmm2x", "user_uid": "dhf8r", } diff --git a/tests/study/test_study_service.py b/tests/study/test_study_service.py index 1501597c..87f09361 100644 --- a/tests/study/test_study_service.py +++ b/tests/study/test_study_service.py @@ -47,7 +47,7 @@ class TestStudyService(BaseTest): self.assertIsNotNone(study.id) workflow = WorkflowModel(workflow_spec_id="random_fact", study_id=study.id, - status=WorkflowStatus.not_started, last_updated=datetime.now()) + status=WorkflowStatus.not_started, last_updated=datetime.utcnow()) db.session.add(workflow) db.session.commit() # Assure there is a master specification, one standard spec, and lookup tables. 
diff --git a/tests/test_authentication.py b/tests/test_authentication.py index bacb6ced..23cfe55e 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -273,7 +273,7 @@ class TestAuthentication(BaseTest): def _make_fake_study(self, uid): return { "title": "blah", - "last_updated": datetime.now(tz=timezone.utc), + "last_updated": datetime.utcnow(), "status": StudyStatus.in_progress, "primary_investigator_id": uid, "user_uid": uid, diff --git a/tests/test_workflow_sync.py b/tests/test_workflow_sync.py index 1a64c1b4..0d738859 100644 --- a/tests/test_workflow_sync.py +++ b/tests/test_workflow_sync.py @@ -52,7 +52,7 @@ class TestWorkflowSync(BaseTest): self.load_example_data() othersys = get_all_spec_state() rf2pos = get_random_fact_pos(othersys) - othersys[rf2pos]['date_created'] = str(datetime.now()) + othersys[rf2pos]['date_created'] = str(datetime.utcnow()) othersys[rf2pos]['md5_hash'] = '12345' mock_get.return_value = othersys response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock @@ -69,7 +69,7 @@ class TestWorkflowSync(BaseTest): self.load_example_data() othersys = get_all_spec_state() othersys.append({'workflow_spec_id':'my_new_workflow', - 'date_created':str(datetime.now()), + 'date_created':str(datetime.utcnow()), 'md5_hash': '12345'}) mock_get.return_value = othersys response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock @@ -121,7 +121,7 @@ class TestWorkflowSync(BaseTest): self.load_example_data() othersys = get_workflow_spec_files('random_fact') rf2pos = get_random_fact_2_pos(othersys) - othersys[rf2pos]['date_created'] = str(datetime.now()) + othersys[rf2pos]['date_created'] = str(datetime.utcnow()) othersys[rf2pos]['md5_hash'] = '12345' mock_get.return_value = othersys response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock @@ -145,7 +145,7 @@ class TestWorkflowSync(BaseTest): # change the remote file date and hash 
othersys = get_workflow_spec_files('random_fact') rf2pos = get_random_fact_2_pos(othersys) - othersys[rf2pos]['date_created'] = str(datetime.now()) + othersys[rf2pos]['date_created'] = str(datetime.utcnow()) othersys[rf2pos]['md5_hash'] = '12345' spec_files_mock.return_value = othersys # actually go get a different file @@ -179,7 +179,7 @@ class TestWorkflowSync(BaseTest): 'primary':False, 'content_type':'text/text', 'primary_process_id':None, - 'date_created':str(datetime.now()), + 'date_created':str(datetime.utcnow()), 'md5_hash':'12345' } othersys.append(newfile) From 24c818bf311454339ed7629c54ff1e2d4314dfaf Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Thu, 29 Apr 2021 14:29:21 -0400 Subject: [PATCH 22/37] Added study_id to workflow_api --- crc/api.yml | 4 +++- crc/models/api_models.py | 7 ++++--- crc/services/workflow_service.py | 3 ++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/crc/api.yml b/crc/api.yml index b9ed7df1..be95c2dd 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -427,7 +427,7 @@ paths: - name: spec_id in: path required: true - description: The unique id of an existing workflow specification to modify. + description: The unique id of an existing workflow specification. schema: type: string get: @@ -1640,6 +1640,8 @@ components: type: integer num_tasks_incomplete: type: integer + study_id: + type: integer example: id: 291234 diff --git a/crc/models/api_models.py b/crc/models/api_models.py index 2bb4abc6..308823fc 100644 --- a/crc/models/api_models.py +++ b/crc/models/api_models.py @@ -191,7 +191,7 @@ class DocumentDirectory(object): class WorkflowApi(object): def __init__(self, id, status, next_task, navigation, spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, - last_updated, is_review, title): + last_updated, is_review, title, study_id): self.id = id self.status = status self.next_task = next_task # The next task that requires user input. 
@@ -204,13 +204,14 @@ class WorkflowApi(object): self.last_updated = last_updated self.title = title self.is_review = is_review + self.study_id = study_id or '' class WorkflowApiSchema(ma.Schema): class Meta: model = WorkflowApi fields = ["id", "status", "next_task", "navigation", "workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks", - "last_updated", "is_review", "title"] + "last_updated", "is_review", "title", "study_id"] unknown = INCLUDE status = EnumField(WorkflowStatus) @@ -221,7 +222,7 @@ class WorkflowApiSchema(ma.Schema): def make_workflow(self, data, **kwargs): keys = ['id', 'status', 'next_task', 'navigation', 'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks", - "last_updated", "is_review", "title"] + "last_updated", "is_review", "title", "study_id"] filtered_fields = {key: data[key] for key in keys} filtered_fields['next_task'] = TaskSchema().make_task(data['next_task']) return WorkflowApi(**filtered_fields) diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 9edaf856..2369ceaf 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -408,7 +408,8 @@ class WorkflowService(object): completed_tasks=processor.workflow_model.completed_tasks, last_updated=processor.workflow_model.last_updated, is_review=is_review, - title=spec.display_name + title=spec.display_name, + study_id=processor.workflow_model.study_id or None ) if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks. # This may or may not work, sometimes there is no next task to complete. 
From eb153c3ffce6909f98b3ba5e434530bef5a8efd9 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Thu, 29 Apr 2021 14:29:43 -0400 Subject: [PATCH 23/37] Fixed column definition for standalone --- migrations/versions/8b976945a54e_.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/migrations/versions/8b976945a54e_.py b/migrations/versions/8b976945a54e_.py index 7805e31b..0973f193 100644 --- a/migrations/versions/8b976945a54e_.py +++ b/migrations/versions/8b976945a54e_.py @@ -18,7 +18,7 @@ depends_on = None def upgrade(): op.add_column('workflow', sa.Column('user_id', sa.String(), nullable=True)) - op.add_column('workflow_spec', sa.Column('standalone', sa.String(), default=False)) + op.add_column('workflow_spec', sa.Column('standalone', sa.Boolean(), default=False)) op.execute("UPDATE workflow_spec SET standalone=False WHERE standalone is null;") op.execute("ALTER TABLE task_event ALTER COLUMN study_id DROP NOT NULL") From a719cf4bf99db1f0d616ec6f465334fda8fc5d9f Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 30 Apr 2021 11:55:12 -0400 Subject: [PATCH 24/37] When retrieving the study, only update the status of underlying workflows if specifically requested. Record the size of a file in the database for quick access (this helps with a frontend refactor, so it isn't downloading the file just to see its size) Cleaning up the timing/performance metric reporting to make it easier to read. 
Fixing a bug that prevented non-admins from getting the document-directory --- crc/api.yml | 8 ++++++++ crc/api/study.py | 4 ++-- crc/models/file.py | 12 +++++++++--- crc/services/cache_service.py | 4 ++-- crc/services/file_service.py | 5 ++++- crc/services/study_service.py | 2 +- crc/services/workflow_processor.py | 2 +- migrations/versions/62910318009f_.py | 28 ++++++++++++++++++++++++++++ tests/files/test_file_service.py | 2 +- tests/files/test_files_api.py | 12 ++++++------ 10 files changed, 62 insertions(+), 17 deletions(-) create mode 100644 migrations/versions/62910318009f_.py diff --git a/crc/api.yml b/crc/api.yml index 980d6d13..f8f4ee0f 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -83,6 +83,8 @@ paths: type : integer get: operationId: crc.api.file.get_document_directory + security: + - auth_admin: ['secret'] summary: Returns a directory of all files for study in a nested structure tags: - Document Categories @@ -349,6 +351,12 @@ paths: schema: type: integer format: int32 + - name: update_status + in: query + required: false + description: If set to true, will synch the study with protocol builder and assure the status of all workflows is up to date (expensive). 
+ schema: + type: boolean get: operationId: crc.api.study.get_study summary: Provides a single study diff --git a/crc/api/study.py b/crc/api/study.py index d91827b6..ddec1e02 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -74,8 +74,8 @@ def update_study(study_id, body): return StudySchema().dump(study) -def get_study(study_id): - study = StudyService.get_study(study_id) +def get_study(study_id, update_status=False): + study = StudyService.get_study(study_id, do_status=update_status) if (study is None): raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404) return StudySchema().dump(study) diff --git a/crc/models/file.py b/crc/models/file.py index 8e73d12a..cdf29faf 100644 --- a/crc/models/file.py +++ b/crc/models/file.py @@ -1,7 +1,7 @@ import enum from typing import cast -from marshmallow import INCLUDE, EXCLUDE +from marshmallow import INCLUDE, EXCLUDE, fields, Schema from marshmallow_enum import EnumField from marshmallow_sqlalchemy import SQLAlchemyAutoSchema from sqlalchemy import func, Index @@ -65,11 +65,13 @@ class FileDataModel(db.Model): md5_hash = db.Column(UUID(as_uuid=True), unique=False, nullable=False) data = deferred(db.Column(db.LargeBinary)) # Don't load it unless you have to. 
version = db.Column(db.Integer, default=0) + size = db.Column(db.Integer, default=0) date_created = db.Column(db.DateTime(timezone=True), default=func.now()) file_model_id = db.Column(db.Integer, db.ForeignKey('file.id')) file_model = db.relationship("FileModel", foreign_keys=[file_model_id]) + class FileModel(db.Model): __tablename__ = 'file' id = db.Column(db.Integer, primary_key=True) @@ -117,11 +119,13 @@ class File(object): if data_model: instance.last_modified = data_model.date_created instance.latest_version = data_model.version + instance.size = data_model.size else: instance.last_modified = None instance.latest_version = None return instance + class FileModelSchema(SQLAlchemyAutoSchema): class Meta: model = FileModel @@ -132,17 +136,19 @@ class FileModelSchema(SQLAlchemyAutoSchema): type = EnumField(FileType) -class FileSchema(ma.Schema): +class FileSchema(Schema): class Meta: model = File fields = ["id", "name", "is_status", "is_reference", "content_type", "primary", "primary_process_id", "workflow_spec_id", "workflow_id", "irb_doc_code", "last_modified", "latest_version", "type", "categories", - "description", "category", "description", "download_name"] + "description", "category", "description", "download_name", "size"] + unknown = INCLUDE type = EnumField(FileType) + class LookupFileModel(db.Model): """Gives us a quick way to tell what kind of lookup is set on a form field. 
Connected to the file data model, so that if a new version of the same file is diff --git a/crc/services/cache_service.py b/crc/services/cache_service.py index ea6b1a6b..eb662fda 100644 --- a/crc/services/cache_service.py +++ b/crc/services/cache_service.py @@ -10,7 +10,7 @@ def firsttime(): def sincetime(txt,lasttime): thistime=firsttime() - print('%s runtime was %2f'%(txt,thistime-lasttime)) + print('%2.4f sec | %s' % (thistime-lasttime, txt)) return thistime def timeit(f): @@ -20,7 +20,7 @@ def timeit(f): ts = time.time() result = f(*args, **kw) te = time.time() - print('func:%r args:[%r, %r] took: %2.4f sec' % (f.__name__, args, kw, te-ts)) + print('%2.4f sec | func:%r args:[%r, %r] ' % (te-ts, f.__name__, args, kw)) return result return timed diff --git a/crc/services/file_service.py b/crc/services/file_service.py index 14e1b20e..2ecd0362 100644 --- a/crc/services/file_service.py +++ b/crc/services/file_service.py @@ -175,6 +175,8 @@ class FileService(object): order_by(desc(FileDataModel.date_created)).first() md5_checksum = UUID(hashlib.md5(binary_data).hexdigest()) + size = len(binary_data) + if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash): # This file does not need to be updated, it's the same file. If it is arhived, # then de-arvhive it. 
@@ -210,7 +212,8 @@ class FileService(object): new_file_data_model = FileDataModel( data=binary_data, file_model_id=file_model.id, file_model=file_model, - version=version, md5_hash=md5_checksum, date_created=datetime.now() + version=version, md5_hash=md5_checksum, date_created=datetime.now(), + size=size ) session.add_all([file_model, new_file_data_model]) session.commit() diff --git a/crc/services/study_service.py b/crc/services/study_service.py index 749a4e14..331d6a35 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -53,7 +53,7 @@ class StudyService(object): return studies @staticmethod - def get_study(study_id, study_model: StudyModel = None, do_status=True): + def get_study(study_id, study_model: StudyModel = None, do_status=False): """Returns a study model that contains all the workflows organized by category. IMPORTANT: This is intended to be a lightweight call, it should never involve loading up and executing all the workflows in a study to calculate information.""" diff --git a/crc/services/workflow_processor.py b/crc/services/workflow_processor.py index c2ede980..d9f3556c 100644 --- a/crc/services/workflow_processor.py +++ b/crc/services/workflow_processor.py @@ -42,7 +42,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine): """ return self.evaluate_expression(task, expression) - + @timeit def execute(self, task: SpiffTask, script, data): study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY] diff --git a/migrations/versions/62910318009f_.py b/migrations/versions/62910318009f_.py new file mode 100644 index 00000000..3915480f --- /dev/null +++ b/migrations/versions/62910318009f_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: 62910318009f +Revises: 665624ac29f1 +Create Date: 2021-04-28 14:09:57.648732 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '62910318009f' +down_revision = '665624ac29f1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file_data', sa.Column('size', sa.Integer(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('file_data', 'size') + # ### end Alembic commands ### diff --git a/tests/files/test_file_service.py b/tests/files/test_file_service.py index 339943d4..53a644a0 100644 --- a/tests/files/test_file_service.py +++ b/tests/files/test_file_service.py @@ -72,7 +72,7 @@ class TestFileService(BaseTest): file_data = FileService.get_workflow_data_files(workflow_id=workflow.id) self.assertEqual(1, len(file_data)) self.assertEqual(2, file_data[0].version) - + self.assertEquals(4, file_data[0].size) # File dat size is included. def test_add_file_from_form_increments_version_and_replaces_on_subsequent_add_with_same_name(self): self.load_example_data() diff --git a/tests/files/test_files_api.py b/tests/files/test_files_api.py index ac146878..cce55fb5 100644 --- a/tests/files/test_files_api.py +++ b/tests/files/test_files_api.py @@ -181,11 +181,11 @@ class TestFilesApi(BaseTest): data['file'] = io.BytesIO(self.minimal_bpmn("abcdef")), 'my_new_file.bpmn' rv = self.app.post('/v1.0/file?workflow_spec_id=%s' % spec.id, data=data, follow_redirects=True, content_type='multipart/form-data', headers=self.logged_in_headers()) - json_data = json.loads(rv.get_data(as_text=True)) - file = FileModelSchema().load(json_data, session=session) + file_json = json.loads(rv.get_data(as_text=True)) + self.assertEquals(80, file_json['size']) data['file'] = io.BytesIO(self.minimal_bpmn("efghijk")), 'my_new_file.bpmn' - rv = self.app.put('/v1.0/file/%i/data' % file.id, data=data, follow_redirects=True, + rv = self.app.put('/v1.0/file/%i/data' % file_json['id'], data=data, follow_redirects=True, 
content_type='multipart/form-data', headers=self.logged_in_headers()) self.assert_success(rv) self.assertIsNotNone(rv.get_data()) @@ -193,14 +193,14 @@ class TestFilesApi(BaseTest): self.assertEqual(2, file_json['latest_version']) self.assertEqual(FileType.bpmn.value, file_json['type']) self.assertEqual("application/octet-stream", file_json['content_type']) - self.assertEqual(spec.id, file.workflow_spec_id) + self.assertEqual(spec.id, file_json['workflow_spec_id']) # Assure it is updated in the database and properly persisted. - file_model = session.query(FileModel).filter(FileModel.id == file.id).first() + file_model = session.query(FileModel).filter(FileModel.id == file_json['id']).first() file_data = FileService.get_file_data(file_model.id) self.assertEqual(2, file_data.version) - rv = self.app.get('/v1.0/file/%i/data' % file.id, headers=self.logged_in_headers()) + rv = self.app.get('/v1.0/file/%i/data' % file_json['id'], headers=self.logged_in_headers()) self.assert_success(rv) data = rv.get_data() self.assertIsNotNone(data) From 22b3230243f004bf21ea21a361d352ca0e60f9e6 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Sat, 1 May 2021 15:20:47 -0400 Subject: [PATCH 25/37] Added service `get_irb_info` to `crc.services.protocol_builder.ProtocolBuilderService` Added environment variable `PB_IRB_INFO_URL` for new `get_irb_info` service --- config/default.py | 1 + crc/services/protocol_builder.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/config/default.py b/config/default.py index 0aa4e254..de512b38 100644 --- a/config/default.py +++ b/config/default.py @@ -60,6 +60,7 @@ PB_INVESTIGATORS_URL = environ.get('PB_INVESTIGATORS_URL', default=PB_BASE_URL + PB_REQUIRED_DOCS_URL = environ.get('PB_REQUIRED_DOCS_URL', default=PB_BASE_URL + "required_docs?studyid=%i") PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL + "study?studyid=%i") PB_SPONSORS_URL = environ.get('PB_SPONSORS_URL', default=PB_BASE_URL + "sponsors?studyid=%i") 
+PB_IRB_INFO_URL = environ.get('PB_IRB_INFO_URL', default=PB_BASE_URL + "current_irb_info/%i") # Ldap Configuration LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http:// diff --git a/crc/services/protocol_builder.py b/crc/services/protocol_builder.py index 806055c7..6107280b 100644 --- a/crc/services/protocol_builder.py +++ b/crc/services/protocol_builder.py @@ -14,6 +14,7 @@ class ProtocolBuilderService(object): REQUIRED_DOCS_URL = app.config['PB_REQUIRED_DOCS_URL'] STUDY_DETAILS_URL = app.config['PB_STUDY_DETAILS_URL'] SPONSORS_URL = app.config['PB_SPONSORS_URL'] + IRB_INFO_URL = app.config['PB_IRB_INFO_URL'] @staticmethod def is_enabled(): @@ -55,6 +56,10 @@ class ProtocolBuilderService(object): def get_study_details(study_id) -> {}: return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.STUDY_DETAILS_URL) + @staticmethod + def get_irb_info(study_id) -> {}: + return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.IRB_INFO_URL) + @staticmethod def get_sponsors(study_id) -> {}: return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.SPONSORS_URL) From 1bcf4ea02e487b6a96f887e7d6ae34acbfb4e347 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Sat, 1 May 2021 15:23:07 -0400 Subject: [PATCH 26/37] Test and mock data for new `get_irb_info` service in ProtocolBuilderService --- tests/data/pb_responses/irb_info.json | 38 +++++++++++++++++++++++++++ tests/test_protocol_builder.py | 14 ++++++++++ 2 files changed, 52 insertions(+) create mode 100644 tests/data/pb_responses/irb_info.json diff --git a/tests/data/pb_responses/irb_info.json b/tests/data/pb_responses/irb_info.json new file mode 100644 index 00000000..6ad3b5f2 --- /dev/null +++ b/tests/data/pb_responses/irb_info.json @@ -0,0 +1,38 @@ +[ + { + "AGENDA_DATE": "2021-04-15T00:00:00+00:00", + "DATE_MODIFIED": "2021-04-15T00:00:00+00:00", + "IRBEVENT": "IRB Event 1", + "IRB_ADMINISTRATIVE_REVIEWER": 
"IRB Admin Reviewer 1", + "IRB_OF_RECORD": "IRB of Record 1", + "IRB_REVIEW_TYPE": "IRB Review Type 1", + "IRB_STATUS": "IRB Status 1", + "STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 1", + "UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 1, + "UVA_STUDY_TRACKING": "UVA Study Tracking 1" + }, + { + "AGENDA_DATE": "2021-04-15T00:00:00+00:00", + "DATE_MODIFIED": "2021-04-15T00:00:00+00:00", + "IRBEVENT": "IRB Event 2", + "IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 2", + "IRB_OF_RECORD": "IRB of Record 2", + "IRB_REVIEW_TYPE": "IRB Review Type 2", + "IRB_STATUS": "IRB Status 2", + "STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 2", + "UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 2, + "UVA_STUDY_TRACKING": "UVA Study Tracking 2" + }, + { + "AGENDA_DATE": "2021-04-15T00:00:00+00:00", + "DATE_MODIFIED": "2021-04-15T00:00:00+00:00", + "IRBEVENT": "IRB Event 3", + "IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 3", + "IRB_OF_RECORD": "IRB of Record 3", + "IRB_REVIEW_TYPE": "IRB Review Type 3", + "IRB_STATUS": "IRB Status 3", + "STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 3", + "UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 3, + "UVA_STUDY_TRACKING": "UVA Study Tracking 3" + } +] \ No newline at end of file diff --git a/tests/test_protocol_builder.py b/tests/test_protocol_builder.py index b76f248e..1a90e51e 100644 --- a/tests/test_protocol_builder.py +++ b/tests/test_protocol_builder.py @@ -60,3 +60,17 @@ class TestProtocolBuilder(BaseTest): self.assertEqual(2, response[0]["SS_STUDY"]) self.assertEqual(2453, response[0]["SPONSOR_ID"]) self.assertEqual("Abbott Ltd", response[0]["SP_NAME"]) + + @patch('crc.services.protocol_builder.requests.get') + def test_get_irb_info(self, mock_get): + app.config['PB_ENABLED'] = True + mock_get.return_value.ok = True + mock_get.return_value.text = self.protocol_builder_response('irb_info.json') + response = ProtocolBuilderService.get_irb_info(self.test_study_id) + self.assertIsNotNone(response) + self.assertEqual(3, 
len(response)) + self.assertEqual('IRB Event 1', response[0]["IRBEVENT"]) + self.assertEqual('IRB Event 2', response[1]["IRBEVENT"]) + self.assertEqual('IRB Event 3', response[2]["IRBEVENT"]) + + print('test_get_irb_info') \ No newline at end of file From 060ee5076db2bd33156d86c15be3c4fe16f8913b Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Sat, 1 May 2021 15:52:41 -0400 Subject: [PATCH 27/37] New script to get IRB Info for a study --- crc/scripts/get_irb_info.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 crc/scripts/get_irb_info.py diff --git a/crc/scripts/get_irb_info.py b/crc/scripts/get_irb_info.py new file mode 100644 index 00000000..feba061a --- /dev/null +++ b/crc/scripts/get_irb_info.py @@ -0,0 +1,24 @@ +from crc.scripts.script import Script +from crc.api.common import ApiError + +from crc.services.protocol_builder import ProtocolBuilderService + + +class IRBInfo(Script): + + pb = ProtocolBuilderService() + + def get_description(self): + return """Returns the IRB Info data for a Study""" + + def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): + return isinstance(study_id, int) + + def do_task(self, task, study_id, workflow_id, *args, **kwargs): + irb_info = self.pb.get_irb_info(study_id) + if isinstance(irb_info, dict): + return irb_info + else: + raise ApiError.from_task(code='missing_irb_info', + message=f'There was a problem retrieving IRB Info for study {study_id}.', + task=task) From 9465b6408dc99bf5d16b9cb4050ceb4e439964fa Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Sat, 1 May 2021 15:53:43 -0400 Subject: [PATCH 28/37] Test and workflow for new `get_irb_info` script --- .../data/irb_info_script/irb_info_script.bpmn | 52 +++++++++++++++++++ tests/test_irb_info_script.py | 16 ++++++ 2 files changed, 68 insertions(+) create mode 100644 tests/data/irb_info_script/irb_info_script.bpmn create mode 100644 tests/test_irb_info_script.py diff --git 
a/tests/data/irb_info_script/irb_info_script.bpmn b/tests/data/irb_info_script/irb_info_script.bpmn new file mode 100644 index 00000000..9ba223eb --- /dev/null +++ b/tests/data/irb_info_script/irb_info_script.bpmn @@ -0,0 +1,52 @@ + + + + + SequenceFlow_0xey0zw + + + + SequenceFlow_0xey0zw + SequenceFlow_03hympo + irb_info = get_irb_info() + + + + SequenceFlow_1s6cthx + + + + IRB Info: {{irb_info}} + SequenceFlow_03hympo + SequenceFlow_1s6cthx + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/test_irb_info_script.py b/tests/test_irb_info_script.py new file mode 100644 index 00000000..c7e62626 --- /dev/null +++ b/tests/test_irb_info_script.py @@ -0,0 +1,16 @@ +from tests.base_test import BaseTest +from crc.services.protocol_builder import ProtocolBuilderService + +from crc import app, session + + +class TestIRBInfo(BaseTest): + + def test_irb_info_script(self): + app.config['PB_ENABLED'] = True + workflow = self.create_workflow('irb_info_script') + irb_info = ProtocolBuilderService.get_irb_info(workflow.study_id) + workflow_api = self.get_workflow_api(workflow) + first_task = workflow_api.next_task + self.assertEqual('Task_PrintInfo', first_task.name) + self.assertEqual(f'IRB Info: {irb_info}', first_task.documentation) From 804926dbac64c850ab9a93da841cb338a4924d97 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Sat, 1 May 2021 15:55:20 -0400 Subject: [PATCH 29/37] Fixed problem where WorkflowService.make_test_workflow did not add a study to the workflow_model --- crc/services/workflow_service.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 265b61e7..421a8b65 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -57,10 +57,12 @@ class WorkflowService(object): user = db.session.query(UserModel).filter_by(uid="test").first() if not user: db.session.add(UserModel(uid="test")) + db.session.commit() study = 
db.session.query(StudyModel).filter_by(user_uid="test").first() if not study: db.session.add(StudyModel(user_uid="test", title="test")) db.session.commit() + study = db.session.query(StudyModel).filter_by(user_uid="test").first() workflow_model = WorkflowModel(status=WorkflowStatus.not_started, workflow_spec_id=spec_id, last_updated=datetime.now(), From c8446bfafd5e66e58da65f5f250e3c67ac7400f6 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 3 May 2021 14:51:27 -0400 Subject: [PATCH 30/37] Mock the Protocol Builder response in my test --- tests/test_irb_info_script.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/test_irb_info_script.py b/tests/test_irb_info_script.py index c7e62626..d0aba8a0 100644 --- a/tests/test_irb_info_script.py +++ b/tests/test_irb_info_script.py @@ -1,13 +1,18 @@ from tests.base_test import BaseTest -from crc.services.protocol_builder import ProtocolBuilderService from crc import app, session +from crc.services.protocol_builder import ProtocolBuilderService + +from unittest.mock import patch class TestIRBInfo(BaseTest): - def test_irb_info_script(self): + @patch('crc.services.protocol_builder.requests.get') + def test_irb_info_script(self, mock_get): app.config['PB_ENABLED'] = True + mock_get.return_value.ok = True + mock_get.return_value.text = self.protocol_builder_response('irb_info.json') workflow = self.create_workflow('irb_info_script') irb_info = ProtocolBuilderService.get_irb_info(workflow.study_id) workflow_api = self.get_workflow_api(workflow) From 576f3a661def8cdc8925a7b35b0ad00868df758d Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 3 May 2021 14:52:01 -0400 Subject: [PATCH 31/37] Removed print statement used while debugging. 
--- tests/test_protocol_builder.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_protocol_builder.py b/tests/test_protocol_builder.py index 1a90e51e..d82d7f0b 100644 --- a/tests/test_protocol_builder.py +++ b/tests/test_protocol_builder.py @@ -72,5 +72,3 @@ class TestProtocolBuilder(BaseTest): self.assertEqual('IRB Event 1', response[0]["IRBEVENT"]) self.assertEqual('IRB Event 2', response[1]["IRBEVENT"]) self.assertEqual('IRB Event 3', response[2]["IRBEVENT"]) - - print('test_get_irb_info') \ No newline at end of file From 7689281d685a1dc6d865e0d4eb65ae3ca475d835 Mon Sep 17 00:00:00 2001 From: mike cullerton Date: Mon, 3 May 2021 14:52:29 -0400 Subject: [PATCH 32/37] Fixed typo. Response should be list, not dict. --- crc/scripts/get_irb_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crc/scripts/get_irb_info.py b/crc/scripts/get_irb_info.py index feba061a..0404bdad 100644 --- a/crc/scripts/get_irb_info.py +++ b/crc/scripts/get_irb_info.py @@ -16,7 +16,7 @@ class IRBInfo(Script): def do_task(self, task, study_id, workflow_id, *args, **kwargs): irb_info = self.pb.get_irb_info(study_id) - if isinstance(irb_info, dict): + if irb_info: return irb_info else: raise ApiError.from_task(code='missing_irb_info', From 8f28970f920a88078f6f71be5cb4ae5c8551d60c Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 4 May 2021 13:39:49 -0400 Subject: [PATCH 33/37] Resolving an alembic conflict. Upgrading libraries, and resolving issues from that upgrade, including changes to how we manage tokens. This seems to be working locally. 
--- Pipfile.lock | 383 +++++++++------------------ crc/api/study.py | 2 +- crc/api/user.py | 2 +- migrations/versions/665624ac29f1_.py | 5 +- migrations/versions/c1449d1d1681_.py | 24 ++ tests/base_test.py | 2 +- tests/files/test_file_service.py | 2 +- tests/study/test_study_api.py | 7 +- tests/test_authentication.py | 10 +- 9 files changed, 170 insertions(+), 267 deletions(-) create mode 100644 migrations/versions/c1449d1d1681_.py diff --git a/Pipfile.lock b/Pipfile.lock index 4064fec4..099e141e 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -25,19 +25,11 @@ }, "alembic": { "hashes": [ - "sha256:8a259f0a4c8b350b03579d77ce9e810b19c65bf0af05f84efb69af13ad50801e", - "sha256:e27fd67732c97a1c370c33169ef4578cf96436fa0e7dcfaeeef4a917d0737d56" + "sha256:3ff4f90d23dd283d7822d78ffbc07cb256344ae1d60500b933378bc13407efcc", + "sha256:d7f6d4dc6abed18e1591932a85349a7d621298ef0daa40021609cdca54a6047c" ], "index": "pypi", - "version": "==1.5.8" - }, - "amqp": { - "hashes": [ - "sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2", - "sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.6" + "version": "==1.6.0" }, "aniso8601": { "hashes": [ @@ -51,16 +43,14 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "babel": { "hashes": [ - "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", - "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" + "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9", + "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.0" + "version": "==2.9.1" 
}, "bcrypt": { "hashes": [ @@ -72,7 +62,6 @@ "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1", "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d" ], - "markers": "python_version >= '3.6'", "version": "==3.2.0" }, "beautifulsoup4": { @@ -83,27 +72,12 @@ ], "version": "==4.9.3" }, - "billiard": { - "hashes": [ - "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", - "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" - ], - "version": "==3.6.4.0" - }, "blinker": { "hashes": [ "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" ], "version": "==1.4" }, - "celery": { - "hashes": [ - "sha256:5e8d364e058554e83bbb116e8377d90c79be254785f357cb2cec026e79febe13", - "sha256:f4efebe6f8629b0da2b8e529424de376494f5b7a743c321c8a2ddc2b1414921c" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.5" - }, "certifi": { "hashes": [ "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", @@ -158,7 +132,6 @@ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.0.0" }, "click": { @@ -166,29 +139,8 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, - "click-didyoumean": { - "hashes": [ - "sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb" - ], - "version": "==0.0.3" - }, - "click-plugins": { - "hashes": [ - "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", - "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" - ], - "version": "==1.1.1" - }, - "click-repl": { - 
"hashes": [ - "sha256:9c4c3d022789cae912aad8a3f5e1d7c2cdd016ee1225b5212ad3e8691563cda5", - "sha256:b9f29d52abc4d6059f8e276132a111ab8d94980afe6a5432b9d996544afa95d5" - ], - "version": "==0.1.6" - }, "clickclick": { "hashes": [ "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c", @@ -203,14 +155,6 @@ ], "version": "==0.9.1" }, - "configparser": { - "hashes": [ - "sha256:85d5de102cfe6d14a5172676f09d19c465ce63d6019cf0a4ef13385fc535e828", - "sha256:af59f2cdd7efbdd5d111c1976ecd0b82db9066653362f0962d7bf1d3ab89a1fa" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.2" - }, "connexion": { "extras": [ "swagger-ui" @@ -285,30 +229,29 @@ "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771", "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.12" }, "docutils": { "hashes": [ - "sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf", - "sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c" + "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", + "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.17" + "version": "==0.16" }, "docxtpl": { "hashes": [ - "sha256:18b81c072254b2eef3cbf878951a93515f4d287c319856d3dd7ada53a3ffd5ad", - "sha256:c26ad7a0e6e9aedc7dfaf6b09af60ee0a4f001a065896503056ce9171bd9024a" + "sha256:b7370b11fd226a712fcbf36ff45d34993253cce268846ee1c8309cef649cffa2", + "sha256:d7b78a5da704800c53e4434a7ef397faa19e56d21595af23f70b89a84e56ac1d" ], "index": "pypi", - "version": "==0.11.3" + "version": "==0.11.4" }, "et-xmlfile": { "hashes": [ - "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" + "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c", + 
"sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada" ], - "version": "==1.0.1" + "version": "==1.1.0" }, "flask": { "hashes": [ @@ -320,10 +263,10 @@ }, "flask-admin": { "hashes": [ - "sha256:145f59407d78319925e20f7c3021f60c71f0cacc98e916e52000845dc4c63621" + "sha256:eb06a1f31b98881dee53a55c64faebd1990d6aac38826364b280df0b2679ff74" ], "index": "pypi", - "version": "==1.5.7" + "version": "==1.5.8" }, "flask-bcrypt": { "hashes": [ @@ -376,16 +319,8 @@ "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912", "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.5.1" }, - "future": { - "hashes": [ - "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.18.2" - }, "greenlet": { "hashes": [ "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", @@ -437,6 +372,7 @@ }, "gunicorn": { "hashes": [ + "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e", "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8" ], "index": "pypi", @@ -454,7 +390,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -462,7 +397,6 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "inflection": { @@ -470,7 +404,6 @@ "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417", 
"sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2" ], - "markers": "python_version >= '3.5'", "version": "==0.5.1" }, "isodate": { @@ -485,7 +418,6 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jinja2": { @@ -493,7 +425,6 @@ "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.3" }, "jsonschema": { @@ -503,21 +434,10 @@ ], "version": "==3.2.0" }, - "kombu": { - "hashes": [ - "sha256:6dc509178ac4269b0e66ab4881f70a2035c33d3a622e20585f965986a5182006", - "sha256:f4965fba0a4718d47d470beeb5d6446e3357a62402b16c510b6a2f251e05ac3c" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.2" - }, "ldap3": { "hashes": [ "sha256:18c3ee656a6775b9b0d60f7c6c5b094d878d1d90fc03d56731039f0a4b546a91", - "sha256:afc6fc0d01f02af82cd7bfabd3bbfd5dc96a6ae91e97db0a2dab8a0f1b436056", - "sha256:c1df41d89459be6f304e0ceec4b00fdea533dbbcd83c802b1272dcdb94620b57", - "sha256:8c949edbad2be8a03e719ba48bd6779f327ec156929562814b3e84ab56889c8c", - "sha256:4139c91f0eef9782df7b77c8cbc6243086affcb6a8a249b768a9658438e5da59" + "sha256:c1df41d89459be6f304e0ceec4b00fdea533dbbcd83c802b1272dcdb94620b57" ], "index": "pypi", "version": "==2.9" @@ -527,18 +447,24 @@ "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d", "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3", "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2", + "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae", "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f", 
"sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927", "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3", "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7", + "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59", "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f", "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade", + "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96", "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468", "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b", "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4", + "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354", "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83", "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04", + "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16", "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791", + "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a", "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51", "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1", "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a", @@ -551,10 +477,14 @@ "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa", "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106", "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d", + "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617", "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4", + "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92", "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0", 
"sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4", + "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24", "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2", + "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e", "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0", "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654", "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2", @@ -569,7 +499,6 @@ "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab", "sha256:aea166356da44b9b830c8023cd9b557fa856bd8b4035d6de771ca027dfc5cc6e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.4" }, "markdown": { @@ -635,7 +564,6 @@ "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "marshmallow": { @@ -656,11 +584,11 @@ }, "marshmallow-sqlalchemy": { "hashes": [ - "sha256:b217c6327bcf291e843dc1c2c20f0915061d4ecc303f0c5be40f23206607f702", - "sha256:ee3ead3b83de6608c6850ff60515691b0dc556ca226680f8a82b9f785cdb71b1" + "sha256:f1491f83833ac9c8406ba603458b1447fdfd904194833aab4b3cc01ef3646944", + "sha256:f861888ae3299f2c1f18cd94f02147ced70cd1b4986b2c5077e4a1036018d2a2" ], "index": "pypi", - "version": "==0.24.2" + "version": "==0.25.0" }, "numpy": { "hashes": [ @@ -689,7 +617,6 @@ "sha256:e9459f40244bb02b2f14f6af0cd0732791d72232bbb0dc4bab57ef88e75f6935", "sha256:edb1f041a9146dcf02cd7df7187db46ab524b9af2515f392f337c7cbbf5b52cd" ], - "markers": "python_version >= '3.7'", "version": "==1.20.2" }, "openapi-schema-validator": { @@ -698,7 +625,6 @@ "sha256:a4b2712020284cee880b4c55faa513fbc2f8f07f365deda6098f8ab943c9f0df", 
"sha256:b65d6c2242620bfe76d4c749b61cd9657e4528895a8f4fb6f916085b508ebd24" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.1.5" }, "openapi-spec-validator": { @@ -707,7 +633,6 @@ "sha256:53ba3d884e98ff2062d5ada025aa590541dcd665b8f81067dc82dd61c0923759", "sha256:e11df7c559339027bd04f2399bc82474983129a6a7a6a0421eaa95e2c844d686" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.3.0" }, "openpyxl": { @@ -723,38 +648,29 @@ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, "pandas": { "hashes": [ - "sha256:09761bf5f8c741d47d4b8b9073288de1be39bbfccc281d70b889ade12b2aad29", - "sha256:0f27fd1adfa256388dc34895ca5437eaf254832223812afd817a6f73127f969c", - "sha256:43e00770552595c2250d8d712ec8b6e08ca73089ac823122344f023efa4abea3", - "sha256:46fc671c542a8392a4f4c13edc8527e3a10f6cb62912d856f82248feb747f06e", - "sha256:475b7772b6e18a93a43ea83517932deff33954a10d4fbae18d0c1aba4182310f", - "sha256:4d821b9b911fc1b7d428978d04ace33f0af32bb7549525c8a7b08444bce46b74", - "sha256:5e3c8c60541396110586bcbe6eccdc335a38e7de8c217060edaf4722260b158f", - "sha256:621c044a1b5e535cf7dcb3ab39fca6f867095c3ef223a524f18f60c7fee028ea", - "sha256:72ffcea00ae8ffcdbdefff800284311e155fbb5ed6758f1a6110fc1f8f8f0c1c", - "sha256:8a051e957c5206f722e83f295f95a2cf053e890f9a1fba0065780a8c2d045f5d", - "sha256:97b1954533b2a74c7e20d1342c4f01311d3203b48f2ebf651891e6a6eaf01104", - "sha256:9f5829e64507ad10e2561b60baf285c470f3c4454b007c860e77849b88865ae7", - "sha256:a93e34f10f67d81de706ce00bf8bb3798403cabce4ccb2de10c61b5ae8786ab5", - "sha256:d59842a5aa89ca03c2099312163ffdd06f56486050e641a45d926a072f04d994", - "sha256:dbb255975eb94143f2e6ec7dadda671d25147939047839cd6b8a4aff0379bb9b", - 
"sha256:df6f10b85aef7a5bb25259ad651ad1cc1d6bb09000595cab47e718cbac250b1d" + "sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4", + "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a", + "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b", + "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895", + "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f", + "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded", + "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12", + "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279", + "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858", + "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa", + "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4", + "sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758", + "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558", + "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686", + "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6", + "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950" ], "index": "pypi", - "version": "==1.2.3" - }, - "prompt-toolkit": { - "hashes": [ - "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04", - "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc" - ], - "markers": "python_full_version >= '3.6.1'", - "version": "==3.0.18" + "version": "==1.2.4" }, "psycopg2-binary": { "hashes": [ @@ -799,19 +715,8 @@ }, "pyasn1": { "hashes": [ - "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", - "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", - "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", - 
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", - "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", - "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", - "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", - "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", - "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", - "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", - "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8" + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" ], "version": "==0.4.8" }, @@ -820,56 +725,67 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygithub": { "hashes": [ - "sha256:300bc16e62886ca6537b0830e8f516ea4bc3ef12d308e0c5aff8bdbd099173d4", - "sha256:87afd6a67ea582aa7533afdbf41635725f13d12581faed7e3e04b1579c0c0627" + "sha256:1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283", + "sha256:2caf0054ea079b71e539741ae56c5a95e073b81fa472ce222e81667381b9601b" ], "index": "pypi", - "version": "==1.54.1" + "version": "==1.55" }, "pygments": { "hashes": [ - "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", - "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" + "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", + "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" ], - "markers": "python_version >= '3.5'", - "version": "==2.8.1" + "version": "==2.9.0" }, "pyjwt": { "hashes": [ 
- "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", - "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96" + "sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1", + "sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130" ], "index": "pypi", - "version": "==1.7.1" + "version": "==2.1.0" + }, + "pynacl": { + "hashes": [ + "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4", + "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4", + "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574", + "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d", + "sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634", + "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25", + "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f", + "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505", + "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122", + "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7", + "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420", + "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f", + "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96", + "sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6", + "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6", + "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514", + "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff", + "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80" + ], + "version": "==1.4.0" }, "pyparsing": { "hashes": [ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], 
- "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { "hashes": [ "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" ], - "markers": "python_version >= '3.5'", "version": "==0.17.3" }, - "python-box": { - "hashes": [ - "sha256:4ed4ef5d34de505a65c01e3f1911de8cdb29484fcae0c035141dce535c6c194a", - "sha256:f2a531f9f5bbef078c175fad6abb31e9b59d40d121ea79993197e6bb221c6be6" - ], - "markers": "python_version >= '3.6'", - "version": "==5.3.0" - }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -887,10 +803,8 @@ "python-editor": { "hashes": [ "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", - "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", - "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", - "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", - "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b" + "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" ], "version": "==1.0.4" }, @@ -940,7 +854,6 @@ "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", "version": "==5.4.1" }, "recommonmark": { @@ -975,7 +888,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -995,18 +907,17 @@ }, "sphinx": { "hashes": [ - 
"sha256:3f01732296465648da43dec8fb40dc451ba79eb3e2cc5c6d79005fd98197107d", - "sha256:ce9c228456131bab09a3d7d10ae58474de562a6f79abb3dc811ae401cf8c1abc" + "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1", + "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8" ], "index": "pypi", - "version": "==3.5.3" + "version": "==3.5.4" }, "sphinxcontrib-applehelp": { "hashes": [ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -1014,7 +925,6 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -1022,7 +932,6 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -1030,7 +939,6 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], - "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -1038,7 +946,6 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -1046,52 +953,50 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "spiffworkflow": { "git": 
"https://github.com/sartography/SpiffWorkflow.git", - "ref": "382048e31e872d23188fab6bec68323f593ccc19" + "ref": "54a3cda7c40b7ee35e0b21ede92606d1097279dd" }, "sqlalchemy": { "hashes": [ - "sha256:02b039e0e7e6de2f15ea2d2de3995e31a170e700ec0b37b4eded662171711d19", - "sha256:08943201a1e3c6238e48f4d5d56c27ea1e1b39d3d9f36a9d81fc3cfb0e1b83bd", - "sha256:0ee0054d4a598d2920cae14bcbd33e200e02c5e3b47b902627f8cf5d4c9a2a4b", - "sha256:11e7a86209f69273e75d2dd64b06c0c2660e39cd942fce2170515c404ed7358a", - "sha256:1294f05916c044631fd626a4866326bbfbd17f62bd37510d000afaef4b35bd74", - "sha256:2f11b5783933bff55291ca06496124347627d211ff2e509e846af1c35de0a3fb", - "sha256:301d0cd6ef1dc73b607748183da857e712d6f743de8d92b1e1f8facfb0ba2aa2", - "sha256:344b58b4b4193b72e8b768a51ef6eb5a4c948ce313a0f23e2ea081e71ce8ac0e", - "sha256:44e11a06168782b6d485daef197783366ce7ab0d5eea0066c899ae06cef47bbc", - "sha256:45b091ccbf94374ed14abde17e9a04522b0493a17282eaaf4383efdd413f5243", - "sha256:48540072f43b3c080159ec1f24a4b014c0ee83d3b73795399974aa358a8cf71b", - "sha256:4df07161897191ed8d4a0cfc92425c81296160e5c5f76c9256716d3085172883", - "sha256:4f7ce3bfdab6520554af4a5b1df4513d45388624d015ba4d921daf48ce1d6503", - "sha256:5361e25181b9872d6906c8c9be7dc05cb0a0951d71ee59ee5a71c1deb301b8a8", - "sha256:6f8fdad2f335d2f3ca2f3ee3b01404f7abcf519b03de2c510f1f42d16e39ffb4", - "sha256:70a1387396ea5b3022539b560c287daf79403d8b4b365f89b56d660e625a4457", - "sha256:7481f9c2c832a3bf37c80bee44d91ac9938b815cc06f7e795b976e300914aab9", - "sha256:7c0c7bb49167ac738ca6ee6e7f94a9988a7e4e261d8da335341e8c8c8f3b2e9b", - "sha256:7de84feb31af3d8fdf819cac2042928d0b60d3cb16f49c4b2f48d88db46e79f6", - "sha256:7f5087104c3c5af11ea59e49ae66c33ca98b14a47d3796ae97498fca53f84aef", - "sha256:81badd7d3e0e6aba70a5d1b50fabe8112e9835a6fdb0684054c3fe5378ce0d01", - "sha256:82f11b679df91275788be6734dd4a9dfa29bac67b85326992609f62b05bdab37", - "sha256:8301ecf3e819eb5dbc171e84654ff60872807775301a55fe35b0ab2ba3742031", - 
"sha256:8d6a9feb5efd2fdab25c6d5a0a5589fed9d789f5ec57ec12263fd0e60ce1dea6", - "sha256:915d4fa08776c0252dc5a34fa15c6490f66f411ea1ac9492022f98875d6baf20", - "sha256:94040a92b6676f9ffdab6c6b479b3554b927a635c90698c761960b266b04fc88", - "sha256:a08027ae84efc563f0f2f341dda572eadebeca38c0ae028a009988f27e9e6230", - "sha256:a103294583383660d9e06dbd82037dc8e94c184bdcb27b2be44ae4457dafc6b4", - "sha256:c22bfac8d3b955cdb13f0fcd6343156bf56d925196cf7d9ab9ce9f61d3f1e11c", - "sha256:c3810ebcf1d42c532c8f5c3f442c705d94442a27a32f2df5344f0857306ab321", - "sha256:ee4ddc904fb6414b5118af5b8d45e428aac2ccda01326b2ba2fe4354b0d8d1ae", - "sha256:f16801795f1ffe9472360589a04301018c79e4582a85e68067275bb4f765e4e2", - "sha256:f62c57ceadedeb8e7b98b48ac4d684bf2b0f73b9d882fed3ca260d9aedf6403f", - "sha256:fbb0fda1c574975807aceb0e2332e0ecfe9e5656c191ed482c1a5eafe7a33823" + "sha256:08a00a955c5cb1d3a610f9735e0e9ca64f2fd2540c942ab84dc9a71433940f86", + "sha256:1b2b0199153a4ecbb57ec09ff8a3693dcb2c134fef217379e2761f27bccf3a14", + "sha256:1d8a71c2bf21437d6216ba1963507d4d1a37920429eafd09d85387d0d078fa5a", + "sha256:36bcf7530ca070e89f29e2f6e05c5566c9ab3a2e493608437a230253ecf112a7", + "sha256:375cde7038d3c4493e2e61273ed2a3be04b5845e9bea5c662543c22935fb439b", + "sha256:384c0ecc845b597eda2519de2f8dd66770e76f8f39e0d21f00dd5affaf293787", + "sha256:46737cd87a57e03ab20e79d29ad931b842e7b3226a169ae9b36babe69d92256f", + "sha256:49fc18facca9ecb29308e486de53e7d9ab7d7b02d6705158fa34af0c1a6c3b0b", + "sha256:4b9e7764638910c43eea6e6e367395dce3d1c6acc17f8550e66cd913725491d2", + "sha256:50dba4adb0f7cafb5c05e3e9734b7d84f0b009daf17ca5a3c1560be7dbcaaba7", + "sha256:586eb3698e616fe044472e7a249d24a5b05dc5c714dc0b9744417031988df3af", + "sha256:58bee8384a7e32846e560da0ad595cf0dd5046b286aafa8d000312c5db8899bf", + "sha256:5e7e9a7092aea03c68318d390f39dab75422143354543244b6e1b2b31848a494", + "sha256:6adf973e7e27bce34c6bb14f62368b99e53a55226836ac93ff1352fe467dc966", + "sha256:6d01d83d290db9e27ea02183e56ba548a48143b3b1b7977d07cedafc3606f91d", 
+ "sha256:6f0bd9b2cf1c555c6bfbb71d58750d096f7462a582abf6994cff80fbfe0d8c94", + "sha256:74cd7afd1789eabe42c838747c5680d78317aee448a22de75638ac0735ae3284", + "sha256:79286d63e5f92340357bc2a0801637b2accc95d7e0044768c3eea5e8271cc300", + "sha256:8162f379edc3c1c0c4ac7436b3a8baa8ca7754913ed81002f631bc066486803e", + "sha256:85bd128ebb3c47615496778fedbe334094cf6133c6933804e237c741fce4f20c", + "sha256:8a00c3494a1553e171c77505653cca22f5fadf09a0af4a020243f1baaad412b3", + "sha256:8dd79b534516b9b792dbb319324962d02c69a50a390cb2387e360bebe5d7b280", + "sha256:938e819bc74c95466c7f6d5dc7e2d08142c116c380992aa36d60e64e7a62ffe7", + "sha256:98270f1c52dc4a62279aee7c0a134e84182372e4b3c7ee35cafd906c11f4e218", + "sha256:9c2afd9ad52387d32b2a856b19352d605213a06b4684a3b469ff8f39a27fb3a2", + "sha256:a35d909327a1c3bc407689179101af93de34bc6af8c6f07d5d29e4eaab54a9f4", + "sha256:a63848afe8f909d1dcea286c3856c1cc1de6e8908e9ce1bdb672c9f19b2d2aa7", + "sha256:b12b39ded8cee6c4fdd0b8aa5afdb8cb5641098f2625acc9175effdc064b5c9f", + "sha256:b53a0faf32cde49eb04ad81f8ff60cfa1dcc024aa6a6bb8b545621339395e640", + "sha256:c9937cb1061042fb09c4b622884407525a0a595e300ef199d80a7290ca2c71ea", + "sha256:e21ca6ecf2a48a53856562af3380f2a64a1ce08ae2d17c800095f4685ab499b1", + "sha256:e25d48233f5501b41c7d561cfd9ec9c89a891643aaf282750c129d627cc5a547", + "sha256:e288a3640c3c9311bb223c13e6ecb2ae4c5fb018756b5fbf82b9a1f13c6c6111", + "sha256:ed96e1f28708c5a00fb371971d6634210afdcabb439dd488d41e1cfc2c906459" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.4.5" + "version": "==1.4.13" }, "swagger-ui-bundle": { "hashes": [ @@ -1106,38 +1011,20 @@ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.4" }, - "vine": { - "hashes": [ - 
"sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30", - "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.0" - }, "waitress": { "hashes": [ "sha256:29af5a53e9fb4e158f525367678b50053808ca6c21ba585754c77d790008c746", "sha256:69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897" ], - "markers": "python_full_version >= '3.6.0'", "version": "==2.0.0" }, - "wcwidth": { - "hashes": [ - "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", - "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" - ], - "version": "==0.2.5" - }, "webob": { "hashes": [ "sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b", "sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.7" }, "webtest": { @@ -1179,11 +1066,11 @@ }, "xlsxwriter": { "hashes": [ - "sha256:2b7e22b1268c2ed85d73e5629097c9a63357f2429667ada9863cd05ff8ee33aa", - "sha256:30ebc19d0f201fafa34a6c622050ed2a268ac8dee24037a61605caa801dc8af5" + "sha256:1a6dd98892e8010d3e089d1cb61385baa8f76fa547598df2c221cc37238c72d3", + "sha256:82be5a58c09bdc2ff8afc25acc815c465275239ddfc56d6e7b2a7e6c5d2e213b" ], "index": "pypi", - "version": "==1.3.8" + "version": "==1.4.0" } }, "develop": { @@ -1192,7 +1079,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "coverage": { @@ -1265,23 +1151,21 @@ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 
"version": "==20.9" }, "pbr": { "hashes": [ - "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9", - "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00" + "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd", + "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4" ], "index": "pypi", - "version": "==5.5.1" + "version": "==5.6.0" }, "pluggy": { "hashes": [ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { @@ -1289,7 +1173,6 @@ "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.10.0" }, "pyparsing": { @@ -1297,23 +1180,21 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pytest": { "hashes": [ - "sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634", - "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc" + "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b", + "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890" ], "index": "pypi", - "version": "==6.2.3" + "version": "==6.2.4" }, "toml": { "hashes": [ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" } } diff --git 
a/crc/api/study.py b/crc/api/study.py index fe5f8da8..bd4cf01b 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -33,7 +33,7 @@ def add_study(body): errors = StudyService._add_all_workflow_specs_to_study(study_model) session.commit() - study = StudyService().get_study(study_model.id) + study = StudyService().get_study(study_model.id, do_status=True) study_data = StudySchema().dump(study) study_data["errors"] = ApiErrorSchema(many=True).dump(errors) return study_data diff --git a/crc/api/user.py b/crc/api/user.py index bbd51130..8cdbbf47 100644 --- a/crc/api/user.py +++ b/crc/api/user.py @@ -214,7 +214,7 @@ def _handle_login(user_info: LdapModel, redirect_url=None): g.user = user # Return the frontend auth callback URL, with auth token appended. - auth_token = user.encode_auth_token().decode() + auth_token = user.encode_auth_token() g.token = auth_token if redirect_url is not None: diff --git a/migrations/versions/665624ac29f1_.py b/migrations/versions/665624ac29f1_.py index c9053a57..30171d03 100644 --- a/migrations/versions/665624ac29f1_.py +++ b/migrations/versions/665624ac29f1_.py @@ -19,12 +19,11 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('data_store', sa.Column('file_id', sa.Integer(), nullable=True)) - op.create_foreign_key(None, 'data_store', 'file', ['file_id'], ['id']) + op.create_foreign_key('file_id_key', 'data_store', 'file', ['file_id'], ['id']) # ### end Alembic commands ### - def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(None, 'data_store', type_='foreignkey') + op.drop_constraint('file_id_key', 'data_store', type_='foreignkey') op.drop_column('data_store', 'file_id') # ### end Alembic commands ### diff --git a/migrations/versions/c1449d1d1681_.py b/migrations/versions/c1449d1d1681_.py new file mode 100644 index 00000000..e23055ce --- /dev/null +++ b/migrations/versions/c1449d1d1681_.py @@ -0,0 +1,24 @@ +"""empty message + +Revision ID: c1449d1d1681 +Revises: abeffe547305, 8b976945a54e, 62910318009f +Create Date: 2021-05-04 13:20:55.447143 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c1449d1d1681' +down_revision = ('abeffe547305', '8b976945a54e', '62910318009f') +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass diff --git a/tests/base_test.py b/tests/base_test.py index b19f3e06..5876e503 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -131,7 +131,7 @@ class BaseTest(unittest.TestCase): user = UserService.current_user(allow_admin_impersonate=True) self.assertEqual(uid, user.uid, 'Logged in user should match given user uid') - return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode()) + return dict(Authorization='Bearer ' + user_model.encode_auth_token()) def delete_example_data(self, use_crc_data=False, use_rrt_data=False): """ diff --git a/tests/files/test_file_service.py b/tests/files/test_file_service.py index 53a644a0..94f5945f 100644 --- a/tests/files/test_file_service.py +++ b/tests/files/test_file_service.py @@ -15,7 +15,7 @@ class FakeGithubCreates(Mock): def get_repo(var, name): class FakeRepo(Mock): def get_contents(var, filename, ref): - raise UnknownObjectException(status='Failure', data='Failed data') + raise UnknownObjectException(status='Failure', data='Failed data', headers=[]) def update_file(var, path, message, content, sha, branch): pass return FakeRepo() diff --git 
a/tests/study/test_study_api.py b/tests/study/test_study_api.py index 409cc5c5..f3ef1aff 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -1,8 +1,8 @@ import json from profile import Profile +from tests.base_test import BaseTest from crc.services.ldap_service import LdapService -from tests.base_test import BaseTest from datetime import datetime, timezone from unittest.mock import patch @@ -113,10 +113,9 @@ class TestStudyApi(BaseTest): self.assertEqual(study["ind_number"], db_study.ind_number) self.assertEqual(study["user_uid"], db_study.user_uid) - workflow_spec_count =session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == False).count() + workflow_spec_count =session.query(WorkflowSpecModel).count() workflow_count = session.query(WorkflowModel).filter(WorkflowModel.study_id == study['id']).count() - error_count = len(study["errors"]) - self.assertEqual(workflow_spec_count, workflow_count + error_count) + self.assertEqual(workflow_spec_count, workflow_count) study_event = session.query(StudyEvent).first() self.assertIsNotNone(study_event) diff --git a/tests/test_authentication.py b/tests/test_authentication.py index 23cfe55e..c2199624 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -33,7 +33,7 @@ class TestAuthentication(BaseTest): user_1 = UserModel(uid="dhf8r") expected_exp_1 = timegm((datetime.utcnow() + timedelta(hours=new_ttl)).utctimetuple()) auth_token_1 = user_1.encode_auth_token() - self.assertTrue(isinstance(auth_token_1, bytes)) + self.assertTrue(isinstance(auth_token_1, str)) self.assertEqual("dhf8r", user_1.decode_auth_token(auth_token_1).get("sub")) #actual_exp_1 = user_1.decode_auth_token(auth_token_1).get("exp") #self.assertTrue(expected_exp_1 - 1000 <= actual_exp_1 <= expected_exp_1 + 1000) @@ -131,7 +131,7 @@ class TestAuthentication(BaseTest): admin_user = self._login_as_admin() admin_study = self._make_fake_study(admin_user.uid) - admin_token_headers = 
dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) + admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token()) rv_add_study = self.app.post( '/v1.0/study', @@ -164,7 +164,7 @@ class TestAuthentication(BaseTest): # Non-admin user should not be able to delete a study non_admin_user = self._login_as_non_admin() - non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) + non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token()) non_admin_study = self._make_fake_study(non_admin_user.uid) rv_add_study = self.app.post( @@ -211,7 +211,7 @@ class TestAuthentication(BaseTest): self.load_example_data() admin_user = self._login_as_admin() - admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) + admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token()) # User should not be in the system yet. non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() @@ -230,7 +230,7 @@ class TestAuthentication(BaseTest): self.logout() non_admin_user = self._login_as_non_admin() self.assertEqual(non_admin_user.uid, self.non_admin_uid) - non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) + non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token()) # Add a study for the non-admin user non_admin_study = self._make_fake_study(self.non_admin_uid) From 7831bef0506f963b506a8f2c505277589c7ac6cc Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 5 May 2021 15:59:00 -0400 Subject: [PATCH 34/37] Don't fail the sync completely when a remote file does not exist. 
--- crc/api/workflow_sync.py | 9 ++++++--- crc/services/workflow_sync.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crc/api/workflow_sync.py b/crc/api/workflow_sync.py index 3204e936..54c5e8c6 100644 --- a/crc/api/workflow_sync.py +++ b/crc/api/workflow_sync.py @@ -184,9 +184,12 @@ def update_or_create_current_file(remote,workflow_spec_id,updatefile): currentfile.content_type = updatefile['content_type'] currentfile.primary_process_id = updatefile['primary_process_id'] session.add(currentfile) - content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash']) - FileService.update_file(currentfile, content, updatefile['type']) - + try: + content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash']) + FileService.update_file(currentfile, content, updatefile['type']) + except ApiError: + # Remote file doesn't exist, so don't update it. + print("Remote file " + currentfile.name + " does not exist, so not syncing.") def sync_changed_files(remote,workflow_spec_id): """ diff --git a/crc/services/workflow_sync.py b/crc/services/workflow_sync.py index 849c815e..2c16cf5f 100644 --- a/crc/services/workflow_sync.py +++ b/crc/services/workflow_sync.py @@ -47,6 +47,6 @@ class WorkflowSyncService(object): return json.loads(response.text) else: raise ApiError("workflow_sync_error", - "Received an invalid response from the protocol builder (status %s): %s when calling " + "Received an invalid response from the remote CR-Connect API (status %s): %s when calling " "url '%s'." % (response.status_code, response.text, url)) From 5a79b80f326890826e953de35d97b2680210b913 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 5 May 2021 20:21:33 -0400 Subject: [PATCH 35/37] fixes #322, do not error out deleting files that have associated data stores, just remove the data store.
--- crc/services/file_service.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crc/services/file_service.py b/crc/services/file_service.py index 16b16f5e..3d8a1e39 100644 --- a/crc/services/file_service.py +++ b/crc/services/file_service.py @@ -14,6 +14,7 @@ from sqlalchemy.exc import IntegrityError from crc import session, app from crc.api.common import ApiError +from crc.models.data_store import DataStoreModel from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile from crc.services.cache_service import cache @@ -392,6 +393,7 @@ class FileService(object): session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete() session.query(LookupFileModel).filter_by(id=lf.id).delete() session.query(FileDataModel).filter_by(file_model_id=file_id).delete() + session.query(DataStoreModel).filter_by(file_id=file_id).delete() session.query(FileModel).filter_by(id=file_id).delete() session.commit() except IntegrityError as ie: From 620b9a5188faba3438d1a6a0d191592bc45cd441 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 5 May 2021 21:36:57 -0400 Subject: [PATCH 36/37] Fixing a regression. It's critical that Spiffworkflow's box implements deepcopy, as this is used by Jinja prior to generating a Word document.
--- Pipfile.lock | 8 ++++---- crc/scripts/complete_template.py | 5 ++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 099e141e..10f6e573 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -885,10 +885,10 @@ }, "six": { "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.15.0" + "version": "==1.16.0" }, "snowballstemmer": { "hashes": [ @@ -957,7 +957,7 @@ }, "spiffworkflow": { "git": "https://github.com/sartography/SpiffWorkflow.git", - "ref": "54a3cda7c40b7ee35e0b21ede92606d1097279dd" + "ref": "1a44d004d657bc5773551254aafba88993ae6d35" }, "sqlalchemy": { "hashes": [ diff --git a/crc/scripts/complete_template.py b/crc/scripts/complete_template.py index 32bee509..514cf04d 100644 --- a/crc/scripts/complete_template.py +++ b/crc/scripts/complete_template.py @@ -114,7 +114,10 @@ Takes two arguments: doc_context = self.rich_text_update(doc_context) doc_context = self.append_images(doc, doc_context, image_file_data) jinja_env = jinja2.Environment(autoescape=True) - doc.render(doc_context, jinja_env) + try: + doc.render(doc_context, jinja_env) + except Exception as e: + print (e) target_stream = BytesIO() doc.save(target_stream) target_stream.seek(0) # move to the beginning of the stream. From a1bb30e689c61449e9e882748503b764227eaacf Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 14 May 2021 12:28:50 -0400 Subject: [PATCH 37/37] Switching from using "default" to "server_default" for all default time settings, and running migrations so that this is set at the database level rather than in python, to hopefully correct some issues with dates being 4 hours in the future. Having a very hard time replicating this issue locally. 
--- crc/models/data_store.py | 2 +- crc/models/file.py | 2 +- crc/models/ldap.py | 2 +- crc/models/study.py | 4 +-- crc/models/workflow.py | 2 +- migrations/versions/bbf064082623_.py | 38 ++++++++++++++++++++++++++++ postgres/docker-compose.yml | 2 ++ 7 files changed, 46 insertions(+), 6 deletions(-) create mode 100644 migrations/versions/bbf064082623_.py diff --git a/crc/models/data_store.py b/crc/models/data_store.py index 8e5fbf07..07017cee 100644 --- a/crc/models/data_store.py +++ b/crc/models/data_store.py @@ -10,7 +10,7 @@ from crc import db, ma class DataStoreModel(db.Model): __tablename__ = 'data_store' id = db.Column(db.Integer, primary_key=True) - last_updated = db.Column(db.DateTime(timezone=True), default=func.now()) + last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now()) key = db.Column(db.String, nullable=False) workflow_id = db.Column(db.Integer) study_id = db.Column(db.Integer, nullable=True) diff --git a/crc/models/file.py b/crc/models/file.py index cdf29faf..9f3073e3 100644 --- a/crc/models/file.py +++ b/crc/models/file.py @@ -66,7 +66,7 @@ class FileDataModel(db.Model): data = deferred(db.Column(db.LargeBinary)) # Don't load it unless you have to. 
version = db.Column(db.Integer, default=0) size = db.Column(db.Integer, default=0) - date_created = db.Column(db.DateTime(timezone=True), default=func.now()) + date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) file_model_id = db.Column(db.Integer, db.ForeignKey('file.id')) file_model = db.relationship("FileModel", foreign_keys=[file_model_id]) diff --git a/crc/models/ldap.py b/crc/models/ldap.py index 802e0d36..10d15ef3 100644 --- a/crc/models/ldap.py +++ b/crc/models/ldap.py @@ -15,7 +15,7 @@ class LdapModel(db.Model): department = db.Column(db.String) affiliation = db.Column(db.String) sponsor_type = db.Column(db.String) - date_cached = db.Column(db.DateTime(timezone=True), default=func.now()) + date_cached = db.Column(db.DateTime(timezone=True), server_default=func.now()) @classmethod def from_entry(cls, entry): diff --git a/crc/models/study.py b/crc/models/study.py index 99d1d585..61f58472 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -41,7 +41,7 @@ class StudyModel(db.Model): id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String) short_title = db.Column(db.String, nullable=True) - last_updated = db.Column(db.DateTime(timezone=True), default=func.now()) + last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now()) status = db.Column(db.Enum(StudyStatus)) irb_status = db.Column(db.Enum(IrbStatus)) primary_investigator_id = db.Column(db.String, nullable=True) @@ -85,7 +85,7 @@ class StudyEvent(db.Model): id = db.Column(db.Integer, primary_key=True) study_id = db.Column(db.Integer, db.ForeignKey(StudyModel.id), nullable=False) study = db.relationship(StudyModel, back_populates='events_history') - create_date = db.Column(db.DateTime(timezone=True), default=func.now()) + create_date = db.Column(db.DateTime(timezone=True), server_default=func.now()) status = db.Column(db.Enum(StudyStatus)) comment = db.Column(db.String, default='') event_type = db.Column(db.Enum(StudyEventType)) diff 
--git a/crc/models/workflow.py b/crc/models/workflow.py index 60696104..5b1c49f6 100644 --- a/crc/models/workflow.py +++ b/crc/models/workflow.py @@ -89,7 +89,7 @@ class WorkflowModel(db.Model): workflow_spec = db.relationship("WorkflowSpecModel") total_tasks = db.Column(db.Integer, default=0) completed_tasks = db.Column(db.Integer, default=0) - last_updated = db.Column(db.DateTime(timezone=True),default=func.now()) + last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now()) user_id = db.Column(db.String, default=None) # Order By is important or generating hashes on reviews. dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan", diff --git a/migrations/versions/bbf064082623_.py b/migrations/versions/bbf064082623_.py new file mode 100644 index 00000000..1ee1885a --- /dev/null +++ b/migrations/versions/bbf064082623_.py @@ -0,0 +1,38 @@ +"""empty message + +Revision ID: bbf064082623 +Revises: c1449d1d1681 +Create Date: 2021-05-13 15:07:44.463757 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+from sqlalchemy import func + +revision = 'bbf064082623' +down_revision = 'c1449d1d1681' +branch_labels = None +depends_on = None + + +def upgrade(): + op.alter_column('data_store', 'last_updated', server_default=func.now()) + op.alter_column('file_data', 'date_created', server_default=func.now()) + op.alter_column('data_store', 'last_updated', server_default=func.now()) + op.alter_column('ldap_model', 'date_cached', server_default=func.now()) + op.alter_column('study', 'last_updated', server_default=func.now()) + op.alter_column('study_event', 'create_date', server_default=func.now()) + op.alter_column('workflow', 'last_updated', server_default=func.now()) + + +def downgrade(): + op.alter_column('data_store', 'last_updated', server_default=None) + op.alter_column('file_data', 'date_created', server_default=None) + op.alter_column('data_store', 'last_updated', server_default=None) + op.alter_column('ldap_model', 'date_cached', server_default=None) + op.alter_column('study', 'last_updated', server_default=None) + op.alter_column('study_event', 'create_date', server_default=None) + op.alter_column('workflow', 'last_updated', server_default=None) diff --git a/postgres/docker-compose.yml b/postgres/docker-compose.yml index 31116b85..0b226f21 100644 --- a/postgres/docker-compose.yml +++ b/postgres/docker-compose.yml @@ -10,3 +10,5 @@ services: - POSTGRES_USER=${DB_USER} - POSTGRES_PASSWORD=${DB_PASS} - POSTGRES_MULTIPLE_DATABASES=crc_dev,crc_test,pb,pb_test + - TZ=America/New_York + - PGTZ=America/New_York \ No newline at end of file