From 855f5544e51604b29ed91bffb7730da066f1c81e Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 09:35:08 -0400 Subject: [PATCH 01/31] Adds enrollment_date to study model --- crc/models/study.py | 7 +- crc/services/workflow_service.py | 5 +- .../bpmn/notifications/notifications.bpmn | 100 +++++++++++++++--- migrations/versions/c4ddb69e7ef4_.py | 28 +++++ 4 files changed, 122 insertions(+), 18 deletions(-) create mode 100644 migrations/versions/c4ddb69e7ef4_.py diff --git a/crc/models/study.py b/crc/models/study.py index 7bb2db33..854ce62f 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -25,6 +25,7 @@ class StudyModel(db.Model): investigator_uids = db.Column(db.ARRAY(db.String), nullable=True) requirements = db.Column(db.ARRAY(db.Integer), nullable=True) on_hold = db.Column(db.Boolean, default=False) + enrollment_date = db.Column(db.DateTime(timezone=True), nullable=True) def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy): self.hsr_number = pbs.HSRNUMBER @@ -108,7 +109,7 @@ class Study(object): id=None, protocol_builder_status=None, sponsor="", hsr_number="", ind_number="", categories=[], - files=[], approvals=[], **argsv): + files=[], approvals=[], enrollment_date=None, **argsv): self.id = id self.user_uid = user_uid self.title = title @@ -122,6 +123,7 @@ class Study(object): self.approvals = approvals self.warnings = [] self.files = files + self.enrollment_date = enrollment_date @classmethod def from_model(cls, study_model: StudyModel): @@ -154,11 +156,12 @@ class StudySchema(ma.Schema): ind_number = fields.String(allow_none=True) files = fields.List(fields.Nested(FileSchema), dump_only=True) approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True) + enrollment_date = fields.Date(allow_none=True) class Meta: model = Study additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid", - "sponsor", "ind_number", "approvals", "files"] + "sponsor", "ind_number", "approvals", "files", "enrollment_date"] unknown = INCLUDE @marshmallow.post_load diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 3205e800..65794037 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -154,10 +154,9 @@ class WorkflowService(object): if len(field.options) > 0: random_choice = random.choice(field.options) if isinstance(random_choice, dict): - choice = random.choice(field.options) return { - 'value': choice['id'], - 'label': choice['name'] + 'value': random_choice['id'], + 'label': random_choice['name'] } else: # fixme: why it is sometimes an EnumFormFieldOption, and other times not? 
diff --git a/crc/static/bpmn/notifications/notifications.bpmn b/crc/static/bpmn/notifications/notifications.bpmn index 4c01a711..cd73505f 100644 --- a/crc/static/bpmn/notifications/notifications.bpmn +++ b/crc/static/bpmn/notifications/notifications.bpmn @@ -1,42 +1,116 @@ - + + + + + + + StartEvent_1 + Activity_1qpy9ra + Event_1m9fnmv + + + Gateway_0ved0t9 + Activity_107ojvq + + Flow_0q51aiq - - + - + Flow_0q51aiq - Flow_0ai4j1x + Flow_11tnx3n + Flow_0d2snmk + + + + Flow_0apr3nj + Flow_0mhtlkt + Flow_11tnx3n + + + + is_study_approved == True + - Flow_0ai4j1x + Flow_0mhtlkt - + + is_study_approved == False + + + + + + + + Flow_0d2snmk + Flow_0apr3nj + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + - + + + + diff --git a/migrations/versions/c4ddb69e7ef4_.py b/migrations/versions/c4ddb69e7ef4_.py new file mode 100644 index 00000000..533d2f86 --- /dev/null +++ b/migrations/versions/c4ddb69e7ef4_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: c4ddb69e7ef4 +Revises: ffef4661a37d +Create Date: 2020-07-22 09:04:09.769239 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c4ddb69e7ef4' +down_revision = 'ffef4661a37d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('study', sa.Column('enrollment_date', sa.DateTime(timezone=True), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('study', 'enrollment_date') + # ### end Alembic commands ### From 60f907f852c4bb5e872d3f92b97b2fbca2ea1ea3 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:47:25 -0400 Subject: [PATCH 02/31] Updates package versions. 
--- Pipfile.lock | 75 +++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 69 insertions(+), 6 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index bd8581a5..f726450c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -35,6 +35,7 @@ "sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b", "sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.6.0" }, "aniso8601": { @@ -49,6 +50,7 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "babel": { @@ -56,6 +58,7 @@ "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.0" }, "bcrypt": { @@ -79,6 +82,7 @@ "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.7" }, "beautifulsoup4": { @@ -107,6 +111,7 @@ "sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916", "sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.4.6" }, "certifi": { @@ -161,6 +166,7 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, "clickclick": { @@ -182,6 +188,7 @@ "sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1", "sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd" ], + "markers": "python_version >= '3.6'", "version": "==5.0.0" }, "connexion": { @@ -240,6 +247,7 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "docxtpl": { @@ -322,12 +330,14 @@ "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", "sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.4" }, "future": { "hashes": [ "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.2" }, "gunicorn": { @@ -350,6 +360,7 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -357,6 +368,7 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", 
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "importlib-metadata": { @@ -372,6 +384,7 @@ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], + "markers": "python_version >= '3.5'", "version": "==0.5.0" }, "itsdangerous": { @@ -379,6 +392,7 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jdcal": { @@ -393,6 +407,7 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, "jsonschema": { @@ -407,11 +422,16 @@ "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.6.11" }, "ldap3": { "hashes": [ + "sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744", "sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0", + "sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd", + "sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871", + "sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c", "sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b" ], "index": "pypi", @@ -459,6 +479,7 @@ "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27", "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.3" }, "markdown": { @@ -505,6 +526,7 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "marshmallow": { @@ -560,6 +582,7 @@ "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e", "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc" ], + "markers": "python_version >= '3.6'", "version": "==1.19.1" }, "openapi-spec-validator": { @@ -583,6 +606,7 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pandas": { @@ -645,8 +669,19 @@ }, "pyasn1": { "hashes": [ + "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", + "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", + "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" + 
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", + "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", + "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", + "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", + "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", + "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", + "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", + "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776" ], "version": "==0.4.8" }, @@ -655,6 +690,7 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -662,6 +698,7 @@ "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44", "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324" ], + "markers": "python_version >= '3.5'", "version": "==2.6.1" }, "pyjwt": { @@ -677,6 +714,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { @@ -690,6 +728,7 @@ "sha256:2df0d0e0769b6d6e7daed8d5e0b10a38e0b5486ee75914c30f2a927f7a374111", "sha256:ddea019b4ee53fe3f822407b0b26ec54ff6233042c68b54244d3503ae4d6218f" ], + "markers": "python_version >= '3.6'", "version": "==5.0.1" }, "python-dateutil": { @@ -708,9 +747,11 @@ }, "python-editor": { "hashes": [ - "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", + "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", + "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", - "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d" ], "version": "==1.0.4" }, @@ -824,6 +865,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -838,6 +880,7 @@ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" ], + "markers": "python_version >= '3.5'", "version": "==2.0.1" }, "sphinx": { @@ -853,6 +896,7 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -860,6 +904,7 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -867,6 +912,7 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", 
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -874,6 +920,7 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], + "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -881,6 +928,7 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -888,6 +936,7 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], + "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "spiffworkflow": { @@ -926,6 +975,7 @@ "sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274", "sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.18" }, "swagger-ui-bundle": { @@ -938,16 +988,18 @@ }, "urllib3": { "hashes": [ - "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", - "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" + "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", + "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], - "version": "==1.25.9" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.25.10" }, "vine": { "hashes": [ "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.0" }, "waitress": { @@ -955,6 +1007,7 @@ "sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261", "sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.4.4" }, "webob": { @@ -962,6 +1015,7 @@ "sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b", "sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.6" }, "webtest": { @@ -1008,6 +1062,7 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], + "markers": "python_version >= '3.6'", "version": "==3.1.0" } }, @@ -1017,6 +1072,7 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "coverage": { @@ -1072,6 +1128,7 @@ "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" ], + "markers": "python_version >= '3.5'", "version": "==8.4.0" }, "packaging": { @@ -1079,6 +1136,7 @@ 
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pbr": { @@ -1094,6 +1152,7 @@ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { @@ -1101,6 +1160,7 @@ "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2", "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.9.0" }, "pyparsing": { @@ -1108,6 +1168,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pytest": { @@ -1123,6 +1184,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "wcwidth": { @@ -1137,6 +1199,7 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], + "markers": "python_version >= '3.6'", "version": "==3.1.0" } } From b87f55fbd75df75c53b84541107afd053733cd54 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:47:53 -0400 Subject: [PATCH 03/31] Exposes date in TaskEvent endpoint --- crc/models/task_event.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crc/models/task_event.py b/crc/models/task_event.py index e3914468..c696bc26 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -50,6 +50,7 @@ class TaskEvent(object): self.task_type = model.task_type self.task_state = model.task_state self.task_lane = model.task_lane + self.date = model.date class TaskEventSchema(ma.Schema): @@ -59,5 +60,5 @@ class TaskEventSchema(ma.Schema): class Meta: model = TaskEvent additional = ["id", "user_uid", "action", "task_id", "task_title", - "task_name", "task_type", "task_state", "task_lane"] + "task_name", "task_type", "task_state", "task_lane", "date"] unknown = INCLUDE From 6fae89b1fc841fff5d5bceb0dd28d925f20ac175 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:48:36 -0400 Subject: [PATCH 04/31] Adds manual task --- .../bpmn/notifications/notifications.bpmn | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/crc/static/bpmn/notifications/notifications.bpmn b/crc/static/bpmn/notifications/notifications.bpmn index cd73505f..a9fdedbf 100644 --- a/crc/static/bpmn/notifications/notifications.bpmn +++ b/crc/static/bpmn/notifications/notifications.bpmn @@ -9,6 +9,7 @@ StartEvent_1 Activity_1qpy9ra Event_1m9fnmv + Activity_0c5drp3 Gateway_0ved0t9 @@ -25,7 +26,7 @@ Flow_0q51aiq - Flow_11tnx3n + Flow_1ugh4wn Flow_0d2snmk @@ -42,7 +43,7 @@ Flow_0mhtlkt - + is_study_approved == False @@ -54,6 +55,12 @@ Flow_0d2snmk Flow_0apr3nj + + + Your request was not approved. Try again. 
+ Flow_11tnx3n + Flow_1ugh4wn + @@ -68,10 +75,9 @@ - - + - + @@ -94,6 +100,10 @@ + + + + @@ -112,6 +122,9 @@ + + + From 5ec5fcb4e425b723f804f8fa0ff0e7ec5f3530e9 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Thu, 23 Jul 2020 12:00:24 -0400 Subject: [PATCH 05/31] Correcting an issue with the Navigation where it did not correctly handle looking back to a previous task within the workflow. In some cases the session was not getting committed, leaving rogue assignments outstanding for a workflow. --- Pipfile.lock | 71 ++------------------------------ crc/services/workflow_service.py | 1 + tests/test_user_roles.py | 63 +++++++++++++++++++++++++++- 3 files changed, 67 insertions(+), 68 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index f726450c..8cee7118 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -35,7 +35,6 @@ "sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b", "sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.6.0" }, "aniso8601": { @@ -50,7 +49,6 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "babel": { @@ -58,7 +56,6 @@ "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.0" }, "bcrypt": { @@ -82,7 +79,6 @@ "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.7" }, "beautifulsoup4": { @@ -111,7 +107,6 @@ "sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916", "sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.4.6" }, "certifi": { @@ -166,7 +161,6 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, "clickclick": { @@ -188,7 +182,6 @@ "sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1", "sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd" ], - "markers": "python_version >= '3.6'", "version": "==5.0.0" }, "connexion": { @@ -247,7 +240,6 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "docxtpl": { @@ -330,14 +322,12 @@ "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", "sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.4" }, "future": { "hashes": [ "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" ], - "markers": 
"python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.2" }, "gunicorn": { @@ -360,7 +350,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -368,7 +357,6 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "importlib-metadata": { @@ -384,7 +372,6 @@ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], - "markers": "python_version >= '3.5'", "version": "==0.5.0" }, "itsdangerous": { @@ -392,7 +379,6 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jdcal": { @@ -407,7 +393,6 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, "jsonschema": { @@ -422,16 +407,11 @@ "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.6.11" }, "ldap3": { "hashes": [ - "sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744", "sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0", - "sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd", - "sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871", - "sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c", "sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b" ], "index": "pypi", @@ -479,7 +459,6 @@ "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27", "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.3" }, "markdown": { @@ -526,7 +505,6 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "marshmallow": { @@ -582,7 +560,6 @@ "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e", "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc" ], - "markers": "python_version >= '3.6'", "version": "==1.19.1" }, "openapi-spec-validator": { @@ -606,7 +583,6 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pandas": { @@ -669,19 +645,8 @@ }, "pyasn1": { 
"hashes": [ - "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", - "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", - "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", - "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", - "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", - "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", - "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", - "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", - "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", - "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776" + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" ], "version": "==0.4.8" }, @@ -690,7 +655,6 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -698,7 +662,6 @@ "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44", "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324" ], - "markers": "python_version >= '3.5'", "version": "==2.6.1" }, "pyjwt": { @@ -714,7 +677,6 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { @@ -728,7 +690,6 @@ "sha256:2df0d0e0769b6d6e7daed8d5e0b10a38e0b5486ee75914c30f2a927f7a374111", "sha256:ddea019b4ee53fe3f822407b0b26ec54ff6233042c68b54244d3503ae4d6218f" ], - "markers": "python_version >= '3.6'", "version": "==5.0.1" }, "python-dateutil": { @@ -747,11 +708,9 @@ }, "python-editor": { "hashes": [ - "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", - "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", - "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", - "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d" + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" ], "version": "==1.0.4" }, @@ -865,7 +824,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -880,7 +838,6 @@ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" ], - "markers": "python_version >= '3.5'", "version": "==2.0.1" }, "sphinx": { @@ -896,7 +853,6 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], 
- "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -904,7 +860,6 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -912,7 +867,6 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -920,7 +874,6 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], - "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -928,7 +881,6 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -936,13 +888,12 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "spiffworkflow": { "editable": true, "git": "https://github.com/sartography/SpiffWorkflow.git", - "ref": "74529738b4e16be5aadd846669a201560f81a6d4" + "ref": "5785d3cab99e319596e1bf0006df96f215febafd" }, "sqlalchemy": { "hashes": [ @@ -975,7 +926,6 @@ "sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274", "sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.18" }, "swagger-ui-bundle": { @@ -991,7 +941,6 @@ "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.25.10" }, "vine": { @@ -999,7 +948,6 @@ "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.0" }, "waitress": { @@ -1007,7 +955,6 @@ "sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261", "sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.4.4" }, "webob": { @@ -1015,7 +962,6 @@ "sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b", "sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.6" }, "webtest": { @@ -1062,7 +1008,6 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], - "markers": "python_version >= '3.6'", "version": "==3.1.0" } }, @@ -1072,7 +1017,6 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", 
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "coverage": { @@ -1128,7 +1072,6 @@ "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" ], - "markers": "python_version >= '3.5'", "version": "==8.4.0" }, "packaging": { @@ -1136,7 +1079,6 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pbr": { @@ -1152,7 +1094,6 @@ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { @@ -1160,7 +1101,6 @@ "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2", "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.9.0" }, "pyparsing": { @@ -1168,7 +1108,6 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pytest": { @@ -1184,7 +1123,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "wcwidth": { @@ -1199,7 +1137,6 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], - "markers": "python_version >= '3.6'", "version": "==3.1.0" } } diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 65794037..e078166b 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -470,6 +470,7 @@ class WorkflowService(object): db.session.query(TaskEventModel). \ filter(TaskEventModel.workflow_id == processor.workflow_model.id). \ filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete() + db.session.commit() for task in processor.get_current_user_tasks(): user_ids = WorkflowService.get_users_assigned_to_task(processor, task) diff --git a/tests/test_user_roles.py b/tests/test_user_roles.py index cc7ff613..d1e85563 100644 --- a/tests/test_user_roles.py +++ b/tests/test_user_roles.py @@ -200,4 +200,65 @@ class TestTasksApi(BaseTest): workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid) self.assertEquals('COMPLETED', workflow_api.next_task.state) self.assertEquals('EndEvent', workflow_api.next_task.type) # Are are at the end. - self.assertEquals(WorkflowStatus.complete, workflow_api.status) \ No newline at end of file + self.assertEquals(WorkflowStatus.complete, workflow_api.status) + + def get_assignment_task_events(self, uid): + return db.session.query(TaskEventModel). \ + filter(TaskEventModel.user_uid == uid). 
\
+            filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).all()
+
+    def test_workflow_reset_correctly_resets_the_task_events(self):
+
+        submitter = self.create_user(uid='lje5u')
+        supervisor = self.create_user(uid='lb3dp')
+        workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid)
+        workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
+
+        # User lje5u can complete the first task, and set her supervisor
+        data = workflow_api.next_task.data
+        data['supervisor'] = supervisor.uid
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
+
+        # At this point there should be a task_log with an action of ASSIGNMENT on it for
+        # the supervisor.
+        self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
+
+        # Resetting the workflow at this point should clear the event log.
+        workflow_api = self.get_workflow_api(workflow, hard_reset=True, user_uid=submitter.uid)
+        self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
+
+        # Re-complete first task, and awaiting tasks should shift to 0 for submitter, and 1 for supervisor
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
+        self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
+        self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
+
+        # Complete the supervisor task with rejected approval, and the assignments should switch.
+        workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
+        data = workflow_api.next_task.data
+        data["approval"] = False
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
+        self.assertEquals(1, len(self.get_assignment_task_events(submitter.uid)))
+        self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
+
+        # Mark the return form review page as complete, and then recomplete the form, and assignments switch yet again.
+        workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
+        self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
+        self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
+
+        # Complete the supervisor task, accepting the approval, and the workflow is completed.
+        # When it is all done, there should be no outstanding assignments.
+        workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
+        data = workflow_api.next_task.data
+        data["approval"] = True
+        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
+        self.assertEquals(WorkflowStatus.complete, workflow_api.status)
+        self.assertEquals('EndEvent', workflow_api.next_task.type)  # We are at the end.
+ self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) + + # Sending any subsequent complete forms does not result in a new task event + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) From 842d2ee100798ca020f262e05759d0a14294fc81 Mon Sep 17 00:00:00 2001 From: Carlos Lopez Date: Thu, 23 Jul 2020 10:58:24 -0600 Subject: [PATCH 06/31] Supporting study status update --- crc/api.yml | 12 +++++++----- crc/api/study.py | 6 ++++-- crc/models/protocol_builder.py | 8 ++++---- crc/models/study.py | 25 ++++++++++++++++++++++--- 4 files changed, 37 insertions(+), 14 deletions(-) diff --git a/crc/api.yml b/crc/api.yml index 4c6ebd1b..a9261d08 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -1046,22 +1046,24 @@ components: example: dhf8r protocol_builder_status: type: string - enum: [INCOMPLETE, ACTIVE, HOLD, OPEN, ABANDONED] + enum: [incomplete, active, hold, open, abandoned] example: done sponsor: type: string + x-nullable: true example: "Sartography Pharmaceuticals" ind_number: type: string + x-nullable: true example: "27b-6-42" hsr_number: type: string x-nullable: true example: "27b-6-1212" - categories: - type: array - items: - $ref: "#/components/schemas/WorkflowSpecCategory" + # categories: + # type: array + # items: + # $ref: "#/components/schemas/WorkflowSpecCategory" WorkflowSpec: properties: id: diff --git a/crc/api/study.py b/crc/api/study.py index 8fdd1b4a..ba2b7c0d 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -6,7 +6,7 @@ from sqlalchemy.exc import IntegrityError from crc import session from crc.api.common import ApiError, ApiErrorSchema from crc.models.protocol_builder import ProtocolBuilderStatus -from crc.models.study import StudySchema, StudyModel, Study +from crc.models.study import StudySchema, StudyForUpdateSchema, StudyModel, Study from crc.services.study_service import StudyService @@ -40,10 +40,12 @@ def update_study(study_id, body): if study_model is None: raise ApiError('unknown_study', 'The study "' + study_id + '" is not recognized.') - study: Study = StudySchema().load(body) + study: Study = StudyForUpdateSchema().load(body) study.update_model(study_model) session.add(study_model) session.commit() + # Need to reload the full study to return it to the frontend + study = StudyService.get_study(study_id) return StudySchema().dump(study) diff --git a/crc/models/protocol_builder.py b/crc/models/protocol_builder.py index 9ff1098f..a6bc02cf 100644 --- a/crc/models/protocol_builder.py +++ b/crc/models/protocol_builder.py @@ -23,10 +23,10 @@ class ProtocolBuilderStatus(enum.Enum): # • Open To Enrollment: has start date and HSR number? # • Abandoned: deleted in PB INCOMPLETE = 'incomplete' # Found in PB but not ready to start (not q_complete) - ACTIVE = 'active', # found in PB, marked as "q_complete" and no HSR number and not hold - HOLD = 'hold', # CR Connect side, if the Study ias marked as "hold". - OPEN = 'open', # Open To Enrollment: has start date and HSR number? - ABANDONED = 'Abandoned' # Not found in PB + ACTIVE = 'active' # found in PB, marked as "q_complete" and no HSR number and not hold + HOLD = 'hold' # CR Connect side, if the Study ias marked as "hold". + OPEN = 'open' # Open To Enrollment: has start date and HSR number? 
+ ABANDONED = 'abandoned' # Not found in PB #DRAFT = 'draft', # !Q_COMPLETE diff --git a/crc/models/study.py b/crc/models/study.py index 854ce62f..bc92e5e1 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -133,9 +133,7 @@ class Study(object): return instance def update_model(self, study_model: StudyModel): - for k,v in self.__dict__.items(): - if not k.startswith('_'): - study_model.__dict__[k] = v + study_model.protocol_builder_status = ProtocolBuilderStatus(self.protocol_builder_status) def model_args(self): """Arguments that can be passed into the Study Model to update it.""" @@ -145,6 +143,27 @@ class Study(object): return self_dict +class StudyForUpdateSchema(ma.Schema): + + id = fields.Integer(required=False, allow_none=True) + protocol_builder_status = EnumField(ProtocolBuilderStatus, by_value=True) + hsr_number = fields.String(allow_none=True) + sponsor = fields.String(allow_none=True) + ind_number = fields.String(allow_none=True) + enrollment_date = fields.Date(allow_none=True) + + class Meta: + model = Study + # additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid", + # "sponsor", "ind_number", "approvals", "files", "enrollment_date"] + unknown = INCLUDE + + @marshmallow.post_load + def make_study(self, data, **kwargs): + """Can load the basic study data for updates to the database, but categories are write only""" + return Study(**data) + + class StudySchema(ma.Schema): id = fields.Integer(required=False, allow_none=True) From 6379b26a71d8534006c2ce15b9c90e9a5924f5a8 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Tue, 21 Jul 2020 15:18:08 -0400 Subject: [PATCH 07/31] Add a few more details to the workflow metadata model. --- crc/models/study.py | 7 +++++-- crc/models/task_event.py | 1 - example_data.py | 1 - tests/base_test.py | 19 ++++++++++++++----- tests/study/test_study_service.py | 9 ++++----- tests/test_user_roles.py | 3 ++- 6 files changed, 25 insertions(+), 15 deletions(-) diff --git a/crc/models/study.py b/crc/models/study.py index 47d4eb8f..7bb2db33 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -40,7 +40,7 @@ class StudyModel(db.Model): class WorkflowMetadata(object): - def __init__(self, id, name, display_name, description, spec_version, category_id, state: WorkflowState, status: WorkflowStatus, + def __init__(self, id, name, display_name, description, spec_version, category_id, category_display_name, state: WorkflowState, status: WorkflowStatus, total_tasks, completed_tasks, display_order): self.id = id self.name = name @@ -48,6 +48,7 @@ class WorkflowMetadata(object): self.description = description self.spec_version = spec_version self.category_id = category_id + self.category_display_name = category_display_name self.state = state self.status = status self.total_tasks = total_tasks @@ -64,6 +65,7 @@ class WorkflowMetadata(object): description=workflow.workflow_spec.description, spec_version=workflow.spec_version(), category_id=workflow.workflow_spec.category_id, + category_display_name=workflow.workflow_spec.category.display_name, state=WorkflowState.optional, status=workflow.status, total_tasks=workflow.total_tasks, @@ -79,7 +81,8 @@ class WorkflowMetadataSchema(ma.Schema): class Meta: model = WorkflowMetadata additional = ["id", "name", "display_name", "description", - "total_tasks", "completed_tasks", "display_order"] + "total_tasks", "completed_tasks", "display_order", + "category_id", "category_display_name"] unknown = INCLUDE diff --git a/crc/models/task_event.py b/crc/models/task_event.py index 
a6cb1a2d..e3914468 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -56,7 +56,6 @@ class TaskEventSchema(ma.Schema): study = fields.Nested(StudySchema, dump_only=True) workflow = fields.Nested(WorkflowMetadataSchema, dump_only=True) - class Meta: model = TaskEvent additional = ["id", "user_uid", "action", "task_id", "task_title", diff --git a/example_data.py b/example_data.py index efdfe3b3..8b9b0c27 100644 --- a/example_data.py +++ b/example_data.py @@ -251,7 +251,6 @@ class ExampleDataLoader: master_spec=False, from_tests=True) - def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False, category_id=None, display_order=None, from_tests=False): """Assumes that a directory exists in static/bpmn with the same name as the given id. diff --git a/tests/base_test.py b/tests/base_test.py index 6ea1966d..3f0b2405 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -19,7 +19,7 @@ from crc.models.protocol_builder import ProtocolBuilderStatus from crc.models.task_event import TaskEventModel from crc.models.study import StudyModel from crc.models.user import UserModel -from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel +from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel from crc.services.file_service import FileService from crc.services.study_service import StudyService from crc.services.workflow_service import WorkflowService @@ -164,14 +164,21 @@ class BaseTest(unittest.TestCase): self.assertGreater(len(file_data), 0) @staticmethod - def load_test_spec(dir_name, master_spec=False, category_id=None): + def load_test_spec(dir_name, display_name=None, master_spec=False, category_id=None): """Loads a spec into the database based on a directory in /tests/data""" + if category_id is None: + category = WorkflowSpecCategoryModel(name="test", display_name="Test Workflows", display_order=0) + db.session.add(category) + db.session.commit() + category_id = category.id if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0: return session.query(WorkflowSpecModel).filter_by(id=dir_name).first() filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*") + if display_name is None: + display_name = dir_name return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath, master_spec=master_spec, - category_id=category_id) + display_name=display_name, category_id=category_id) @staticmethod def protocol_builder_response(file_name): @@ -263,11 +270,13 @@ class BaseTest(unittest.TestCase): return full_study - def create_workflow(self, workflow_name, study=None, category_id=None, as_user="dhf8r"): + def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"): db.session.flush() spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first() if spec is None: - spec = self.load_test_spec(workflow_name, category_id=category_id) + if display_name is None: + display_name = workflow_name + spec = self.load_test_spec(workflow_name, display_name, category_id=category_id) if study is None: study = self.create_study(uid=as_user) workflow_model = StudyService._create_workflow_model(study, spec) diff --git a/tests/study/test_study_service.py b/tests/study/test_study_service.py index b436835f..f1e43c8a 100644 --- a/tests/study/test_study_service.py +++ b/tests/study/test_study_service.py @@ -27,7 +27,10 @@ class 
TestStudyService(BaseTest): # Assure some basic models are in place, This is a damn mess. Our database models need an overhaul to make # this easier - better relationship modeling is now critical. - self.load_test_spec("top_level_workflow", master_spec=True) + cat = WorkflowSpecCategoryModel(name="approvals", display_name="Approvals", display_order=0) + db.session.add(cat) + db.session.commit() + self.load_test_spec("top_level_workflow", master_spec=True, category_id=cat.id) user = db.session.query(UserModel).filter(UserModel.uid == "dhf8r").first() if not user: user = UserModel(uid="dhf8r", email_address="whatever@stuff.com", display_name="Stayathome Smellalots") @@ -39,11 +42,7 @@ class TestStudyService(BaseTest): study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.ACTIVE, user_uid=user.uid) db.session.add(study) - cat = WorkflowSpecCategoryModel(name="approvals", display_name="Approvals", display_order=0) - db.session.add(cat) - db.session.commit() - self.assertIsNotNone(cat.id) self.load_test_spec("random_fact", category_id=cat.id) self.assertIsNotNone(study.id) diff --git a/tests/test_user_roles.py b/tests/test_user_roles.py index 8a0ea8ae..ed879d2e 100644 --- a/tests/test_user_roles.py +++ b/tests/test_user_roles.py @@ -68,7 +68,7 @@ class TestTasksApi(BaseTest): def test_get_outstanding_tasks_awaiting_current_user(self): submitter = self.create_user(uid='lje5u') supervisor = self.create_user(uid='lb3dp') - workflow = self.create_workflow('roles', as_user=submitter.uid) + workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid) workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid) # User lje5u can complete the first task, and set her supervisor @@ -94,6 +94,7 @@ class TestTasksApi(BaseTest): self.assertEquals(1, len(tasks)) self.assertEquals(workflow.id, tasks[0]['workflow']['id']) self.assertEquals(workflow.study.id, tasks[0]['study']['id']) + self.assertEquals("Test Workflows", tasks[0]['workflow']['category_display_name']) # Assure we can say something sensible like: # You have a task called "Approval" to be completed in the "Supervisor Approval" workflow From 4adb6a1b44ce131fe4eb91d844e90eae5dc1db17 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 09:35:08 -0400 Subject: [PATCH 08/31] Adds enrollment_date to study model --- crc/models/study.py | 7 +- crc/services/workflow_service.py | 5 +- .../bpmn/notifications/notifications.bpmn | 100 +++++++++++++++--- migrations/versions/c4ddb69e7ef4_.py | 28 +++++ 4 files changed, 122 insertions(+), 18 deletions(-) create mode 100644 migrations/versions/c4ddb69e7ef4_.py diff --git a/crc/models/study.py b/crc/models/study.py index 7bb2db33..854ce62f 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -25,6 +25,7 @@ class StudyModel(db.Model): investigator_uids = db.Column(db.ARRAY(db.String), nullable=True) requirements = db.Column(db.ARRAY(db.Integer), nullable=True) on_hold = db.Column(db.Boolean, default=False) + enrollment_date = db.Column(db.DateTime(timezone=True), nullable=True) def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy): self.hsr_number = pbs.HSRNUMBER @@ -108,7 +109,7 @@ class Study(object): id=None, protocol_builder_status=None, sponsor="", hsr_number="", ind_number="", categories=[], - files=[], approvals=[], **argsv): + files=[], approvals=[], enrollment_date=None, **argsv): self.id = id self.user_uid = user_uid self.title = title @@ -122,6 +123,7 @@ class Study(object): self.approvals = approvals 
self.warnings = [] self.files = files + self.enrollment_date = enrollment_date @classmethod def from_model(cls, study_model: StudyModel): @@ -154,11 +156,12 @@ class StudySchema(ma.Schema): ind_number = fields.String(allow_none=True) files = fields.List(fields.Nested(FileSchema), dump_only=True) approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True) + enrollment_date = fields.Date(allow_none=True) class Meta: model = Study additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid", - "sponsor", "ind_number", "approvals", "files"] + "sponsor", "ind_number", "approvals", "files", "enrollment_date"] unknown = INCLUDE @marshmallow.post_load diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 3205e800..65794037 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -154,10 +154,9 @@ class WorkflowService(object): if len(field.options) > 0: random_choice = random.choice(field.options) if isinstance(random_choice, dict): - choice = random.choice(field.options) return { - 'value': choice['id'], - 'label': choice['name'] + 'value': random_choice['id'], + 'label': random_choice['name'] } else: # fixme: why it is sometimes an EnumFormFieldOption, and other times not? diff --git a/crc/static/bpmn/notifications/notifications.bpmn b/crc/static/bpmn/notifications/notifications.bpmn index 4c01a711..cd73505f 100644 --- a/crc/static/bpmn/notifications/notifications.bpmn +++ b/crc/static/bpmn/notifications/notifications.bpmn @@ -1,42 +1,116 @@ - + + + + + + + StartEvent_1 + Activity_1qpy9ra + Event_1m9fnmv + + + Gateway_0ved0t9 + Activity_107ojvq + + Flow_0q51aiq - - + - + Flow_0q51aiq - Flow_0ai4j1x + Flow_11tnx3n + Flow_0d2snmk + + + + Flow_0apr3nj + Flow_0mhtlkt + Flow_11tnx3n + + + + is_study_approved == True + - Flow_0ai4j1x + Flow_0mhtlkt - + + is_study_approved == False + + + + + + + + Flow_0d2snmk + Flow_0apr3nj + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + - + + + + diff --git a/migrations/versions/c4ddb69e7ef4_.py b/migrations/versions/c4ddb69e7ef4_.py new file mode 100644 index 00000000..533d2f86 --- /dev/null +++ b/migrations/versions/c4ddb69e7ef4_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: c4ddb69e7ef4 +Revises: ffef4661a37d +Create Date: 2020-07-22 09:04:09.769239 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c4ddb69e7ef4' +down_revision = 'ffef4661a37d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('study', sa.Column('enrollment_date', sa.DateTime(timezone=True), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('study', 'enrollment_date') + # ### end Alembic commands ### From 91c835906cd768e54076e8d321a1b7942a8ee3f8 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:47:25 -0400 Subject: [PATCH 09/31] Updates package versions. 
--- Pipfile.lock | 117 +++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 114 insertions(+), 3 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 72657ab7..c3fb647d 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -30,6 +30,14 @@ "index": "pypi", "version": "==1.4.2" }, + "amqp": { + "hashes": [ + "sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b", + "sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==2.6.0" + }, "aniso8601": { "hashes": [ "sha256:529dcb1f5f26ee0df6c0a1ee84b7b27197c3c50fc3a6321d66c544689237d072", @@ -42,6 +50,7 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "babel": { @@ -49,6 +58,7 @@ "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.0" }, "bcrypt": { @@ -72,6 +82,7 @@ "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.7" }, "beautifulsoup4": { @@ -88,6 +99,14 @@ ], "version": "==1.4" }, + "celery": { + "hashes": [ + "sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916", + "sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.4.6" + }, "certifi": { "hashes": [ "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", @@ -140,6 +159,7 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, "clickclick": { @@ -156,6 +176,14 @@ ], "version": "==0.9.1" }, + "configparser": { + "hashes": [ + "sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1", + "sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd" + ], + "markers": "python_version >= '3.6'", + "version": "==5.0.0" + }, "connexion": { "extras": [ "swagger-ui" @@ -212,6 +240,7 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "docxtpl": { @@ -294,8 +323,16 @@ "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", "sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.4" }, + "future": { + "hashes": [ + "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.18.2" + }, "gunicorn": { "hashes": [ 
"sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626", @@ -316,6 +353,7 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -323,6 +361,7 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "importlib-metadata": { @@ -338,6 +377,7 @@ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], + "markers": "python_version >= '3.5'", "version": "==0.5.0" }, "itsdangerous": { @@ -345,6 +385,7 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jdcal": { @@ -359,6 +400,7 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, "jsonschema": { @@ -368,9 +410,21 @@ ], "version": "==3.2.0" }, + "kombu": { + "hashes": [ + "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", + "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.6.11" + }, "ldap3": { "hashes": [ + "sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744", "sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0", + "sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd", + "sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871", + "sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c", "sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b" ], "index": "pypi", @@ -418,6 +472,7 @@ "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27", "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.3" }, "markdown": { @@ -464,6 +519,7 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "marshmallow": { @@ -519,6 +575,7 @@ "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e", "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc" ], + "markers": "python_version >= '3.6'", "version": "==1.19.1" }, "openapi-spec-validator": { @@ -542,6 +599,7 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pandas": { @@ -604,8 
+662,19 @@ }, "pyasn1": { "hashes": [ + "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", + "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", + "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" + "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", + "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", + "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", + "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", + "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", + "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", + "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", + "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776" ], "version": "==0.4.8" }, @@ -614,6 +683,7 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -621,6 +691,7 @@ "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44", "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324" ], + "markers": "python_version >= '3.5'", "version": "==2.6.1" }, "pyjwt": { @@ -636,6 +707,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { @@ -644,6 +716,14 @@ ], "version": "==0.16.0" }, + "python-box": { + "hashes": [ + "sha256:2df0d0e0769b6d6e7daed8d5e0b10a38e0b5486ee75914c30f2a927f7a374111", + "sha256:ddea019b4ee53fe3f822407b0b26ec54ff6233042c68b54244d3503ae4d6218f" + ], + "markers": "python_version >= '3.6'", + "version": "==5.0.1" + }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -660,9 +740,11 @@ }, "python-editor": { "hashes": [ - "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", + "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", + "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", - "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d" ], "version": "==1.0.4" }, @@ -721,6 +803,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -735,6 +818,7 @@ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" ], + "markers": "python_version >= '3.5'", "version": "==2.0.1" }, 
"sphinx": { @@ -750,6 +834,7 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -757,6 +842,7 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -764,6 +850,7 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -771,6 +858,7 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], + "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -778,6 +866,7 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -785,6 +874,7 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], + "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "spiffworkflow": { @@ -822,6 +912,7 @@ "sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274", "sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.18" }, "swagger-ui-bundle": { @@ -837,13 +928,23 @@ "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.25.10" }, + "vine": { + "hashes": [ + "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", + "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.3.0" + }, "waitress": { "hashes": [ "sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261", "sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.4.4" }, "webob": { @@ -851,6 +952,7 @@ "sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b", "sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.6" }, "webtest": { @@ -897,6 +999,7 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], + "markers": "python_version >= '3.6'", "version": "==3.1.0" } }, @@ -906,6 +1009,7 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], + "markers": "python_version 
>= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "coverage": { @@ -961,6 +1065,7 @@ "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" ], + "markers": "python_version >= '3.5'", "version": "==8.4.0" }, "packaging": { @@ -968,6 +1073,7 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pbr": { @@ -983,6 +1089,7 @@ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { @@ -990,6 +1097,7 @@ "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2", "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.9.0" }, "pyparsing": { @@ -997,6 +1105,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pytest": { @@ -1012,6 +1121,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "wcwidth": { @@ -1026,6 +1136,7 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], + "markers": "python_version >= '3.6'", "version": "==3.1.0" } } From 6cdb9c3b3a72819db445257b914d1f9cd9fa882c Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:47:53 -0400 Subject: [PATCH 10/31] Exposes date in TaskEvent endpoint --- crc/models/task_event.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crc/models/task_event.py b/crc/models/task_event.py index e3914468..c696bc26 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -50,6 +50,7 @@ class TaskEvent(object): self.task_type = model.task_type self.task_state = model.task_state self.task_lane = model.task_lane + self.date = model.date class TaskEventSchema(ma.Schema): @@ -59,5 +60,5 @@ class TaskEventSchema(ma.Schema): class Meta: model = TaskEvent additional = ["id", "user_uid", "action", "task_id", "task_title", - "task_name", "task_type", "task_state", "task_lane"] + "task_name", "task_type", "task_state", "task_lane", "date"] unknown = INCLUDE From 263ea4d00f76046f931c9e5a30cc1160373a3ed3 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 22 Jul 2020 14:48:36 -0400 Subject: [PATCH 11/31] Adds manual task --- .../bpmn/notifications/notifications.bpmn | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/crc/static/bpmn/notifications/notifications.bpmn b/crc/static/bpmn/notifications/notifications.bpmn index cd73505f..a9fdedbf 100644 --- a/crc/static/bpmn/notifications/notifications.bpmn +++ b/crc/static/bpmn/notifications/notifications.bpmn @@ -9,6 +9,7 @@ StartEvent_1 
Activity_1qpy9ra Event_1m9fnmv + Activity_0c5drp3 Gateway_0ved0t9 @@ -25,7 +26,7 @@ Flow_0q51aiq - Flow_11tnx3n + Flow_1ugh4wn Flow_0d2snmk @@ -42,7 +43,7 @@ Flow_0mhtlkt - + is_study_approved == False @@ -54,6 +55,12 @@ Flow_0d2snmk Flow_0apr3nj + + + Your request was not approved. Try again. + Flow_11tnx3n + Flow_1ugh4wn + @@ -68,10 +75,9 @@ - - + - + @@ -94,6 +100,10 @@ + + + + @@ -112,6 +122,9 @@ + + + From 0d2cb8c1b8df579a0d884bb5358ff341ec4acff5 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Thu, 23 Jul 2020 12:00:24 -0400 Subject: [PATCH 12/31] Correcting an issue with the Navigation where it did not correctly handle looking back to a previous task within the workflow. In some cases the session was not getting committed, leaving rogue assignments outstanding for a workflow. --- Pipfile.lock | 71 ++------------------------------ crc/services/workflow_service.py | 1 + tests/test_user_roles.py | 63 +++++++++++++++++++++++++++- 3 files changed, 67 insertions(+), 68 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index c3fb647d..a1bb7222 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -35,7 +35,6 @@ "sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b", "sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.6.0" }, "aniso8601": { @@ -50,7 +49,6 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "babel": { @@ -58,7 +56,6 @@ "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.0" }, "bcrypt": { @@ -82,7 +79,6 @@ "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.7" }, "beautifulsoup4": { @@ -104,7 +100,6 @@ "sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916", "sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.4.6" }, "certifi": { @@ -159,7 +154,6 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, "clickclick": { @@ -181,7 +175,6 @@ "sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1", "sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd" ], - "markers": "python_version >= '3.6'", "version": "==5.0.0" }, "connexion": { @@ -240,7 +233,6 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "docxtpl": { @@ -323,14 +315,12 @@ "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", 
"sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.4" }, "future": { "hashes": [ "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.2" }, "gunicorn": { @@ -353,7 +343,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -361,7 +350,6 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "importlib-metadata": { @@ -377,7 +365,6 @@ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], - "markers": "python_version >= '3.5'", "version": "==0.5.0" }, "itsdangerous": { @@ -385,7 +372,6 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jdcal": { @@ -400,7 +386,6 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, "jsonschema": { @@ -415,16 +400,11 @@ "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.6.11" }, "ldap3": { "hashes": [ - "sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744", "sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0", - "sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd", - "sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871", - "sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c", "sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b" ], "index": "pypi", @@ -472,7 +452,6 @@ "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27", "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.3" }, "markdown": { @@ -519,7 +498,6 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "marshmallow": { @@ -575,7 +553,6 @@ "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e", "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc" ], - "markers": "python_version >= '3.6'", "version": "==1.19.1" }, "openapi-spec-validator": { @@ -599,7 +576,6 @@ 
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pandas": { @@ -662,19 +638,8 @@ }, "pyasn1": { "hashes": [ - "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", - "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", - "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", - "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", - "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", - "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", - "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", - "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", - "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", - "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776" + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" ], "version": "==0.4.8" }, @@ -683,7 +648,6 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -691,7 +655,6 @@ "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44", "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324" ], - "markers": "python_version >= '3.5'", "version": "==2.6.1" }, "pyjwt": { @@ -707,7 +670,6 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { @@ -721,7 +683,6 @@ "sha256:2df0d0e0769b6d6e7daed8d5e0b10a38e0b5486ee75914c30f2a927f7a374111", "sha256:ddea019b4ee53fe3f822407b0b26ec54ff6233042c68b54244d3503ae4d6218f" ], - "markers": "python_version >= '3.6'", "version": "==5.0.1" }, "python-dateutil": { @@ -740,11 +701,9 @@ }, "python-editor": { "hashes": [ - "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", - "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", - "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", - "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d" + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" ], "version": "==1.0.4" }, @@ -803,7 +762,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -818,7 +776,6 @@ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", 
"sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" ], - "markers": "python_version >= '3.5'", "version": "==2.0.1" }, "sphinx": { @@ -834,7 +791,6 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -842,7 +798,6 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -850,7 +805,6 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -858,7 +812,6 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], - "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -866,7 +819,6 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -874,12 +826,11 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "spiffworkflow": { "git": "https://github.com/sartography/SpiffWorkflow.git", - "ref": "11ad40bbcb0fbd3c5bc1078e4989dc38b749f7f3" + "ref": "5785d3cab99e319596e1bf0006df96f215febafd" }, "sqlalchemy": { "hashes": [ @@ -912,7 +863,6 @@ "sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274", "sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.18" }, "swagger-ui-bundle": { @@ -928,7 +878,6 @@ "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.25.10" }, "vine": { @@ -936,7 +885,6 @@ "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.0" }, "waitress": { @@ -944,7 +892,6 @@ "sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261", "sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.4.4" }, "webob": { @@ -952,7 +899,6 @@ "sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b", "sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.6" }, "webtest": { @@ -999,7 +945,6 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", 
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], - "markers": "python_version >= '3.6'", "version": "==3.1.0" } }, @@ -1009,7 +954,6 @@ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "coverage": { @@ -1065,7 +1009,6 @@ "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" ], - "markers": "python_version >= '3.5'", "version": "==8.4.0" }, "packaging": { @@ -1073,7 +1016,6 @@ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4" }, "pbr": { @@ -1089,7 +1031,6 @@ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { @@ -1097,7 +1038,6 @@ "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2", "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.9.0" }, "pyparsing": { @@ -1105,7 +1045,6 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pytest": { @@ -1121,7 +1060,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "wcwidth": { @@ -1136,7 +1074,6 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], - "markers": "python_version >= '3.6'", "version": "==3.1.0" } } diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 65794037..e078166b 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -470,6 +470,7 @@ class WorkflowService(object): db.session.query(TaskEventModel). \ filter(TaskEventModel.workflow_id == processor.workflow_model.id). \ filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete() + db.session.commit() for task in processor.get_current_user_tasks(): user_ids = WorkflowService.get_users_assigned_to_task(processor, task) diff --git a/tests/test_user_roles.py b/tests/test_user_roles.py index ed879d2e..084df85d 100644 --- a/tests/test_user_roles.py +++ b/tests/test_user_roles.py @@ -200,4 +200,65 @@ class TestTasksApi(BaseTest): workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid) self.assertEquals('COMPLETED', workflow_api.next_task.state) self.assertEquals('EndEvent', workflow_api.next_task.type) # Are are at the end. 
- self.assertEquals(WorkflowStatus.complete, workflow_api.status) \ No newline at end of file + self.assertEquals(WorkflowStatus.complete, workflow_api.status) + + def get_assignment_task_events(self, uid): + return db.session.query(TaskEventModel). \ + filter(TaskEventModel.user_uid == uid). \ + filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).all() + + def test_workflow_reset_correctly_resets_the_task_events(self): + + submitter = self.create_user(uid='lje5u') + supervisor = self.create_user(uid='lb3dp') + workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid) + workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid) + + # User lje5u can complete the first task, and set her supervisor + data = workflow_api.next_task.data + data['supervisor'] = supervisor.uid + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + + # At this point there should be a task_log with an action of ASSIGNMENT on it for + # the supervisor. + self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid))) + + # Resetting the workflow at this point should clear the event log. + workflow_api = self.get_workflow_api(workflow, hard_reset=True, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) + + # Re-complete the first task, and awaiting tasks should shift to 0 for submitter, and 1 for supervisor + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid))) + + # Complete the supervisor task with rejected approval, and the assignments should switch. + workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid) + data = workflow_api.next_task.data + data["approval"] = False + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid) + self.assertEquals(1, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) + + # Mark the return form review page as complete, and then recomplete the form, and assignments switch yet again. + workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid) + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid))) + + # Complete the supervisor task, accepting the approval, and the workflow is completed. + # When it is all done, there should be no outstanding assignments. + workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid) + data = workflow_api.next_task.data + data["approval"] = True + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid) + self.assertEquals(WorkflowStatus.complete, workflow_api.status) + self.assertEquals('EndEvent', workflow_api.next_task.type) # We are at the end.
+ self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) + + # Sending any subsequent complete forms does not result in a new task event + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) + self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) From 452f2c3723c98462dd3e9553d3ec90b4514c685d Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Mon, 27 Jul 2020 14:38:57 -0400 Subject: [PATCH 13/31] Building out a user service for getting the current user, it will provide a number of functions, one of which will allow administrative users to impersonate other users in some circumstances (but will assure that we log events correctly when an impersonation occures) --- README.md | 3 +++ crc/api/common.py | 3 +++ crc/api/study.py | 8 ++++--- crc/api/user.py | 23 ++++++++------------ crc/api/workflow.py | 28 +++++++++++------------- crc/models/user.py | 5 ++++- crc/services/user_service.py | 37 ++++++++++++++++++++++++++++++++ crc/services/workflow_service.py | 5 +++-- tests/base_test.py | 5 ++++- 9 files changed, 80 insertions(+), 37 deletions(-) create mode 100644 crc/services/user_service.py diff --git a/README.md b/README.md index 6bd7dd67..e559f044 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,9 @@ Make sure all of the following are properly installed on your system: - [Install pipenv](https://pipenv-es.readthedocs.io/es/stable/) - [Add ${HOME}/.local/bin to your PATH](https://github.com/pypa/pipenv/issues/2122#issue-319600584) +### Running Postgres + + ### Project Initialization 1. Clone this repository. 2. In PyCharm: diff --git a/crc/api/common.py b/crc/api/common.py index cb527c73..31a2c8df 100644 --- a/crc/api/common.py +++ b/crc/api/common.py @@ -1,5 +1,6 @@ from SpiffWorkflow import WorkflowException from SpiffWorkflow.exceptions import WorkflowTaskExecException +from flask import g from crc import ma, app @@ -60,3 +61,5 @@ class ApiErrorSchema(ma.Schema): def handle_invalid_usage(error): response = ApiErrorSchema().dump(error) return response, error.status_code + + diff --git a/crc/api/study.py b/crc/api/study.py index 8fdd1b4a..e288ee2f 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -8,6 +8,7 @@ from crc.api.common import ApiError, ApiErrorSchema from crc.models.protocol_builder import ProtocolBuilderStatus from crc.models.study import StudySchema, StudyModel, Study from crc.services.study_service import StudyService +from crc.services.user_service import UserService def add_study(body): @@ -17,7 +18,7 @@ def add_study(body): if 'title' not in body: raise ApiError("missing_title", "Can't create a new study without a title.") - study_model = StudyModel(user_uid=g.user.uid, + study_model = StudyModel(user_uid=UserService.current_user().uid, title=body['title'], primary_investigator_id=body['primary_investigator_id'], last_updated=datetime.now(), @@ -65,8 +66,9 @@ def delete_study(study_id): def user_studies(): """Returns all the studies associated with the current user. 
""" - StudyService.synch_with_protocol_builder_if_enabled(g.user) - studies = StudyService.get_studies_for_user(g.user) + user = UserService.current_user(allow_admin_impersonate=True) + StudyService.synch_with_protocol_builder_if_enabled(user) + studies = StudyService.get_studies_for_user(user) results = StudySchema(many=True).dump(studies) return results diff --git a/crc/api/user.py b/crc/api/user.py index fc86bd02..49b447ac 100644 --- a/crc/api/user.py +++ b/crc/api/user.py @@ -63,13 +63,15 @@ def verify_token(token=None): # Fall back to a default user if this is not production. g.user = UserModel.query.first() token = g.user.encode_auth_token() + token_info = UserModel.decode_auth_token(token) + return token_info def verify_token_admin(token=None): """ - Verifies the token for the user (if provided) in non-production environment. If in production environment, - checks that the user is in the list of authorized admins + Verifies the token for the user (if provided) in non-production environment. + If in production environment, checks that the user is in the list of authorized admins Args: token: Optional[str] @@ -77,18 +79,11 @@ def verify_token_admin(token=None): Returns: token: str """ - - # If this is production, check that the user is in the list of admins - if _is_production(): - uid = _get_request_uid(request) - - if uid is not None and uid in app.config['ADMIN_UIDS']: - return verify_token() - - # If we're not in production, just use the normal verify_token method - else: - return verify_token(token) - + verify_token(token) + if "user" in g and g.user.is_admin(): + token = g.user.encode_auth_token() + token_info = UserModel.decode_auth_token(token) + return token_info def get_current_user(): return UserModelSchema().dump(g.user) diff --git a/crc/api/workflow.py b/crc/api/workflow.py index a290d340..0279e6bf 100644 --- a/crc/api/workflow.py +++ b/crc/api/workflow.py @@ -13,6 +13,7 @@ from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, Workflow from crc.services.file_service import FileService from crc.services.lookup_service import LookupService from crc.services.study_service import StudyService +from crc.services.user_service import UserService from crc.services.workflow_processor import WorkflowProcessor from crc.services.workflow_service import WorkflowService @@ -104,8 +105,10 @@ def get_workflow(workflow_id, soft_reset=False, hard_reset=False): def get_task_events(action): - """Provides a way to see a history of what has happened, or get a list of tasks that need your attention.""" - query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid) + """Provides a way to see a history of what has happened, or get a list of + tasks that need your attention.""" + user = UserService.current_user(allow_admin_impersonate=True) + query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == user.uid) if action: query = query.filter(TaskEventModel.action == action) events = query.all() @@ -130,7 +133,7 @@ def set_current_task(workflow_id, task_id): task_id = uuid.UUID(task_id) spiff_task = processor.bpmn_workflow.get_task(task_id) _verify_user_and_role(processor, spiff_task) - user_uid = g.user.uid + user_uid = UserService.current_user(allow_admin_impersonate=True).uid if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY: raise ApiError("invalid_state", "You may not move the token to a task who's state is not " "currently set to COMPLETE or READY.") @@ -173,7 +176,8 @@ def update_task(workflow_id, 
task_id, body, terminate_loop=None): processor.save() # Log the action, and any pending task assignments in the event of lanes in the workflow. - WorkflowService.log_task_action(g.user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE) + user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user. + WorkflowService.log_task_action(user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE) WorkflowService.update_task_assignments(processor) workflow_api_model = WorkflowService.processor_to_workflow_api(processor) @@ -233,19 +237,11 @@ def lookup(workflow_id, field_id, query=None, value=None, limit=10): def _verify_user_and_role(processor, spiff_task): """Assures the currently logged in user can access the given workflow and task, or - raises an error. - Allow administrators to modify tasks, otherwise assure that the current user - is allowed to edit or update the task. Will raise the appropriate error if user - is not authorized. """ - - if 'user' not in g: - raise ApiError("logged_out", "You are no longer logged in.", status_code=401) - - if g.user.uid in app.config['ADMIN_UIDS']: - return g.user.uid + raises an error. """ + user = UserService.current_user(allow_admin_impersonate=True) allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task) - if g.user.uid not in allowed_users: + if user.uid not in allowed_users: raise ApiError.from_task("permission_denied", f"This task must be completed by '{allowed_users}', " - f"but you are {g.user.uid}", spiff_task) + f"but you are {user.uid}", spiff_task) diff --git a/crc/models/user.py b/crc/models/user.py index 221176bc..e621455b 100644 --- a/crc/models/user.py +++ b/crc/models/user.py @@ -18,9 +18,12 @@ class UserModel(db.Model): first_name = db.Column(db.String, nullable=True) last_name = db.Column(db.String, nullable=True) title = db.Column(db.String, nullable=True) - # TODO: Add Department and School + def is_admin(self): + # Currently admin abilities are set in the configuration, but this + # may change in the future. + return self.uid in app.config['ADMIN_UIDS'] def encode_auth_token(self): """ diff --git a/crc/services/user_service.py b/crc/services/user_service.py new file mode 100644 index 00000000..6b2887f5 --- /dev/null +++ b/crc/services/user_service.py @@ -0,0 +1,37 @@ +from flask import g + +from crc.api.common import ApiError + + +class UserService(object): + """Provides common tools for working with users""" + + @staticmethod + def has_user(): + if 'user' not in g or not g.user: + return False + else: + return True + + @staticmethod + def current_user(allow_admin_impersonate=False): + + if not UserService.has_user(): + raise ApiError("logged_out", "You are no longer logged in.", status_code=401) + + # Admins can pretend to be different users and act on a users behalf in + # some circumstances. + if g.user.is_admin() and allow_admin_impersonate and "impersonate_user" in g: + return g.impersonate_user + else: + return g.user + + @staticmethod + def in_list(uids, allow_admin_impersonate=False): + """Returns true if the current user's id is in the given list of ids. False if there + is no user, or the user is not in the list.""" + if UserService.has_user(): # If someone is logged in, lock tasks that don't belong to them. 
+ user = UserService.current_user(allow_admin_impersonate) + if user.uid in uids: + return True + return False diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 3205e800..d27fe223 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -30,6 +30,7 @@ from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel from crc.services.file_service import FileService from crc.services.lookup_service import LookupService from crc.services.study_service import StudyService +from crc.services.user_service import UserService from crc.services.workflow_processor import WorkflowProcessor @@ -239,7 +240,7 @@ class WorkflowService(object): nav_item['title'] = nav_item['task'].title # Prefer the task title. user_uids = WorkflowService.get_users_assigned_to_task(processor, spiff_task) - if 'user' not in g or not g.user or g.user.uid not in user_uids: + if not UserService.in_list(user_uids, allow_admin_impersonate=True): nav_item['state'] = WorkflowService.TASK_STATE_LOCKED else: @@ -272,7 +273,7 @@ class WorkflowService(object): workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True) # Update the state of the task to locked if the current user does not own the task. user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task) - if 'user' not in g or not g.user or g.user.uid not in user_uids: + if not UserService.in_list(user_uids, allow_admin_impersonate=True): workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED return workflow_api diff --git a/tests/base_test.py b/tests/base_test.py index 6ea1966d..1ff1af6f 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -2,6 +2,8 @@ # IMPORTANT - Environment must be loaded before app, models, etc.... 
import os +from crc.services.user_service import UserService + os.environ["TESTING"] = "true" import json @@ -118,7 +120,8 @@ class BaseTest(unittest.TestCase): self.assertIsNotNone(user_model.display_name) self.assertEqual(user_model.uid, uid) self.assertTrue('user' in g, 'User should be in Flask globals') - self.assertEqual(uid, g.user.uid, 'Logged in user should match given user uid') + user = UserService.current_user(allow_admin_impersonate=True) + self.assertEqual(uid, user.uid, 'Logged in user should match given user uid') return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode()) From de0fe705c32a44ce0b82083f6c54e1893ad1a05a Mon Sep 17 00:00:00 2001 From: Carlos Lopez Date: Mon, 27 Jul 2020 14:00:44 -0600 Subject: [PATCH 14/31] Wrapping LOCKED task update attempt into a try-catch block for tests --- tests/test_user_roles.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_user_roles.py b/tests/test_user_roles.py index 084df85d..74871476 100644 --- a/tests/test_user_roles.py +++ b/tests/test_user_roles.py @@ -259,6 +259,8 @@ class TestTasksApi(BaseTest): self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) # Sending any subsequent complete forms does not result in a new task event - workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + with self.assertRaises(AssertionError) as _api_error: + workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid) + self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid))) self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid))) From 8d42d520a019e6181b319829fe5d2eecc249c5ab Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Mon, 27 Jul 2020 16:32:23 -0400 Subject: [PATCH 15/31] Adding events to the study that is returned via the api. 
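As a quick illustration of the change described above, here is a minimal client-side sketch of reading the new "events" list from the study endpoint. The host, port, and bearer token are placeholders, and the sketch only touches fields that TaskEventSchema already exposes in this series (action, task_title, date); it is an illustration for reviewers, not part of the patch itself.

import requests

# Placeholders for illustration; point these at a running instance and a valid token.
BASE_URL = "http://localhost:5000/v1.0"
HEADERS = {"Authorization": "Bearer <token>"}

def print_study_events(study_id):
    # GET /study/{id} now returns an "events" list serialized by TaskEventSchema.
    response = requests.get("%s/study/%i" % (BASE_URL, study_id), headers=HEADERS)
    response.raise_for_status()
    study = response.json()
    for event in study.get("events", []):
        # "action" is whatever WorkflowService.log_task_action recorded; "task_title"
        # and "date" come from the schema fields added earlier in this series.
        print(event["action"], event.get("task_title"), event.get("date"))

if __name__ == "__main__":
    print_study_events(1)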
--- crc/models/study.py | 7 +++++-- crc/services/study_service.py | 16 +++++++++++----- tests/study/test_study_api.py | 17 +++++++++++++++++ 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/crc/models/study.py b/crc/models/study.py index bc92e5e1..f6c03736 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -41,8 +41,10 @@ class StudyModel(db.Model): class WorkflowMetadata(object): - def __init__(self, id, name, display_name, description, spec_version, category_id, category_display_name, state: WorkflowState, status: WorkflowStatus, - total_tasks, completed_tasks, display_order): + def __init__(self, id, name = None, display_name = None, description = None, spec_version = None, + category_id = None, category_display_name = None, state: WorkflowState = None, + status: WorkflowStatus = None, total_tasks = None, completed_tasks = None, + display_order = None): self.id = id self.name = name self.display_name = display_name @@ -176,6 +178,7 @@ class StudySchema(ma.Schema): files = fields.List(fields.Nested(FileSchema), dump_only=True) approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True) enrollment_date = fields.Date(allow_none=True) + events = fields.List(fields.Nested('TaskEventSchema'), dump_only=True) class Meta: model = Study diff --git a/crc/services/study_service.py b/crc/services/study_service.py index fbc62d01..cbf3434d 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -1,6 +1,5 @@ from copy import copy from datetime import datetime -import json from typing import List import requests @@ -13,16 +12,15 @@ from crc.api.common import ApiError from crc.models.file import FileModel, FileModelSchema, File from crc.models.ldap import LdapSchema from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus -from crc.models.task_event import TaskEventModel from crc.models.study import StudyModel, Study, Category, WorkflowMetadata +from crc.models.task_event import TaskEventModel, TaskEvent from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \ WorkflowStatus +from crc.services.approval_service import ApprovalService from crc.services.file_service import FileService from crc.services.ldap_service import LdapService from crc.services.protocol_builder import ProtocolBuilderService from crc.services.workflow_processor import WorkflowProcessor -from crc.services.approval_service import ApprovalService -from crc.models.approval import Approval class StudyService(object): @@ -63,7 +61,7 @@ class StudyService(object): files = (File.from_models(model, FileService.get_file_data(model.id), FileService.get_doc_dictionary()) for model in files) study.files = list(files) - + study.events = StudyService.get_events(study_id) # Calling this line repeatedly is very very slow. It creates the # master spec and runs it. Don't execute this for Abandoned studies, as # we don't have the information to process them. 
@@ -77,6 +75,14 @@ class StudyService(object): return study + @staticmethod + def get_events(study_id): + event_models = db.session.query(TaskEventModel).filter(TaskEventModel.study_id == study_id).all() + events = [] + for event_model in event_models: + events.append(TaskEvent(event_model, None, WorkflowMetadata(id=event_model.workflow_id))) + return events + @staticmethod def delete_study(study_id): session.query(TaskEventModel).filter_by(study_id=study_id).delete() diff --git a/tests/study/test_study_api.py b/tests/study/test_study_api.py index 3b781f50..9ed7bb2c 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -1,4 +1,5 @@ import json + from tests.base_test import BaseTest from datetime import datetime, timezone @@ -13,6 +14,7 @@ from crc.models.study import StudyModel, StudySchema from crc.models.workflow import WorkflowSpecModel, WorkflowModel from crc.services.file_service import FileService from crc.services.workflow_processor import WorkflowProcessor +from crc.services.workflow_service import WorkflowService class TestStudyApi(BaseTest): @@ -112,6 +114,21 @@ class TestStudyApi(BaseTest): for approval in study.approvals: self.assertEqual(full_study['study'].title, approval['title']) + def test_get_study_has_details_about_events(self): + # Set up the study and attach a file to it. + self.load_example_data() + workflow = self.create_workflow('file_upload_form') + processor = WorkflowProcessor(workflow) + task = processor.next_task() + WorkflowService.log_task_action('dhf8r', processor, task, 'my_action') + api_response = self.app.get('/v1.0/study/%i' % workflow.study_id, + headers=self.logged_in_headers(), + content_type="application/json") + self.assert_success(api_response) + study = json.loads(api_response.get_data(as_text=True)) + self.assertEqual(1, len(study['events'])) + self.assertEqual('my_action', study['events'][0]['action']) + def test_add_study(self): self.load_example_data() study = self.add_test_study() From 51d6d6a5fae8dc8fec3ecbb6628fbe36e40f2be6 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Mon, 27 Jul 2020 16:54:46 -0400 Subject: [PATCH 16/31] Fixing failing tests around a notifications workflow. --- crc/static/bpmn/notifications/notifications.bpmn | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crc/static/bpmn/notifications/notifications.bpmn b/crc/static/bpmn/notifications/notifications.bpmn index a9fdedbf..3fdbbba6 100644 --- a/crc/static/bpmn/notifications/notifications.bpmn +++ b/crc/static/bpmn/notifications/notifications.bpmn @@ -22,7 +22,11 @@ - + + + + + Flow_0q51aiq @@ -49,7 +53,11 @@ - + + + + + Flow_0d2snmk From 0cb480801ba4ce434fce55ed11eb3e475158aa71 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Mon, 27 Jul 2020 17:05:01 -0400 Subject: [PATCH 17/31] Provide event data on the Study api endpoint. Speed up the tests a little, because that got out of hand. Need to dig into what is causing this problem. 
--- crc/models/study.py | 7 +++++-- crc/services/study_service.py | 16 +++++++++++----- tests/study/test_study_api.py | 17 +++++++++++++++++ tests/test_tasks_api.py | 20 -------------------- 4 files changed, 33 insertions(+), 27 deletions(-) diff --git a/crc/models/study.py b/crc/models/study.py index 854ce62f..669ca535 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -41,8 +41,10 @@ class StudyModel(db.Model): class WorkflowMetadata(object): - def __init__(self, id, name, display_name, description, spec_version, category_id, category_display_name, state: WorkflowState, status: WorkflowStatus, - total_tasks, completed_tasks, display_order): + def __init__(self, id, name = None, display_name = None, description = None, spec_version = None, + category_id = None, category_display_name = None, state: WorkflowState = None, + status: WorkflowStatus = None, total_tasks = None, completed_tasks = None, + display_order = None): self.id = id self.name = name self.display_name = display_name @@ -157,6 +159,7 @@ class StudySchema(ma.Schema): files = fields.List(fields.Nested(FileSchema), dump_only=True) approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True) enrollment_date = fields.Date(allow_none=True) + events = fields.List(fields.Nested('TaskEventSchema'), dump_only=True) class Meta: model = Study diff --git a/crc/services/study_service.py b/crc/services/study_service.py index fbc62d01..cbf3434d 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -1,6 +1,5 @@ from copy import copy from datetime import datetime -import json from typing import List import requests @@ -13,16 +12,15 @@ from crc.api.common import ApiError from crc.models.file import FileModel, FileModelSchema, File from crc.models.ldap import LdapSchema from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus -from crc.models.task_event import TaskEventModel from crc.models.study import StudyModel, Study, Category, WorkflowMetadata +from crc.models.task_event import TaskEventModel, TaskEvent from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \ WorkflowStatus +from crc.services.approval_service import ApprovalService from crc.services.file_service import FileService from crc.services.ldap_service import LdapService from crc.services.protocol_builder import ProtocolBuilderService from crc.services.workflow_processor import WorkflowProcessor -from crc.services.approval_service import ApprovalService -from crc.models.approval import Approval class StudyService(object): @@ -63,7 +61,7 @@ class StudyService(object): files = (File.from_models(model, FileService.get_file_data(model.id), FileService.get_doc_dictionary()) for model in files) study.files = list(files) - + study.events = StudyService.get_events(study_id) # Calling this line repeatedly is very very slow. It creates the # master spec and runs it. Don't execute this for Abandoned studies, as # we don't have the information to process them. 
@@ -77,6 +75,14 @@ class StudyService(object): return study + @staticmethod + def get_events(study_id): + event_models = db.session.query(TaskEventModel).filter(TaskEventModel.study_id == study_id).all() + events = [] + for event_model in event_models: + events.append(TaskEvent(event_model, None, WorkflowMetadata(id=event_model.workflow_id))) + return events + @staticmethod def delete_study(study_id): session.query(TaskEventModel).filter_by(study_id=study_id).delete() diff --git a/tests/study/test_study_api.py b/tests/study/test_study_api.py index 3b781f50..9ed7bb2c 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -1,4 +1,5 @@ import json + from tests.base_test import BaseTest from datetime import datetime, timezone @@ -13,6 +14,7 @@ from crc.models.study import StudyModel, StudySchema from crc.models.workflow import WorkflowSpecModel, WorkflowModel from crc.services.file_service import FileService from crc.services.workflow_processor import WorkflowProcessor +from crc.services.workflow_service import WorkflowService class TestStudyApi(BaseTest): @@ -112,6 +114,21 @@ class TestStudyApi(BaseTest): for approval in study.approvals: self.assertEqual(full_study['study'].title, approval['title']) + def test_get_study_has_details_about_events(self): + # Set up the study and attach a file to it. + self.load_example_data() + workflow = self.create_workflow('file_upload_form') + processor = WorkflowProcessor(workflow) + task = processor.next_task() + WorkflowService.log_task_action('dhf8r', processor, task, 'my_action') + api_response = self.app.get('/v1.0/study/%i' % workflow.study_id, + headers=self.logged_in_headers(), + content_type="application/json") + self.assert_success(api_response) + study = json.loads(api_response.get_data(as_text=True)) + self.assertEqual(1, len(study['events'])) + self.assertEqual('my_action', study['events'][0]['action']) + def test_add_study(self): self.load_example_data() study = self.add_test_study() diff --git a/tests/test_tasks_api.py b/tests/test_tasks_api.py index 8284313d..9b8b5d68 100644 --- a/tests/test_tasks_api.py +++ b/tests/test_tasks_api.py @@ -69,7 +69,6 @@ class TestTasksApi(BaseTest): self.assertIsNotNone(val) def test_error_message_on_bad_gateway_expression(self): - self.load_example_data() workflow = self.create_workflow('exclusive_gateway') # get the first form in the two form workflow. @@ -77,7 +76,6 @@ class TestTasksApi(BaseTest): self.complete_form(workflow, task, {"has_bananas": True}) def test_workflow_with_parallel_forms(self): - self.load_example_data() workflow = self.create_workflow('exclusive_gateway') # get the first form in the two form workflow. @@ -89,7 +87,6 @@ class TestTasksApi(BaseTest): self.assertEqual("Task_Num_Bananas", workflow_api.next_task.name) def test_navigation_with_parallel_forms(self): - self.load_example_data() workflow = self.create_workflow('exclusive_gateway') # get the first form in the two form workflow. @@ -107,7 +104,6 @@ class TestTasksApi(BaseTest): self.assertEqual("NOOP", nav[3]['state']) def test_navigation_with_exclusive_gateway(self): - self.load_example_data() workflow = self.create_workflow('exclusive_gateway_2') # get the first form in the two form workflow. 
@@ -124,7 +120,6 @@ class TestTasksApi(BaseTest): self.assertEqual("Task 3", nav[6]['title']) def test_document_added_to_workflow_shows_up_in_file_list(self): - self.load_example_data() self.create_reference_document() workflow = self.create_workflow('docx') @@ -153,7 +148,6 @@ class TestTasksApi(BaseTest): def test_get_documentation_populated_in_end(self): - self.load_example_data() workflow = self.create_workflow('random_fact') workflow_api = self.get_workflow_api(workflow) task = workflow_api.next_task @@ -167,9 +161,7 @@ class TestTasksApi(BaseTest): self.assertTrue("norris" in workflow_api.next_task.documentation) def test_load_workflow_from_outdated_spec(self): - # Start the basic two_forms workflow and complete a task. - self.load_example_data() workflow = self.create_workflow('two_forms') workflow_api = self.get_workflow_api(workflow) self.complete_form(workflow, workflow_api.next_task, {"color": "blue"}) @@ -194,9 +186,7 @@ class TestTasksApi(BaseTest): self.assertTrue(workflow_api.is_latest_spec) def test_soft_reset_errors_out_and_next_result_is_on_original_version(self): - # Start the basic two_forms workflow and complete a task. - self.load_example_data() workflow = self.create_workflow('two_forms') workflow_api = self.get_workflow_api(workflow) self.complete_form(workflow, workflow_api.next_task, {"color": "blue"}) @@ -221,7 +211,6 @@ class TestTasksApi(BaseTest): def test_manual_task_with_external_documentation(self): - self.load_example_data() workflow = self.create_workflow('manual_task_with_external_documentation') # get the first form in the two form workflow. @@ -235,7 +224,6 @@ class TestTasksApi(BaseTest): self.assertTrue('Dan' in workflow_api.next_task.documentation) def test_bpmn_extension_properties_are_populated(self): - self.load_example_data() workflow = self.create_workflow('manual_task_with_external_documentation') # get the first form in the two form workflow. @@ -268,9 +256,7 @@ class TestTasksApi(BaseTest): # Assure that the names for each task are properly updated, so they aren't all the same. self.assertEqual("Primary Investigator", workflow.next_task.properties['display_name']) - def test_lookup_endpoint_for_task_field_enumerations(self): - self.load_example_data() workflow = self.create_workflow('enum_options_with_search') # get the first form in the two form workflow. workflow = self.get_workflow_api(workflow) @@ -286,7 +272,6 @@ class TestTasksApi(BaseTest): self.assert_options_populated(results, ['CUSTOMER_NUMBER', 'CUSTOMER_NAME', 'CUSTOMER_CLASS_MEANING']) def test_lookup_endpoint_for_task_field_using_lookup_entry_id(self): - self.load_example_data() workflow = self.create_workflow('enum_options_with_search') # get the first form in the two form workflow. workflow = self.get_workflow_api(workflow) @@ -316,7 +301,6 @@ class TestTasksApi(BaseTest): # the key/values from the spreadsheet are added directly to the form and it shows up as # a dropdown. This tests the case of wanting to get additional data when a user selects # something from a dropdown. - self.load_example_data() workflow = self.create_workflow('enum_options_from_file') # get the first form in the two form workflow. workflow = self.get_workflow_api(workflow) @@ -334,7 +318,6 @@ class TestTasksApi(BaseTest): self.assertIsInstance(results[0]['data'], dict) def test_enum_from_task_data(self): - self.load_example_data() workflow = self.create_workflow('enum_options_from_task_data') # get the first form in the two form workflow. 
workflow_api = self.get_workflow_api(workflow) @@ -359,7 +342,6 @@ class TestTasksApi(BaseTest): self.assertEqual('Chesterfield', options[2]['data']['first_name']) def test_lookup_endpoint_for_task_ldap_field_lookup(self): - self.load_example_data() workflow = self.create_workflow('ldap_lookup') # get the first form workflow = self.get_workflow_api(workflow) @@ -378,7 +360,6 @@ class TestTasksApi(BaseTest): self.assertEqual(1, len(results)) def test_sub_process(self): - self.load_example_data() workflow = self.create_workflow('subprocess') workflow_api = self.get_workflow_api(workflow) @@ -399,7 +380,6 @@ class TestTasksApi(BaseTest): self.assertEqual(WorkflowStatus.complete, workflow_api.status) def test_update_task_resets_token(self): - self.load_example_data() workflow = self.create_workflow('exclusive_gateway') # Start the workflow. From 300026cbc842d899682673535e4be198796a4ff8 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Tue, 28 Jul 2020 10:16:48 -0400 Subject: [PATCH 18/31] Expanding the task events endpoint to accept workflow and study ids as additional filters. Removing events from the study endpoint, too noisy. --- crc/api.yml | 12 ++++++++++ crc/api/workflow.py | 6 ++++- crc/models/study.py | 1 - crc/models/task_event.py | 1 + crc/services/file_service.py | 3 ++- crc/services/study_service.py | 9 -------- tests/study/test_study_api.py | 16 +------------ tests/test_events.py | 43 +++++++++++++++++++++++++++++++++++ tests/test_user_roles.py | 1 + 9 files changed, 65 insertions(+), 27 deletions(-) create mode 100644 tests/test_events.py diff --git a/crc/api.yml b/crc/api.yml index 4c6ebd1b..f23f0ace 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -572,6 +572,18 @@ paths: description: The type of action the event documents, options include "ASSIGNMENT" for tasks that are waiting on you, "COMPLETE" for things have completed. schema: type: string + - name: workflow + in: query + required: false + description: Restrict results to the given workflow. + schema: + type: number + - name: study + in: query + required: false + description: Restrict results to the given study. + schema: + type: number get: operationId: crc.api.workflow.get_task_events summary: Returns a list of task events related to the current user. Can be filtered by type. diff --git a/crc/api/workflow.py b/crc/api/workflow.py index a290d340..3418d50a 100644 --- a/crc/api/workflow.py +++ b/crc/api/workflow.py @@ -103,11 +103,15 @@ def get_workflow(workflow_id, soft_reset=False, hard_reset=False): return WorkflowApiSchema().dump(workflow_api_model) -def get_task_events(action): +def get_task_events(action = None, workflow = None, study = None): """Provides a way to see a history of what has happened, or get a list of tasks that need your attention.""" query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid) if action: query = query.filter(TaskEventModel.action == action) + if workflow: + query = query.filter(TaskEventModel.workflow_id == workflow) + if study: + query = query.filter(TaskEventModel.study_id == study) events = query.all() # Turn the database records into something a little richer for the UI to use. 
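With the two new filters wired into get_task_events above, the per-study history that patch 17 embedded in the study payload can instead be fetched on demand from the task_events endpoint. A minimal consumer sketch follows; the host, ids, and authorization header are placeholder assumptions rather than anything defined in this patch.

import requests

BASE_URL = "http://localhost:5000/v1.0"              # assumption: a local dev server
AUTH_HEADERS = {"Authorization": "Bearer <token>"}   # assumption: token from the normal login flow

# Any combination of the filters may be supplied; the ids here are illustrative only.
params = {"action": "ASSIGNMENT", "study": 1, "workflow": 42}
response = requests.get(f"{BASE_URL}/task_events", params=params, headers=AUTH_HEADERS)
response.raise_for_status()
for event in response.json():
    print(event["action"], event["task_title"])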
diff --git a/crc/models/study.py b/crc/models/study.py index 669ca535..e14fe0a6 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -159,7 +159,6 @@ class StudySchema(ma.Schema): files = fields.List(fields.Nested(FileSchema), dump_only=True) approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True) enrollment_date = fields.Date(allow_none=True) - events = fields.List(fields.Nested('TaskEventSchema'), dump_only=True) class Meta: model = Study diff --git a/crc/models/task_event.py b/crc/models/task_event.py index c696bc26..aa05a4f7 100644 --- a/crc/models/task_event.py +++ b/crc/models/task_event.py @@ -57,6 +57,7 @@ class TaskEventSchema(ma.Schema): study = fields.Nested(StudySchema, dump_only=True) workflow = fields.Nested(WorkflowMetadataSchema, dump_only=True) + task_lane = fields.String(allow_none=True, required=False) class Meta: model = TaskEvent additional = ["id", "user_uid", "action", "task_id", "task_title", diff --git a/crc/services/file_service.py b/crc/services/file_service.py index 6ba2e1ad..8b5665c6 100644 --- a/crc/services/file_service.py +++ b/crc/services/file_service.py @@ -78,7 +78,8 @@ class FileService(object): """ Opens a reference file (assumes that it is xls file) and returns the data as a dictionary, each row keyed on the given index_column name. If there are columns that should be represented as integers, pass these as an array of int_columns, lest - you get '1.0' rather than '1' """ + you get '1.0' rather than '1' + fixme: This is stupid stupid slow. Place it in the database and just check if it is up to date.""" data_model = FileService.get_reference_file_data(reference_file_name) xls = ExcelFile(data_model.data) df = xls.parse(xls.sheet_names[0]) diff --git a/crc/services/study_service.py b/crc/services/study_service.py index cbf3434d..4eb8dde7 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -61,7 +61,6 @@ class StudyService(object): files = (File.from_models(model, FileService.get_file_data(model.id), FileService.get_doc_dictionary()) for model in files) study.files = list(files) - study.events = StudyService.get_events(study_id) # Calling this line repeatedly is very very slow. It creates the # master spec and runs it. Don't execute this for Abandoned studies, as # we don't have the information to process them. @@ -75,14 +74,6 @@ class StudyService(object): return study - @staticmethod - def get_events(study_id): - event_models = db.session.query(TaskEventModel).filter(TaskEventModel.study_id == study_id).all() - events = [] - for event_model in event_models: - events.append(TaskEvent(event_model, None, WorkflowMetadata(id=event_model.workflow_id))) - return events - @staticmethod def delete_study(study_id): session.query(TaskEventModel).filter_by(study_id=study_id).delete() diff --git a/tests/study/test_study_api.py b/tests/study/test_study_api.py index 9ed7bb2c..fb0a4dcf 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -1,4 +1,5 @@ import json +from profile import Profile from tests.base_test import BaseTest @@ -114,21 +115,6 @@ class TestStudyApi(BaseTest): for approval in study.approvals: self.assertEqual(full_study['study'].title, approval['title']) - def test_get_study_has_details_about_events(self): - # Set up the study and attach a file to it. 
- self.load_example_data() - workflow = self.create_workflow('file_upload_form') - processor = WorkflowProcessor(workflow) - task = processor.next_task() - WorkflowService.log_task_action('dhf8r', processor, task, 'my_action') - api_response = self.app.get('/v1.0/study/%i' % workflow.study_id, - headers=self.logged_in_headers(), - content_type="application/json") - self.assert_success(api_response) - study = json.loads(api_response.get_data(as_text=True)) - self.assertEqual(1, len(study['events'])) - self.assertEqual('my_action', study['events'][0]['action']) - def test_add_study(self): self.load_example_data() study = self.add_test_study() diff --git a/tests/test_events.py b/tests/test_events.py new file mode 100644 index 00000000..06005ee1 --- /dev/null +++ b/tests/test_events.py @@ -0,0 +1,43 @@ +import json + +from tests.base_test import BaseTest +from crc.models.workflow import WorkflowStatus +from crc import db +from crc.api.common import ApiError +from crc.models.task_event import TaskEventModel, TaskEventSchema +from crc.services.workflow_service import WorkflowService + + +class TestEvents(BaseTest): + + + def test_list_events_by_workflow(self): + workflow_one = self.create_workflow('exclusive_gateway') + + # Start a the workflow. + first_task = self.get_workflow_api(workflow_one).next_task + self.complete_form(workflow_one, first_task, {"has_bananas": True}) + workflow_one = self.get_workflow_api(workflow_one) + self.assertEqual('Task_Num_Bananas', workflow_one.next_task.name) + + # Start a second workflow + workflow_two = self.create_workflow('subprocess') + workflow_api_two = self.get_workflow_api(workflow_two) + + # Get all action events across workflows + rv = self.app.get('/v1.0/task_events?action=ASSIGNMENT', + headers=self.logged_in_headers(), + content_type="application/json") + self.assert_success(rv) + json_data = json.loads(rv.get_data(as_text=True)) + tasks = TaskEventSchema(many=True).load(json_data) + self.assertEqual(2, len(tasks)) + + # Get action events for a single workflow + rv = self.app.get(f'/v1.0/task_events?action=ASSIGNMENT&workflow={workflow_one.id}', + headers=self.logged_in_headers(), + content_type="application/json") + self.assert_success(rv) + json_data = json.loads(rv.get_data(as_text=True)) + tasks = TaskEventSchema(many=True).load(json_data) + self.assertEqual(1, len(tasks)) diff --git a/tests/test_user_roles.py b/tests/test_user_roles.py index 74871476..ce3b03b5 100644 --- a/tests/test_user_roles.py +++ b/tests/test_user_roles.py @@ -111,6 +111,7 @@ class TestTasksApi(BaseTest): data['approval'] = True self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid) + def test_navigation_and_current_task_updates_through_workflow(self): submitter = self.create_user(uid='lje5u') From f15626033d95803c95bbd14e8d7d6ec4c7ae3c66 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Tue, 28 Jul 2020 13:33:38 -0400 Subject: [PATCH 19/31] Allow the workflow to be requested without making changes to the workflow - requires that you specify a read_only flag of true, otherwise it assumes that you want a fully prepared workflow with the next ready task set to run. 
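From a client's point of view, the read-only fetch described above looks roughly like the sketch below; the host, workflow id, and authorization header are placeholders, not part of the patch. The test added later in this patch verifies the same behavior through the Flask test client: no task events are logged, and for a workflow that has never advanced, next_task is still the Start event.

import requests

BASE_URL = "http://localhost:5000/v1.0"              # assumption: a local dev server
AUTH_HEADERS = {"Authorization": "Bearer <token>"}   # assumption: token from the normal login flow
workflow_id = 123                                    # assumption: an existing workflow

response = requests.get(f"{BASE_URL}/workflow/{workflow_id}",
                        params={"read_only": "true"},
                        headers=AUTH_HEADERS)
response.raise_for_status()
workflow_api = response.json()
print(workflow_api["read_only"])           # True
print(workflow_api["next_task"]["name"])   # "Start" if no engine steps have ever run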
--- crc/api.yml | 6 ++++++ crc/api/common.py | 5 +++++ crc/api/workflow.py | 13 +++++++++--- crc/models/api_models.py | 8 ++++--- crc/services/workflow_processor.py | 11 +++++----- crc/services/workflow_service.py | 5 +++-- tests/base_test.py | 9 ++++---- tests/files/test_files_api.py | 3 ++- tests/study/test_study_service.py | 1 + tests/test_tasks_api.py | 21 +++++++++++++++++++ tests/workflow/test_workflow_processor.py | 6 ++++++ .../test_workflow_processor_multi_instance.py | 1 + .../test_workflow_spec_validation_api.py | 4 ++-- 13 files changed, 73 insertions(+), 20 deletions(-) diff --git a/crc/api.yml b/crc/api.yml index f23f0ace..6304d513 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -622,6 +622,12 @@ paths: description: Set this to true to reset the workflow schema: type: boolean + - name: read_only + in: query + required: false + description: Does not run any automatic or script tasks and should not be used for updates. + schema: + type: boolean tags: - Workflows and Tasks responses: diff --git a/crc/api/common.py b/crc/api/common.py index cb527c73..f200401d 100644 --- a/crc/api/common.py +++ b/crc/api/common.py @@ -24,6 +24,11 @@ class ApiError(Exception): instance.task_id = task.task_spec.name or "" instance.task_name = task.task_spec.description or "" instance.file_name = task.workflow.spec.file or "" + + # Fixme: spiffworkflow is doing something weird where task ends up referenced in the data in some cases. + if "task" in task.data: + task.data.pop("task") + instance.task_data = task.data app.logger.error(message, exc_info=True) return instance diff --git a/crc/api/workflow.py b/crc/api/workflow.py index 3418d50a..5a9f28e6 100644 --- a/crc/api/workflow.py +++ b/crc/api/workflow.py @@ -95,11 +95,18 @@ def delete_workflow_specification(spec_id): session.commit() -def get_workflow(workflow_id, soft_reset=False, hard_reset=False): +def get_workflow(workflow_id, soft_reset=False, hard_reset=False, read_only=False): + """Soft reset will attempt to update to the latest spec without starting over, + Hard reset will update to the latest spec and start from the beginning. + Read Only will return the workflow in a read only state, without running any + engine tasks or logging any events. """ workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first() processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset) - workflow_api_model = WorkflowService.processor_to_workflow_api(processor) - WorkflowService.update_task_assignments(processor) + if not read_only: + processor.do_engine_steps() + processor.save() + WorkflowService.update_task_assignments(processor) + workflow_api_model = WorkflowService.processor_to_workflow_api(processor, read_only=read_only) return WorkflowApiSchema().dump(workflow_api_model) diff --git a/crc/models/api_models.py b/crc/models/api_models.py index 843609e0..6b8d17db 100644 --- a/crc/models/api_models.py +++ b/crc/models/api_models.py @@ -143,7 +143,8 @@ class NavigationItemSchema(ma.Schema): class WorkflowApi(object): def __init__(self, id, status, next_task, navigation, - spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, last_updated, title): + spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, + last_updated, title, read_only): self.id = id self.status = status self.next_task = next_task # The next task that requires user input. 
@@ -155,13 +156,14 @@ class WorkflowApi(object): self.completed_tasks = completed_tasks self.last_updated = last_updated self.title = title + self.read_only = read_only class WorkflowApiSchema(ma.Schema): class Meta: model = WorkflowApi fields = ["id", "status", "next_task", "navigation", "workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks", - "last_updated", "title"] + "last_updated", "title", "read_only"] unknown = INCLUDE status = EnumField(WorkflowStatus) @@ -172,7 +174,7 @@ class WorkflowApiSchema(ma.Schema): def make_workflow(self, data, **kwargs): keys = ['id', 'status', 'next_task', 'navigation', 'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks", - "last_updated", "title"] + "last_updated", "title", "read_only"] filtered_fields = {key: data[key] for key in keys} filtered_fields['next_task'] = TaskSchema().make_task(data['next_task']) return WorkflowApi(**filtered_fields) diff --git a/crc/services/workflow_processor.py b/crc/services/workflow_processor.py index 165d3313..535fb3eb 100644 --- a/crc/services/workflow_processor.py +++ b/crc/services/workflow_processor.py @@ -117,7 +117,8 @@ class WorkflowProcessor(object): STUDY_ID_KEY = "study_id" VALIDATION_PROCESS_KEY = "validate_only" - def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False, validate_only=False): + def __init__(self, workflow_model: WorkflowModel, + soft_reset=False, hard_reset=False, validate_only=False): """Create a Workflow Processor based on the serialized information available in the workflow model. If soft_reset is set to true, it will try to use the latest version of the workflow specification without resetting to the beginning of the workflow. This will work for some minor changes to the spec. @@ -180,10 +181,10 @@ class WorkflowProcessor(object): bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine) bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only - try: - bpmn_workflow.do_engine_steps() - except WorkflowException as we: - raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender) +# try: +# bpmn_workflow.do_engine_steps() +# except WorkflowException as we: +# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender) return bpmn_workflow def save(self): diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index e078166b..9adbbd3c 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -216,7 +216,7 @@ class WorkflowService(object): return ''.join(random.choice(letters) for i in range(string_length)) @staticmethod - def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None): + def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None, read_only=False): """Returns an API model representing the state of the current workflow, if requested, and possible, next_task is set to the current_task.""" @@ -260,7 +260,8 @@ class WorkflowService(object): total_tasks=len(navigation), completed_tasks=processor.workflow_model.completed_tasks, last_updated=processor.workflow_model.last_updated, - title=spec.display_name + title=spec.display_name, + read_only=read_only ) if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks. # This may or may not work, sometimes there is no next task to complete. 
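Because the processor no longer runs engine steps when it is constructed (that block in workflow_processor.py is commented out above), any caller that relied on the old implicit behavior now has to trigger the steps itself, which is exactly what the test changes below do. A small sketch of the new calling pattern; it assumes an existing WorkflowModel instance, and the helper name is invented for illustration.

from crc.services.workflow_processor import WorkflowProcessor

def run_to_first_user_task(workflow_model):
    """Hypothetical helper: load a workflow and advance it to its first user task."""
    processor = WorkflowProcessor(workflow_model)   # loading alone no longer executes anything
    processor.do_engine_steps()                     # run script/automatic tasks explicitly
    processor.save()
    return processor.next_task()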
diff --git a/tests/base_test.py b/tests/base_test.py index 3f0b2405..d627fb9f 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -308,12 +308,13 @@ class BaseTest(unittest.TestCase): db.session.commit() return approval - def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, user_uid="dhf8r"): + def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, read_only=False, user_uid="dhf8r"): user = session.query(UserModel).filter_by(uid=user_uid).first() self.assertIsNotNone(user) - - rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' % - (workflow.id, str(soft_reset), str(hard_reset)), + rv = self.app.get(f'/v1.0/workflow/{workflow.id}' + f'?soft_reset={str(soft_reset)}' + f'&hard_reset={str(hard_reset)}' + f'&read_only={str(read_only)}', headers=self.logged_in_headers(user), content_type="application/json") self.assert_success(rv) diff --git a/tests/files/test_files_api.py b/tests/files/test_files_api.py index 59e6c1f6..02feb8d0 100644 --- a/tests/files/test_files_api.py +++ b/tests/files/test_files_api.py @@ -72,10 +72,10 @@ class TestFilesApi(BaseTest): self.assertEqual(file, file2) def test_add_file_from_task_and_form_errors_on_invalid_form_field_name(self): - self.load_example_data() self.create_reference_document() workflow = self.create_workflow('file_upload_form') processor = WorkflowProcessor(workflow) + processor.do_engine_steps() task = processor.next_task() data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} correct_name = task.task_spec.form.fields[0].id @@ -96,6 +96,7 @@ class TestFilesApi(BaseTest): self.create_reference_document() workflow = self.create_workflow('file_upload_form') processor = WorkflowProcessor(workflow) + processor.do_engine_steps() task = processor.next_task() data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} correct_name = task.task_spec.form.fields[0].id diff --git a/tests/study/test_study_service.py b/tests/study/test_study_service.py index f1e43c8a..7ba5f568 100644 --- a/tests/study/test_study_service.py +++ b/tests/study/test_study_service.py @@ -79,6 +79,7 @@ class TestStudyService(BaseTest): # Initialize the Workflow with the workflow processor. workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first() processor = WorkflowProcessor(workflow_model) + processor.do_engine_steps() # Assure the workflow is now started, and knows the total and completed tasks. studies = StudyService.get_studies_for_user(user) diff --git a/tests/test_tasks_api.py b/tests/test_tasks_api.py index 9b8b5d68..c1be63b6 100644 --- a/tests/test_tasks_api.py +++ b/tests/test_tasks_api.py @@ -9,6 +9,7 @@ from crc import session, app from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSchema from crc.models.file import FileModelSchema from crc.models.workflow import WorkflowStatus +from crc.models.task_event import TaskEventModel class TestTasksApi(BaseTest): @@ -42,6 +43,24 @@ class TestTasksApi(BaseTest): """ self.assertTrue(str.startswith(task.documentation, expected_docs)) + def test_get_read_only_workflow(self): + # Set up a new workflow + workflow = self.create_workflow('two_forms') + # get the first form in the two form workflow. + workflow_api = self.get_workflow_api(workflow, read_only=True) + + # There should be no task event logs related to the workflow at this point. 
+ task_events = session.query(TaskEventModel).filter(TaskEventModel.workflow_id == workflow.id).all() + self.assertEqual(0, len(task_events)) + + # Since the workflow was not started, the call to read-only should not execute any engine steps the + # current task should be the start event. + self.assertEqual("Start", workflow_api.next_task.name) + + # the workflow_api should have a read_only attribute set to true + self.assertEquals(True, workflow_api.read_only) + + def test_two_forms_task(self): # Set up a new workflow self.load_example_data() @@ -457,3 +476,5 @@ class TestTasksApi(BaseTest): workflow = self.get_workflow_api(workflow) self.assertEqual(WorkflowStatus.complete, workflow.status) + + diff --git a/tests/workflow/test_workflow_processor.py b/tests/workflow/test_workflow_processor.py index a51f029d..8b75dfb3 100644 --- a/tests/workflow/test_workflow_processor.py +++ b/tests/workflow/test_workflow_processor.py @@ -36,6 +36,7 @@ class TestWorkflowProcessor(BaseTest): workflow_spec_model = self.load_test_spec("random_fact") study = session.query(StudyModel).first() processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY]) self.assertIsNotNone(processor) self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) @@ -62,6 +63,7 @@ class TestWorkflowProcessor(BaseTest): files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all() self.assertEqual(2, len(files)) processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) next_user_tasks = processor.next_user_tasks() self.assertEqual(1, len(next_user_tasks)) @@ -86,6 +88,7 @@ class TestWorkflowProcessor(BaseTest): workflow_spec_model = self.load_test_spec("parallel_tasks") study = session.query(StudyModel).first() processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) # Complete the first steps of the 4 parallel tasks @@ -127,6 +130,7 @@ class TestWorkflowProcessor(BaseTest): study = session.query(StudyModel).first() workflow_spec_model = self.load_test_spec("parallel_tasks") processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) next_user_tasks = processor.next_user_tasks() self.assertEqual(4, len(next_user_tasks)) @@ -215,6 +219,7 @@ class TestWorkflowProcessor(BaseTest): self.assertEqual(2, len(files)) workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first() processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) next_user_tasks = processor.next_user_tasks() self.assertEqual(1, len(next_user_tasks)) @@ -278,6 +283,7 @@ class TestWorkflowProcessor(BaseTest): study = session.query(StudyModel).first() workflow_spec_model = self.load_test_spec("two_forms") processor = self.get_processor(study, workflow_spec_model) + processor.do_engine_steps() self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id) task = processor.next_task() task.data = {"color": "blue"} diff --git a/tests/workflow/test_workflow_processor_multi_instance.py b/tests/workflow/test_workflow_processor_multi_instance.py index 
a67cae7f..1473ed3a 100644 --- a/tests/workflow/test_workflow_processor_multi_instance.py +++ b/tests/workflow/test_workflow_processor_multi_instance.py @@ -47,6 +47,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest): workflow_spec_model = self.load_test_spec("multi_instance") study = session.query(StudyModel).first() processor = self.get_processor(study, workflow_spec_model) + processor.bpmn_workflow.do_engine_steps() self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY]) self.assertIsNotNone(processor) self.assertEqual(WorkflowStatus.user_input_required, processor.get_status()) diff --git a/tests/workflow/test_workflow_spec_validation_api.py b/tests/workflow/test_workflow_spec_validation_api.py index 0c17892e..da389168 100644 --- a/tests/workflow/test_workflow_spec_validation_api.py +++ b/tests/workflow/test_workflow_spec_validation_api.py @@ -89,7 +89,7 @@ class TestWorkflowSpecValidation(BaseTest): self.load_example_data() errors = self.validate_workflow("invalid_script") self.assertEqual(2, len(errors)) - self.assertEqual("error_loading_workflow", errors[0]['code']) + self.assertEqual("workflow_validation_exception", errors[0]['code']) self.assertTrue("NoSuchScript" in errors[0]['message']) self.assertEqual("Invalid_Script_Task", errors[0]['task_id']) self.assertEqual("An Invalid Script Reference", errors[0]['task_name']) @@ -99,7 +99,7 @@ class TestWorkflowSpecValidation(BaseTest): self.load_example_data() errors = self.validate_workflow("invalid_script2") self.assertEqual(2, len(errors)) - self.assertEqual("error_loading_workflow", errors[0]['code']) + self.assertEqual("workflow_validation_exception", errors[0]['code']) self.assertEqual("Invalid_Script_Task", errors[0]['task_id']) self.assertEqual("An Invalid Script Reference", errors[0]['task_name']) self.assertEqual("invalid_script2.bpmn", errors[0]['file_name']) From 4d11fc04a014e40baa456ec4986e537063bb0931 Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Tue, 28 Jul 2020 13:51:29 -0400 Subject: [PATCH 20/31] dropping the "read_only" flag in favor of a "do_engine_steps" flag, which more clearly defines what is happening. --- crc/api.yml | 4 ++-- crc/api/workflow.py | 6 +++--- crc/models/api_models.py | 7 +++---- crc/services/workflow_service.py | 5 ++--- tests/base_test.py | 4 ++-- tests/test_tasks_api.py | 8 ++++---- 6 files changed, 16 insertions(+), 18 deletions(-) diff --git a/crc/api.yml b/crc/api.yml index 6304d513..922c96af 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -622,10 +622,10 @@ paths: description: Set this to true to reset the workflow schema: type: boolean - - name: read_only + - name: do_engine_steps in: query required: false - description: Does not run any automatic or script tasks and should not be used for updates. + description: Defaults to true, can be set to false if you are just looking at the workflow not completeing it. schema: type: boolean tags: diff --git a/crc/api/workflow.py b/crc/api/workflow.py index 5a9f28e6..5d185ae7 100644 --- a/crc/api/workflow.py +++ b/crc/api/workflow.py @@ -95,18 +95,18 @@ def delete_workflow_specification(spec_id): session.commit() -def get_workflow(workflow_id, soft_reset=False, hard_reset=False, read_only=False): +def get_workflow(workflow_id, soft_reset=False, hard_reset=False, do_engine_steps=True): """Soft reset will attempt to update to the latest spec without starting over, Hard reset will update to the latest spec and start from the beginning. 
Read Only will return the workflow in a read only state, without running any engine tasks or logging any events. """ workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first() processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset) - if not read_only: + if do_engine_steps: processor.do_engine_steps() processor.save() WorkflowService.update_task_assignments(processor) - workflow_api_model = WorkflowService.processor_to_workflow_api(processor, read_only=read_only) + workflow_api_model = WorkflowService.processor_to_workflow_api(processor) return WorkflowApiSchema().dump(workflow_api_model) diff --git a/crc/models/api_models.py b/crc/models/api_models.py index 6b8d17db..7d1088e9 100644 --- a/crc/models/api_models.py +++ b/crc/models/api_models.py @@ -144,7 +144,7 @@ class NavigationItemSchema(ma.Schema): class WorkflowApi(object): def __init__(self, id, status, next_task, navigation, spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, - last_updated, title, read_only): + last_updated, title): self.id = id self.status = status self.next_task = next_task # The next task that requires user input. @@ -156,14 +156,13 @@ class WorkflowApi(object): self.completed_tasks = completed_tasks self.last_updated = last_updated self.title = title - self.read_only = read_only class WorkflowApiSchema(ma.Schema): class Meta: model = WorkflowApi fields = ["id", "status", "next_task", "navigation", "workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks", - "last_updated", "title", "read_only"] + "last_updated", "title"] unknown = INCLUDE status = EnumField(WorkflowStatus) @@ -174,7 +173,7 @@ class WorkflowApiSchema(ma.Schema): def make_workflow(self, data, **kwargs): keys = ['id', 'status', 'next_task', 'navigation', 'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks", - "last_updated", "title", "read_only"] + "last_updated", "title"] filtered_fields = {key: data[key] for key in keys} filtered_fields['next_task'] = TaskSchema().make_task(data['next_task']) return WorkflowApi(**filtered_fields) diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py index 9adbbd3c..e078166b 100644 --- a/crc/services/workflow_service.py +++ b/crc/services/workflow_service.py @@ -216,7 +216,7 @@ class WorkflowService(object): return ''.join(random.choice(letters) for i in range(string_length)) @staticmethod - def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None, read_only=False): + def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None): """Returns an API model representing the state of the current workflow, if requested, and possible, next_task is set to the current_task.""" @@ -260,8 +260,7 @@ class WorkflowService(object): total_tasks=len(navigation), completed_tasks=processor.workflow_model.completed_tasks, last_updated=processor.workflow_model.last_updated, - title=spec.display_name, - read_only=read_only + title=spec.display_name ) if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks. # This may or may not work, sometimes there is no next task to complete. 
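With the rename above, the earlier read-only sketch changes in only two ways: the query parameter is now do_engine_steps (defaulting to true when omitted), and the response no longer carries a read_only field. Host, id, and auth header remain placeholder assumptions.

import requests

BASE_URL = "http://localhost:5000/v1.0"              # assumption: a local dev server
AUTH_HEADERS = {"Authorization": "Bearer <token>"}   # assumption: token from the normal login flow

response = requests.get(f"{BASE_URL}/workflow/123",           # 123 is an illustrative workflow id
                        params={"do_engine_steps": "false"},  # omit it to get the default of true
                        headers=AUTH_HEADERS)
workflow_api = response.json()   # no 'read_only' field in this payload any more
print(workflow_api["next_task"]["name"])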
diff --git a/tests/base_test.py b/tests/base_test.py index d627fb9f..056ce090 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -308,13 +308,13 @@ class BaseTest(unittest.TestCase): db.session.commit() return approval - def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, read_only=False, user_uid="dhf8r"): + def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"): user = session.query(UserModel).filter_by(uid=user_uid).first() self.assertIsNotNone(user) rv = self.app.get(f'/v1.0/workflow/{workflow.id}' f'?soft_reset={str(soft_reset)}' f'&hard_reset={str(hard_reset)}' - f'&read_only={str(read_only)}', + f'&do_engine_steps={str(do_engine_steps)}', headers=self.logged_in_headers(user), content_type="application/json") self.assert_success(rv) diff --git a/tests/test_tasks_api.py b/tests/test_tasks_api.py index c1be63b6..02ad65ca 100644 --- a/tests/test_tasks_api.py +++ b/tests/test_tasks_api.py @@ -43,11 +43,11 @@ class TestTasksApi(BaseTest): """ self.assertTrue(str.startswith(task.documentation, expected_docs)) - def test_get_read_only_workflow(self): + def test_get_workflow_without_running_engine_steps(self): # Set up a new workflow workflow = self.create_workflow('two_forms') # get the first form in the two form workflow. - workflow_api = self.get_workflow_api(workflow, read_only=True) + workflow_api = self.get_workflow_api(workflow, do_engine_steps=False) # There should be no task event logs related to the workflow at this point. task_events = session.query(TaskEventModel).filter(TaskEventModel.workflow_id == workflow.id).all() @@ -57,8 +57,8 @@ class TestTasksApi(BaseTest): # current task should be the start event. self.assertEqual("Start", workflow_api.next_task.name) - # the workflow_api should have a read_only attribute set to true - self.assertEquals(True, workflow_api.read_only) + def test_get_form_for_previously_completed_task(self): + """Assure we can look at previously completed steps without moving the token for the workflow.""" def test_two_forms_task(self): From 0ea4c13d09b46b085d20b53faa6bc05e5037f42c Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Tue, 28 Jul 2020 17:16:48 -0400 Subject: [PATCH 21/31] Convert protocol builder status to always be in lower case in order to better match the front end. And also fixing an issue with the multi_instance that is oddly broken suddenly, and I don't know why. 
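Two details are easy to miss in the enum rewrite below. First, the serialized status strings follow the member names, so lowercasing the names (not just the values) is what makes the JSON match the front end; this assumes the schema relies on marshmallow_enum's default by-name EnumField behavior. Second, the old ACTIVE, HOLD, and OPEN members carried trailing commas, which silently made their values one-element tuples instead of strings; the rewrite drops those as well. A standalone illustration:

import enum

class ProtocolBuilderStatus(enum.Enum):
    incomplete = 'incomplete'   # found in PB but not ready to start (not q_complete)
    active = 'active'           # found in PB, q_complete, no HSR number, not on hold
    hold = 'hold'               # CR Connect side, the study is marked as "hold"
    open = 'open'               # open to enrollment: has a start date and HSR number
    abandoned = 'abandoned'     # not found in PB

print(ProtocolBuilderStatus.active.name)    # 'active', the string a by-name EnumField would emit
print(ProtocolBuilderStatus.active.value)   # also 'active', now a plain string

# For contrast, the old definition ended some lines with a comma:
#     ACTIVE = 'active',
# which makes that member's value the tuple ('active',) rather than the string 'active'.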
--- crc/api.yml | 2 +- crc/api/study.py | 2 +- crc/models/protocol_builder.py | 10 +++--- crc/models/study.py | 6 ++-- crc/services/study_service.py | 4 +-- migrations/versions/2e7b377cbc7b_.py | 32 +++++++++++++++++++ tests/base_test.py | 6 ++-- tests/data/multi_instance/multi_instance.bpmn | 4 +-- tests/study/test_study_api.py | 10 +++--- tests/study/test_study_service.py | 2 +- tests/test_authentication.py | 2 +- 11 files changed, 56 insertions(+), 24 deletions(-) create mode 100644 migrations/versions/2e7b377cbc7b_.py diff --git a/crc/api.yml b/crc/api.yml index 922c96af..b3d61fc1 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -1064,7 +1064,7 @@ components: example: dhf8r protocol_builder_status: type: string - enum: [INCOMPLETE, ACTIVE, HOLD, OPEN, ABANDONED] + enum: ['incomplete', 'active', 'hold', 'open', 'abandoned'] example: done sponsor: type: string diff --git a/crc/api/study.py b/crc/api/study.py index 8fdd1b4a..b5572527 100644 --- a/crc/api/study.py +++ b/crc/api/study.py @@ -21,7 +21,7 @@ def add_study(body): title=body['title'], primary_investigator_id=body['primary_investigator_id'], last_updated=datetime.now(), - protocol_builder_status=ProtocolBuilderStatus.ACTIVE) + protocol_builder_status=ProtocolBuilderStatus.active) session.add(study_model) errors = StudyService._add_all_workflow_specs_to_study(study_model) diff --git a/crc/models/protocol_builder.py b/crc/models/protocol_builder.py index 9ff1098f..a91ae84b 100644 --- a/crc/models/protocol_builder.py +++ b/crc/models/protocol_builder.py @@ -22,11 +22,11 @@ class ProtocolBuilderStatus(enum.Enum): # • Hold: store boolean value in CR Connect (add to Study Model) # • Open To Enrollment: has start date and HSR number? # • Abandoned: deleted in PB - INCOMPLETE = 'incomplete' # Found in PB but not ready to start (not q_complete) - ACTIVE = 'active', # found in PB, marked as "q_complete" and no HSR number and not hold - HOLD = 'hold', # CR Connect side, if the Study ias marked as "hold". - OPEN = 'open', # Open To Enrollment: has start date and HSR number? - ABANDONED = 'Abandoned' # Not found in PB + incomplete = 'incomplete' # Found in PB but not ready to start (not q_complete) + active = 'active' # found in PB, marked as "q_complete" and no HSR number and not hold + hold = 'hold' # CR Connect side, if the Study ias marked as "hold". + open = 'open' # Open To Enrollment: has start date and HSR number? + abandoned = 'abandoned' # Not found in PB #DRAFT = 'draft', # !Q_COMPLETE diff --git a/crc/models/study.py b/crc/models/study.py index e14fe0a6..f1ad0099 100644 --- a/crc/models/study.py +++ b/crc/models/study.py @@ -33,11 +33,11 @@ class StudyModel(db.Model): self.user_uid = pbs.NETBADGEID self.last_updated = pbs.DATE_MODIFIED - self.protocol_builder_status = ProtocolBuilderStatus.ACTIVE + self.protocol_builder_status = ProtocolBuilderStatus.active if pbs.HSRNUMBER: - self.protocol_builder_status = ProtocolBuilderStatus.OPEN + self.protocol_builder_status = ProtocolBuilderStatus.open if self.on_hold: - self.protocol_builder_status = ProtocolBuilderStatus.HOLD + self.protocol_builder_status = ProtocolBuilderStatus.hold class WorkflowMetadata(object): diff --git a/crc/services/study_service.py b/crc/services/study_service.py index 4eb8dde7..1d15d361 100644 --- a/crc/services/study_service.py +++ b/crc/services/study_service.py @@ -64,7 +64,7 @@ class StudyService(object): # Calling this line repeatedly is very very slow. It creates the # master spec and runs it. 
Don't execute this for Abandoned studies, as # we don't have the information to process them. - if study.protocol_builder_status != ProtocolBuilderStatus.ABANDONED: + if study.protocol_builder_status != ProtocolBuilderStatus.abandoned: status = StudyService.__get_study_status(study_model) study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status) @@ -265,7 +265,7 @@ class StudyService(object): for study in db_studies: pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study.id), None) if not pb_study: - study.protocol_builder_status = ProtocolBuilderStatus.ABANDONED + study.protocol_builder_status = ProtocolBuilderStatus.abandoned db.session.commit() diff --git a/migrations/versions/2e7b377cbc7b_.py b/migrations/versions/2e7b377cbc7b_.py new file mode 100644 index 00000000..c0eb5250 --- /dev/null +++ b/migrations/versions/2e7b377cbc7b_.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: 2e7b377cbc7b +Revises: c4ddb69e7ef4 +Create Date: 2020-07-28 17:03:23.586828 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2e7b377cbc7b' +down_revision = 'c4ddb69e7ef4' +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute('update study set protocol_builder_status = NULL;') + op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;') + op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('incomplete', 'active', 'hold', 'open', 'abandoned')") + op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;") + op.execute('DROP TYPE pbs_old;') + op.execute("update study set protocol_builder_status = 'incomplete';") + +def downgrade(): + op.execute('update study set protocol_builder_status = NULL;') + op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;') + op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('INCOMPLETE', 'ACTIVE', 'HOLD', 'OPEN', 'ABANDONED')") + op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;") + op.execute('DROP TYPE pbs_old;') diff --git a/tests/base_test.py b/tests/base_test.py index 056ce090..af0b1a20 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -60,7 +60,7 @@ class BaseTest(unittest.TestCase): 'id':0, 'title':'The impact of fried pickles on beer consumption in bipedal software developers.', 'last_updated':datetime.datetime.now(), - 'protocol_builder_status':ProtocolBuilderStatus.ACTIVE, + 'protocol_builder_status':ProtocolBuilderStatus.active, 'primary_investigator_id':'dhf8r', 'sponsor':'Sartography Pharmaceuticals', 'ind_number':'1234', @@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase): 'id':1, 'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels', 'last_updated':datetime.datetime.now(), - 'protocol_builder_status':ProtocolBuilderStatus.ACTIVE, + 'protocol_builder_status':ProtocolBuilderStatus.active, 'primary_investigator_id':'dhf8r', 'sponsor':'Makerspace & Co.', 'ind_number':'5678', @@ -241,7 +241,7 @@ class BaseTest(unittest.TestCase): study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first() if study is None: user = self.create_user(uid=uid) - study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.ACTIVE, + study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.active, user_uid=user.uid, 
primary_investigator_id=primary_investigator_id) db.session.add(study) db.session.commit() diff --git a/tests/data/multi_instance/multi_instance.bpmn b/tests/data/multi_instance/multi_instance.bpmn index 600bea80..1e0d9255 100644 --- a/tests/data/multi_instance/multi_instance.bpmn +++ b/tests/data/multi_instance/multi_instance.bpmn @@ -1,5 +1,5 @@ - + Flow_0t6p1sb @@ -18,7 +18,7 @@ - + SequenceFlow_1p568pp diff --git a/tests/study/test_study_api.py b/tests/study/test_study_api.py index fb0a4dcf..697e90f6 100644 --- a/tests/study/test_study_api.py +++ b/tests/study/test_study_api.py @@ -24,7 +24,7 @@ class TestStudyApi(BaseTest): "title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents " "for Interstellar Spacecraft", "last_updated": datetime.now(tz=timezone.utc), - "protocol_builder_status": ProtocolBuilderStatus.ACTIVE, + "protocol_builder_status": ProtocolBuilderStatus.active, "primary_investigator_id": "tmm2x", "user_uid": "dhf8r", } @@ -135,7 +135,7 @@ class TestStudyApi(BaseTest): self.load_example_data() study: StudyModel = session.query(StudyModel).first() study.title = "Pilot Study of Fjord Placement for Single Fraction Outcomes to Cortisol Susceptibility" - study.protocol_builder_status = ProtocolBuilderStatus.ACTIVE + study.protocol_builder_status = ProtocolBuilderStatus.active rv = self.app.put('/v1.0/study/%i' % study.id, content_type="application/json", headers=self.logged_in_headers(), @@ -185,11 +185,11 @@ class TestStudyApi(BaseTest): num_open = 0 for study in json_data: - if study['protocol_builder_status'] == 'ABANDONED': # One study does not exist in user_studies.json + if study['protocol_builder_status'] == 'abandoned': # One study does not exist in user_studies.json num_abandoned += 1 - if study['protocol_builder_status'] == 'ACTIVE': # One study is marked complete without HSR Number + if study['protocol_builder_status'] == 'active': # One study is marked complete without HSR Number num_active += 1 - if study['protocol_builder_status'] == 'OPEN': # One study is marked complete and has an HSR Number + if study['protocol_builder_status'] == 'open': # One study is marked complete and has an HSR Number num_open += 1 db_studies_after = session.query(StudyModel).all() diff --git a/tests/study/test_study_service.py b/tests/study/test_study_service.py index 7ba5f568..e9711362 100644 --- a/tests/study/test_study_service.py +++ b/tests/study/test_study_service.py @@ -40,7 +40,7 @@ class TestStudyService(BaseTest): for study in db.session.query(StudyModel).all(): StudyService().delete_study(study.id) - study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.ACTIVE, user_uid=user.uid) + study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.active, user_uid=user.uid) db.session.add(study) self.load_test_spec("random_fact", category_id=cat.id) diff --git a/tests/test_authentication.py b/tests/test_authentication.py index 7d706949..829d71e3 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -220,7 +220,7 @@ class TestAuthentication(BaseTest): return { "title": "blah", "last_updated": datetime.now(tz=timezone.utc), - "protocol_builder_status": ProtocolBuilderStatus.ACTIVE, + "protocol_builder_status": ProtocolBuilderStatus.active, "primary_investigator_id": uid, "user_uid": uid, } From 63537d7765f885c6621d64e21ef203179f4fb586 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 29 Jul 2020 22:45:56 -0400 Subject: [PATCH 22/31] Adds is_admin boolean flag 
to user schema --- crc/models/user.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crc/models/user.py b/crc/models/user.py index e621455b..eb431c95 100644 --- a/crc/models/user.py +++ b/crc/models/user.py @@ -1,6 +1,7 @@ import datetime import jwt +from marshmallow import fields from marshmallow_sqlalchemy import SQLAlchemyAutoSchema from crc import db, app @@ -18,6 +19,7 @@ class UserModel(db.Model): first_name = db.Column(db.String, nullable=True) last_name = db.Column(db.String, nullable=True) title = db.Column(db.String, nullable=True) + # TODO: Add Department and School def is_admin(self): @@ -64,3 +66,4 @@ class UserModelSchema(SQLAlchemyAutoSchema): load_instance = True include_relationships = True + is_admin = fields.Function(lambda obj: obj.is_admin()) From d9a91c891f91443dee0db7b9891837882f993f21 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 29 Jul 2020 22:46:22 -0400 Subject: [PATCH 23/31] Updates package hashes --- Pipfile.lock | 54 +++++++++++++++++++++++++++++----------------------- 1 file changed, 30 insertions(+), 24 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 5f5042bf..dd3e80bb 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -611,25 +611,25 @@ }, "pandas": { "hashes": [ - "sha256:02f1e8f71cd994ed7fcb9a35b6ddddeb4314822a0e09a9c5b2d278f8cb5d4096", - "sha256:13f75fb18486759da3ff40f5345d9dd20e7d78f2a39c5884d013456cec9876f0", - "sha256:35b670b0abcfed7cad76f2834041dcf7ae47fd9b22b63622d67cdc933d79f453", - "sha256:4c73f373b0800eb3062ffd13d4a7a2a6d522792fa6eb204d67a4fad0a40f03dc", - "sha256:5759edf0b686b6f25a5d4a447ea588983a33afc8a0081a0954184a4a87fd0dd7", - "sha256:5a7cf6044467c1356b2b49ef69e50bf4d231e773c3ca0558807cdba56b76820b", - "sha256:69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8", - "sha256:8778a5cc5a8437a561e3276b85367412e10ae9fff07db1eed986e427d9a674f8", - "sha256:9871ef5ee17f388f1cb35f76dc6106d40cb8165c562d573470672f4cdefa59ef", - "sha256:9c31d52f1a7dd2bb4681d9f62646c7aa554f19e8e9addc17e8b1b20011d7522d", - "sha256:ab8173a8efe5418bbe50e43f321994ac6673afc5c7c4839014cf6401bbdd0705", - "sha256:ae961f1f0e270f1e4e2273f6a539b2ea33248e0e3a11ffb479d757918a5e03a9", - "sha256:b3c4f93fcb6e97d993bf87cdd917883b7dab7d20c627699f360a8fb49e9e0b91", - "sha256:c9410ce8a3dee77653bc0684cfa1535a7f9c291663bd7ad79e39f5ab58f67ab3", - "sha256:f69e0f7b7c09f1f612b1f8f59e2df72faa8a6b41c5a436dde5b615aaf948f107", - "sha256:faa42a78d1350b02a7d2f0dbe3c80791cf785663d6997891549d0f86dc49125e" + "sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb", + "sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e", + "sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d", + "sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798", + "sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4", + "sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2", + "sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203", + "sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8", + "sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e", + "sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1", + "sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088", + "sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8", + "sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2", + "sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273", + 
"sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f", + "sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25" ], "index": "pypi", - "version": "==1.0.5" + "version": "==1.1.0" }, "psycopg2-binary": { "hashes": [ @@ -1122,6 +1122,12 @@ "markers": "python_version < '3.8'", "version": "==1.7.0" }, + "iniconfig": { + "hashes": [ + "sha256:aa0b40f50a00e72323cb5d41302f9c6165728fd764ac8822aa3fff00a40d56b4" + ], + "version": "==1.0.0" + }, "more-itertools": { "hashes": [ "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", @@ -1172,11 +1178,11 @@ }, "pytest": { "hashes": [ - "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1", - "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8" + "sha256:869ec27f9b89964ccfe4fbdd5ccb8d3f285aaa3e9aa16a8491b9c8829148c230", + "sha256:a64d8fb4c15cdc70dae047352e980a197d855747cc885eb332cb73ddcc769168" ], "index": "pypi", - "version": "==5.4.3" + "version": "==6.0.0" }, "six": { "hashes": [ @@ -1186,12 +1192,12 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, - "wcwidth": { + "toml": { "hashes": [ - "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", - "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" + "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", + "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" ], - "version": "==0.2.5" + "version": "==0.10.1" }, "zipp": { "hashes": [ From d301e9e6fa3efb709eda65d3cb4aee73f20883d4 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Wed, 29 Jul 2020 22:47:47 -0400 Subject: [PATCH 24/31] Adds list_users endpoint. Adds admin impersonate uid parameter to user endpoint. Adds some utility methods to user service. Refactors authentication tests. --- crc/api.yml | 26 ++++- crc/api/user.py | 24 +++-- crc/services/user_service.py | 43 ++++++-- tests/base_test.py | 114 ++++++++++++--------- tests/test_authentication.py | 191 ++++++++++++++++++++++++++--------- 5 files changed, 290 insertions(+), 108 deletions(-) diff --git a/crc/api.yml b/crc/api.yml index b3d61fc1..68f2f12a 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -31,6 +31,13 @@ paths: '304': description: Redirection to the hosted frontend with an auth_token header. /user: + parameters: + - name: admin_impersonate_uid + in: query + required: false + description: For admins, the unique uid of an existing user to impersonate. + schema: + type: string get: operationId: crc.api.user.get_current_user summary: Returns the current user. @@ -38,11 +45,27 @@ paths: - Users responses: '200': - description: The currently authenticated user. + description: The currently-authenticated user, or, if the current user is an admin and admin_impersonate_uid is provided, this will be the user with the given uid. content: application/json: schema: $ref: "#/components/schemas/User" + /list_users: + get: + operationId: crc.api.user.get_all_users + security: + - auth_admin: ['secret'] + summary: Returns a list of all users in the database. + tags: + - Users + responses: + '200': + description: All users in the database. 
+ content: + application/json: + schema: + type: array + $ref: "#/components/schemas/User" # /v1.0/study /study: get: @@ -56,6 +79,7 @@ paths: content: application/json: schema: + type: array $ref: "#/components/schemas/Study" post: operationId: crc.api.study.add_study diff --git a/crc/api/user.py b/crc/api/user.py index 49b447ac..483edd65 100644 --- a/crc/api/user.py +++ b/crc/api/user.py @@ -5,6 +5,7 @@ from crc import app, db from crc.api.common import ApiError from crc.models.user import UserModel, UserModelSchema from crc.services.ldap_service import LdapService, LdapModel +from crc.services.user_service import UserService """ .. module:: crc.api.user @@ -56,8 +57,9 @@ def verify_token(token=None): return token_info else: - raise ApiError("no_user", "User not found. Please login via the frontend app before accessing this feature.", - status_code=403) + raise ApiError("no_user", + "User not found. Please login via the frontend app before accessing this feature.", + status_code=403) else: # Fall back to a default user if this is not production. @@ -67,7 +69,6 @@ def verify_token(token=None): return token_info - def verify_token_admin(token=None): """ Verifies the token for the user (if provided) in non-production environment. @@ -85,8 +86,20 @@ def verify_token_admin(token=None): token_info = UserModel.decode_auth_token(token) return token_info -def get_current_user(): - return UserModelSchema().dump(g.user) + +def get_current_user(admin_impersonate_uid=None): + if UserService.has_user(): + if admin_impersonate_uid is not None and UserService.user_is_admin(): + UserService.impersonate(admin_impersonate_uid) + + user = UserService.current_user(UserService.admin_is_impersonating()) + return UserModelSchema().dump(user) + + +def get_all_users(): + if "user" in g and g.user.is_admin(): + all_users = db.session.query(UserModel).all() + return UserModelSchema(many=True).dump(all_users) def login( @@ -129,7 +142,6 @@ def login( # X-Forwarded-Server: dev.crconnect.uvadcos.io # Connection: Keep-Alive - # If we're in production, override any uid with the uid from the SSO request headers if _is_production(): uid = _get_request_uid(request) diff --git a/crc/services/user_service.py b/crc/services/user_service.py index 6b2887f5..5d12601a 100644 --- a/crc/services/user_service.py +++ b/crc/services/user_service.py @@ -1,31 +1,62 @@ from flask import g +from crc import db from crc.api.common import ApiError +from crc.models.user import UserModel class UserService(object): """Provides common tools for working with users""" + # Returns true if the current user is logged in. @staticmethod def has_user(): - if 'user' not in g or not g.user: - return False - else: - return True + return 'user' in g and bool(g.user) + + # Returns true if the current user is an admin. + @staticmethod + def user_is_admin(): + return UserService.has_user() and g.user.is_admin() + + # Returns true if the current admin user is impersonating another user. + @staticmethod + def admin_is_impersonating(): + return UserService.user_is_admin() and \ + "impersonate_user" in g and \ + g.impersonate_user is not None + + # Returns true if the given user uid is different from the current user's uid. 
+ @staticmethod + def is_different_user(uid): + return UserService.has_user() and uid is not None and uid is not g.user.uid @staticmethod def current_user(allow_admin_impersonate=False): - if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) # Admins can pretend to be different users and act on a users behalf in # some circumstances. - if g.user.is_admin() and allow_admin_impersonate and "impersonate_user" in g: + if allow_admin_impersonate and UserService.admin_is_impersonating(): return g.impersonate_user else: return g.user + # Admins can pretend to be different users and act on a users behalf in some circumstances. + # This method allows an admin user to start impersonating another user with the given uid. + # Stops impersonating if the uid is None or invalid. + @staticmethod + def impersonate(uid=None): + # Clear out the current impersonating user. + g.impersonate_user = None + + if not UserService.has_user(): + raise ApiError("logged_out", "You are no longer logged in.", status_code=401) + + if not UserService.admin_is_impersonating() and UserService.is_different_user(uid): + # Impersonate the user if the given uid is valid. + g.impersonate_user = db.session.query(UserModel).filter(UserModel.uid == uid).first() + @staticmethod def in_list(uids, allow_admin_impersonate=False): """Returns true if the current user's id is in the given list of ids. False if there diff --git a/tests/base_test.py b/tests/base_test.py index 81ccc7bb..f5b66aa9 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -2,8 +2,6 @@ # IMPORTANT - Environment must be loaded before app, models, etc.... import os -from crc.services.user_service import UserService - os.environ["TESTING"] = "true" import json @@ -18,17 +16,19 @@ from crc.models.api_models import WorkflowApiSchema, MultiInstanceType from crc.models.approval import ApprovalModel, ApprovalStatus from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES from crc.models.protocol_builder import ProtocolBuilderStatus -from crc.models.task_event import TaskEventModel from crc.models.study import StudyModel +from crc.models.task_event import TaskEventModel from crc.models.user import UserModel -from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel +from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel from crc.services.file_service import FileService from crc.services.study_service import StudyService +from crc.services.user_service import UserService from crc.services.workflow_service import WorkflowService from example_data import ExampleDataLoader -#UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES +# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES import logging + logging.basicConfig() @@ -39,48 +39,57 @@ class BaseTest(unittest.TestCase): if not app.config['TESTING']: raise (Exception("INVALID TEST CONFIGURATION. This is almost always in import order issue." 
- "The first class to import in each test should be the base_test.py file.")) + "The first class to import in each test should be the base_test.py file.")) auths = {} test_uid = "dhf8r" users = [ { - 'uid':'dhf8r', - 'email_address':'dhf8r@virginia.EDU', - 'display_name':'Daniel Harold Funk', - 'affiliation':'staff@virginia.edu;member@virginia.edu', - 'eppn':'dhf8r@virginia.edu', - 'first_name':'Daniel', - 'last_name':'Funk', - 'title':'SOFTWARE ENGINEER V' - } + 'uid': 'dhf8r', + 'email_address': 'dhf8r@virginia.EDU', + 'display_name': 'Daniel Harold Funk', + 'affiliation': 'staff@virginia.edu;member@virginia.edu', + 'eppn': 'dhf8r@virginia.edu', + 'first_name': 'Daniel', + 'last_name': 'Funk', + 'title': 'SOFTWARE ENGINEER V' + }, + { + 'uid': 'lbd3p', + 'email_address': 'lbd3p@virginia.EDU', + 'display_name': 'Laura Barnes', + 'affiliation': 'staff@virginia.edu;member@virginia.edu', + 'eppn': 'lbd3p@virginia.edu', + 'first_name': 'Laura', + 'last_name': 'Barnes', + 'title': 'Associate Professor of Systems and Information Engineering' + }, ] studies = [ { - 'id':0, - 'title':'The impact of fried pickles on beer consumption in bipedal software developers.', - 'last_updated':datetime.datetime.now(), - 'protocol_builder_status':ProtocolBuilderStatus.active, - 'primary_investigator_id':'dhf8r', - 'sponsor':'Sartography Pharmaceuticals', - 'ind_number':'1234', - 'user_uid':'dhf8r' + 'id': 0, + 'title': 'The impact of fried pickles on beer consumption in bipedal software developers.', + 'last_updated': datetime.datetime.now(), + 'protocol_builder_status': ProtocolBuilderStatus.active, + 'primary_investigator_id': 'dhf8r', + 'sponsor': 'Sartography Pharmaceuticals', + 'ind_number': '1234', + 'user_uid': 'dhf8r' }, { - 'id':1, - 'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels', - 'last_updated':datetime.datetime.now(), - 'protocol_builder_status':ProtocolBuilderStatus.active, - 'primary_investigator_id':'dhf8r', - 'sponsor':'Makerspace & Co.', - 'ind_number':'5678', - 'user_uid':'dhf8r' + 'id': 1, + 'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels', + 'last_updated': datetime.datetime.now(), + 'protocol_builder_status': ProtocolBuilderStatus.active, + 'primary_investigator_id': 'dhf8r', + 'sponsor': 'Makerspace & Co.', + 'ind_number': '5678', + 'user_uid': 'dhf8r' } ] - @classmethod def setUpClass(cls): app.config.from_object('config.testing') @@ -100,7 +109,11 @@ class BaseTest(unittest.TestCase): def tearDown(self): ExampleDataLoader.clean_db() - g.user = None + self.logout() + + if 'impersonate_user' in g: + g.impersonate_user = None + self.auths = {} def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'): @@ -138,8 +151,13 @@ class BaseTest(unittest.TestCase): else: ExampleDataLoader().load_test_data() - for user_json in self.users: - db.session.add(UserModel(**user_json)) + # If in production mode, only add the first user. 
+ if app.config['PRODUCTION']: + db.session.add(UserModel(**self.users[0])) + else: + for user_json in self.users: + db.session.add(UserModel(**user_json)) + db.session.commit() for study_json in self.studies: study_model = StudyModel(**study_json) @@ -220,7 +238,6 @@ class BaseTest(unittest.TestCase): return '?%s' % '&'.join(query_string_list) - def replace_file(self, name, file_path): """Replaces a stored file with the given name with the contents of the file at the given path.""" file_service = FileService() @@ -240,7 +257,8 @@ class BaseTest(unittest.TestCase): db.session.commit() return user - def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", primary_investigator_id="lb3dp"): + def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", + primary_investigator_id="lb3dp"): study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first() if study is None: user = self.create_user(uid=uid) @@ -294,19 +312,20 @@ class BaseTest(unittest.TestCase): file.close() def create_approval( - self, - study=None, - workflow=None, - approver_uid=None, - status=None, - version=None, + self, + study=None, + workflow=None, + approver_uid=None, + status=None, + version=None, ): study = study or self.create_study() workflow = workflow or self.create_workflow() approver_uid = approver_uid or self.test_uid status = status or ApprovalStatus.PENDING.value version = version or 1 - approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, version=version) + approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, + version=version) db.session.add(approval) db.session.commit() return approval @@ -326,7 +345,6 @@ class BaseTest(unittest.TestCase): self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id) return workflow_api - def complete_form(self, workflow_in, task_in, dict_data, error_code=None, terminate_loop=None, user_uid="dhf8r"): prev_completed_task_count = workflow_in.completed_tasks if isinstance(task_in, dict): @@ -391,12 +409,14 @@ class BaseTest(unittest.TestCase): self.assertEqual(task_in.multi_instance_count, event.mi_count) if task_in.multi_instance_type == 'looping' and not terminate_loop: - self.assertEqual(task_in.multi_instance_index+1, event.mi_index) + self.assertEqual(task_in.multi_instance_index + 1, event.mi_index) else: self.assertEqual(task_in.multi_instance_index, event.mi_index) self.assertEqual(task_in.process_name, event.process_name) self.assertIsNotNone(event.date) - workflow = WorkflowApiSchema().load(json_data) return workflow + + def logout(self): + g.user = None diff --git a/tests/test_authentication.py b/tests/test_authentication.py index 829d71e3..61f578a0 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -13,6 +13,8 @@ from crc.models.user import UserModel class TestAuthentication(BaseTest): + admin_uid = 'dhf8r' + non_admin_uid = 'lb3dp' def tearDown(self): # Assure we set the production flag back to false. @@ -58,7 +60,7 @@ class TestAuthentication(BaseTest): self.assertTrue(expected_exp_3 - 1000 <= actual_exp_3 <= expected_exp_3 + 1000) def test_non_production_auth_creates_user(self): - new_uid = 'lb3dp' ## Assure this user id is in the fake responses from ldap. + new_uid = self.non_admin_uid ## Assure this user id is in the fake responses from ldap. 
self.load_example_data() user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first() self.assertIsNone(user) @@ -88,21 +90,20 @@ class TestAuthentication(BaseTest): self.load_example_data() - new_uid = 'lb3dp' # This user is in the test ldap system. - user = db.session.query(UserModel).filter_by(uid=new_uid).first() + # User should not be in the system yet. + user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() self.assertIsNone(user) - redirect_url = 'http://worlds.best.website/admin' - headers = dict(Uid=new_uid) - db.session.flush() - rv = self.app.get('v1.0/login', follow_redirects=False, headers=headers) - self.assert_success(rv) - user = db.session.query(UserModel).filter_by(uid=new_uid).first() - self.assertIsNotNone(user) - self.assertEqual(new_uid, user.uid) - self.assertEqual("Laura Barnes", user.display_name) - self.assertEqual("lb3dp@virginia.edu", user.email_address) - self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title) + # Log in + non_admin_user = self._login_as_non_admin() + + # User should be in the system now. + redirect_url = 'http://worlds.best.website/admin' + rv_user = self.app.get('/v1.0/user', headers=self.logged_in_headers(non_admin_user, redirect_url=redirect_url)) + self.assert_success(rv_user) + user_data = json.loads(rv_user.get_data(as_text=True)) + self.assertEqual(self.non_admin_uid, user_data['uid']) + self.assertFalse(user_data['is_admin']) # Switch production mode back off app.config['PRODUCTION'] = False @@ -119,6 +120,8 @@ class TestAuthentication(BaseTest): user = UserModel(uid="dhf8r", first_name='Dan', last_name='Funk', email_address='dhf8r@virginia.edu') rv = self.app.get('/v1.0/user', headers=self.logged_in_headers(user, redirect_url='http://omg.edu/lolwut')) self.assert_success(rv) + user_data = json.loads(rv.get_data(as_text=True)) + self.assertTrue(user_data['is_admin']) def test_admin_can_access_admin_only_endpoints(self): # Switch production mode on @@ -126,21 +129,8 @@ class TestAuthentication(BaseTest): self.load_example_data() - admin_uids = app.config['ADMIN_UIDS'] - self.assertGreater(len(admin_uids), 0) - admin_uid = admin_uids[0] - self.assertEqual(admin_uid, 'dhf8r') # This user is in the test ldap system. 
- admin_headers = dict(Uid=admin_uid) - - rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers) - self.assert_success(rv) - - admin_user = db.session.query(UserModel).filter(UserModel.uid == admin_uid).first() - self.assertIsNotNone(admin_user) - self.assertEqual(admin_uid, admin_user.uid) - - admin_study = self._make_fake_study(admin_uid) - + admin_user = self._login_as_admin() + admin_study = self._make_fake_study(admin_user.uid) admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) rv_add_study = self.app.post( @@ -173,26 +163,9 @@ class TestAuthentication(BaseTest): self.load_example_data() # Non-admin user should not be able to delete a study - non_admin_uid = 'lb3dp' - admin_uids = app.config['ADMIN_UIDS'] - self.assertGreater(len(admin_uids), 0) - self.assertNotIn(non_admin_uid, admin_uids) - - non_admin_headers = dict(Uid=non_admin_uid) - - rv = self.app.get( - 'v1.0/login', - follow_redirects=False, - headers=non_admin_headers - ) - self.assert_success(rv) - - non_admin_user = db.session.query(UserModel).filter_by(uid=non_admin_uid).first() - self.assertIsNotNone(non_admin_user) - + non_admin_user = self._login_as_non_admin() non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) - - non_admin_study = self._make_fake_study(non_admin_uid) + non_admin_study = self._make_fake_study(non_admin_user.uid) rv_add_study = self.app.post( '/v1.0/study', @@ -216,6 +189,89 @@ class TestAuthentication(BaseTest): # Switch production mode back off app.config['PRODUCTION'] = False + def test_list_all_users(self): + self.load_example_data() + rv = self.app.get('/v1.0/user') + self.assert_failure(rv, 401) + + rv = self.app.get('/v1.0/user', headers=self.logged_in_headers()) + self.assert_success(rv) + + all_users = db.session.query(UserModel).all() + + rv = self.app.get('/v1.0/list_users', headers=self.logged_in_headers()) + self.assert_success(rv) + user_data = json.loads(rv.get_data(as_text=True)) + self.assertEqual(len(user_data), len(all_users)) + + def test_admin_can_impersonate_another_user(self): + # Switch production mode on + app.config['PRODUCTION'] = True + + self.load_example_data() + + admin_user = self._login_as_admin() + admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) + + # User should not be in the system yet. 
+ non_admin_user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() + self.assertIsNone(non_admin_user) + + # Admin should not be able to impersonate non-existent user + rv_1 = self.app.get( + '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid, + content_type="application/json", + headers=admin_token_headers, + follow_redirects=False + ) + self.assert_success(rv_1) + user_data_1 = json.loads(rv_1.get_data(as_text=True)) + self.assertEqual(user_data_1['uid'], self.admin_uid, 'Admin user should be logged in as themselves') + + # Add the non-admin user now + self.logout() + non_admin_user = self._login_as_non_admin() + self.assertEqual(non_admin_user.uid, self.non_admin_uid) + non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) + + # Add a study for the non-admin user + non_admin_study = self._make_fake_study(self.non_admin_uid) + rv_add_study = self.app.post( + '/v1.0/study', + content_type="application/json", + headers=non_admin_token_headers, + data=json.dumps(StudySchema().dump(non_admin_study)) + ) + self.assert_success(rv_add_study, 'Non-admin user should be able to add a study') + self.logout() + + # Admin should be able to impersonate user now + admin_user = self._login_as_admin() + rv_2 = self.app.get( + '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid, + content_type="application/json", + headers=admin_token_headers, + follow_redirects=False + ) + self.assert_success(rv_2) + user_data_2 = json.loads(rv_2.get_data(as_text=True)) + self.assertEqual(user_data_2['uid'], self.non_admin_uid, 'Admin user should impersonate non-admin user') + + # Study endpoint should return non-admin user's studies + rv_study = self.app.get( + '/v1.0/study', + content_type="application/json", + headers=admin_token_headers, + follow_redirects=False + ) + self.assert_success(rv_study, 'Admin user should be able to get impersonated user studies') + study_data = json.loads(rv_study.get_data(as_text=True)) + self.assertGreaterEqual(len(study_data), 1) + self.assertEqual(study_data[0]['user_uid'], self.non_admin_uid) + + # Switch production mode back off + app.config['PRODUCTION'] = False + def _make_fake_study(self, uid): return { "title": "blah", @@ -224,3 +280,42 @@ class TestAuthentication(BaseTest): "primary_investigator_id": uid, "user_uid": uid, } + + def _login_as_admin(self): + admin_uids = app.config['ADMIN_UIDS'] + self.assertGreater(len(admin_uids), 0) + self.assertIn(self.admin_uid, admin_uids) + admin_headers = dict(Uid=self.admin_uid) + + rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers) + self.assert_success(rv) + + admin_user = db.session.query(UserModel).filter(UserModel.uid == self.admin_uid).first() + self.assertIsNotNone(admin_user) + self.assertEqual(self.admin_uid, admin_user.uid) + self.assertTrue(admin_user.is_admin()) + return admin_user + + def _login_as_non_admin(self): + admin_uids = app.config['ADMIN_UIDS'] + self.assertGreater(len(admin_uids), 0) + self.assertNotIn(self.non_admin_uid, admin_uids) + + non_admin_headers = dict(Uid=self.non_admin_uid) + + rv = self.app.get( + 'v1.0/login?uid=' + self.non_admin_uid, + follow_redirects=False, + headers=non_admin_headers + ) + self.assert_success(rv) + + user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() + self.assertIsNotNone(user) + self.assertFalse(user.is_admin()) + self.assertIsNotNone(user) + self.assertEqual(self.non_admin_uid, user.uid) + self.assertEqual("Laura Barnes", 
user.display_name) + self.assertEqual("lb3dp@virginia.edu", user.email_address) + self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title) + return user From 1b0ebecbf4e0ee19726a09f20072be05a3f15dc3 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Thu, 30 Jul 2020 10:17:02 -0400 Subject: [PATCH 25/31] Uses Flask session to store impersonation state. --- crc/models/user.py | 4 +++- crc/services/user_service.py | 15 ++++++++++----- tests/base_test.py | 6 +----- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/crc/models/user.py b/crc/models/user.py index eb431c95..f047761a 100644 --- a/crc/models/user.py +++ b/crc/models/user.py @@ -65,5 +65,7 @@ class UserModelSchema(SQLAlchemyAutoSchema): model = UserModel load_instance = True include_relationships = True + is_admin = fields.Method('get_is_admin', dump_only=True) - is_admin = fields.Function(lambda obj: obj.is_admin()) + def get_is_admin(self, obj): + return obj.is_admin() diff --git a/crc/services/user_service.py b/crc/services/user_service.py index 5d12601a..c4362a65 100644 --- a/crc/services/user_service.py +++ b/crc/services/user_service.py @@ -1,4 +1,4 @@ -from flask import g +from flask import g, session from crc import db from crc.api.common import ApiError @@ -22,8 +22,8 @@ class UserService(object): @staticmethod def admin_is_impersonating(): return UserService.user_is_admin() and \ - "impersonate_user" in g and \ - g.impersonate_user is not None + "admin_impersonate_uid" in session and \ + session.get('admin_impersonate_uid') is not None # Returns true if the given user uid is different from the current user's uid. @staticmethod @@ -35,20 +35,21 @@ class UserService(object): if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) - # Admins can pretend to be different users and act on a users behalf in + # Admins can pretend to be different users and act on a user's behalf in # some circumstances. if allow_admin_impersonate and UserService.admin_is_impersonating(): return g.impersonate_user else: return g.user - # Admins can pretend to be different users and act on a users behalf in some circumstances. + # Admins can pretend to be different users and act on a user's behalf in some circumstances. # This method allows an admin user to start impersonating another user with the given uid. # Stops impersonating if the uid is None or invalid. @staticmethod def impersonate(uid=None): # Clear out the current impersonating user. g.impersonate_user = None + session.pop('admin_impersonate_uid', None) if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) @@ -57,6 +58,10 @@ class UserService(object): # Impersonate the user if the given uid is valid. g.impersonate_user = db.session.query(UserModel).filter(UserModel.uid == uid).first() + # Store the uid in the session. + if g.impersonate_user: + session['admin_impersonate_uid'] = uid + @staticmethod def in_list(uids, allow_admin_impersonate=False): """Returns true if the current user's id is in the given list of ids. 
False if there diff --git a/tests/base_test.py b/tests/base_test.py index f5b66aa9..d569af6f 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -8,7 +8,7 @@ import json import unittest import urllib.parse import datetime -from flask import g +from flask import g, session as flask_session from sqlalchemy import Sequence from crc import app, db, session @@ -110,10 +110,6 @@ class BaseTest(unittest.TestCase): def tearDown(self): ExampleDataLoader.clean_db() self.logout() - - if 'impersonate_user' in g: - g.impersonate_user = None - self.auths = {} def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'): From aa0f24bd33f9b14464542752f06612518abbde8f Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Thu, 30 Jul 2020 10:40:06 -0400 Subject: [PATCH 26/31] Fully deletes users from g in test tearDown --- crc/services/user_service.py | 10 ++++++---- tests/base_test.py | 15 ++++++++++++++- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/crc/services/user_service.py b/crc/services/user_service.py index c4362a65..d29ea5f8 100644 --- a/crc/services/user_service.py +++ b/crc/services/user_service.py @@ -21,6 +21,7 @@ class UserService(object): # Returns true if the current admin user is impersonating another user. @staticmethod def admin_is_impersonating(): + print("session.get('admin_impersonate_uid')", session.get('admin_impersonate_uid')) return UserService.user_is_admin() and \ "admin_impersonate_uid" in session and \ session.get('admin_impersonate_uid') is not None @@ -37,7 +38,7 @@ class UserService(object): # Admins can pretend to be different users and act on a user's behalf in # some circumstances. - if allow_admin_impersonate and UserService.admin_is_impersonating(): + if allow_admin_impersonate and UserService.admin_is_impersonating() and 'impersonate_user' in g: return g.impersonate_user else: return g.user @@ -47,9 +48,10 @@ class UserService(object): # Stops impersonating if the uid is None or invalid. @staticmethod def impersonate(uid=None): - # Clear out the current impersonating user. - g.impersonate_user = None - session.pop('admin_impersonate_uid', None) + # if uid is None: + # # Clear out the current impersonating user. + # g.impersonate_user = None + # session.pop('admin_impersonate_uid', None) if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) diff --git a/tests/base_test.py b/tests/base_test.py index d569af6f..747949f3 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -415,4 +415,17 @@ class BaseTest(unittest.TestCase): return workflow def logout(self): - g.user = None + print("logout before 'user' in g", 'user' in g) + print('logout before flask_session', flask_session) + print("logout before 'impersonate_user' in g", 'impersonate_user' in g) + + if 'user' in g: + del g.user + + flask_session.clear() + if 'impersonate_user' in g: + del g.impersonate_user + + print("logout after 'user' in g", 'user' in g) + print('logout after flask_session', flask_session) + print("logout after 'impersonate_user' in g", 'impersonate_user' in g) From faba0f55ab31435e1e8824981f17c5713c315434 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Thu, 30 Jul 2020 12:40:53 -0400 Subject: [PATCH 27/31] Adds AdminSession model and refactors impersonation methods to use it. 
--- crc/api/user.py | 48 ++++++++++++---- crc/models/user.py | 7 +++ crc/services/user_service.py | 84 +++++++++++++++++++++------- migrations/versions/ab06a94e5d4c_.py | 34 +++++++++++ tests/base_test.py | 47 +++++++--------- tests/test_authentication.py | 24 ++++---- 6 files changed, 171 insertions(+), 73 deletions(-) create mode 100644 migrations/versions/ab06a94e5d4c_.py diff --git a/crc/api/user.py b/crc/api/user.py index 483edd65..5992626b 100644 --- a/crc/api/user.py +++ b/crc/api/user.py @@ -1,7 +1,7 @@ import flask from flask import g, request -from crc import app, db +from crc import app, session from crc.api.common import ApiError from crc.models.user import UserModel, UserModelSchema from crc.services.ldap_service import LdapService, LdapModel @@ -36,6 +36,10 @@ def verify_token(token=None): try: token_info = UserModel.decode_auth_token(token) g.user = UserModel.query.filter_by(uid=token_info['sub']).first() + + # If the user is valid, store the token for this session + if g.user: + g.token = token except: raise failure_error if g.user is not None: @@ -50,9 +54,11 @@ def verify_token(token=None): if uid is not None: db_user = UserModel.query.filter_by(uid=uid).first() + # If the user is valid, store the user and token for this session if db_user is not None: g.user = db_user token = g.user.encode_auth_token().decode() + g.token = token token_info = UserModel.decode_auth_token(token) return token_info @@ -87,18 +93,36 @@ def verify_token_admin(token=None): return token_info -def get_current_user(admin_impersonate_uid=None): - if UserService.has_user(): - if admin_impersonate_uid is not None and UserService.user_is_admin(): - UserService.impersonate(admin_impersonate_uid) +def start_impersonating(uid): + if uid is not None and UserService.user_is_admin(): + UserService.start_impersonating(uid) - user = UserService.current_user(UserService.admin_is_impersonating()) + user = UserService.current_user(allow_admin_impersonate=True) + return UserModelSchema().dump(user) + + +def stop_impersonating(): + if UserService.user_is_admin(): + UserService.stop_impersonating() + + user = UserService.current_user(allow_admin_impersonate=False) + return UserModelSchema().dump(user) + + +def get_current_user(admin_impersonate_uid=None): + if UserService.user_is_admin(): + if admin_impersonate_uid is not None: + UserService.start_impersonating(admin_impersonate_uid) + else: + UserService.stop_impersonating() + + user = UserService.current_user(UserService.user_is_admin() and UserService.admin_is_impersonating()) return UserModelSchema().dump(user) def get_all_users(): if "user" in g and g.user.is_admin(): - all_users = db.session.query(UserModel).all() + all_users = session.query(UserModel).all() return UserModelSchema(many=True).dump(all_users) @@ -189,6 +213,8 @@ def _handle_login(user_info: LdapModel, redirect_url=None): # Return the frontend auth callback URL, with auth token appended. 
auth_token = user.encode_auth_token().decode() + g.token = auth_token + if redirect_url is not None: if redirect_url.find("http://") != 0 and redirect_url.find("https://") != 0: redirect_url = "http://" + redirect_url @@ -201,13 +227,13 @@ def _handle_login(user_info: LdapModel, redirect_url=None): def _upsert_user(user_info): - user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).first() + user = session.query(UserModel).filter(UserModel.uid == user_info.uid).first() if user is None: # Add new user user = UserModel() else: - user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first() + user = session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first() user.uid = user_info.uid user.display_name = user_info.display_name @@ -215,8 +241,8 @@ def _upsert_user(user_info): user.affiliation = user_info.affiliation user.title = user_info.title - db.session.add(user) - db.session.commit() + session.add(user) + session.commit() return user diff --git a/crc/models/user.py b/crc/models/user.py index f047761a..5b6c5dbb 100644 --- a/crc/models/user.py +++ b/crc/models/user.py @@ -69,3 +69,10 @@ class UserModelSchema(SQLAlchemyAutoSchema): def get_is_admin(self, obj): return obj.is_admin() + + +class AdminSessionModel(db.Model): + __tablename__ = 'admin_session' + id = db.Column(db.Integer, primary_key=True) + token = db.Column(db.String, unique=True) + admin_impersonate_uid = db.Column(db.String) diff --git a/crc/services/user_service.py b/crc/services/user_service.py index d29ea5f8..56a360ee 100644 --- a/crc/services/user_service.py +++ b/crc/services/user_service.py @@ -1,8 +1,8 @@ -from flask import g, session +from flask import g -from crc import db +from crc import session from crc.api.common import ApiError -from crc.models.user import UserModel +from crc.models.user import UserModel, AdminSessionModel class UserService(object): @@ -11,7 +11,10 @@ class UserService(object): # Returns true if the current user is logged in. @staticmethod def has_user(): - return 'user' in g and bool(g.user) + return 'token' in g and \ + bool(g.token) and \ + 'user' in g and \ + bool(g.user) # Returns true if the current user is an admin. @staticmethod @@ -21,10 +24,12 @@ class UserService(object): # Returns true if the current admin user is impersonating another user. @staticmethod def admin_is_impersonating(): - print("session.get('admin_impersonate_uid')", session.get('admin_impersonate_uid')) - return UserService.user_is_admin() and \ - "admin_impersonate_uid" in session and \ - session.get('admin_impersonate_uid') is not None + if UserService.user_is_admin(): + adminSession: AdminSessionModel = UserService.get_admin_session() + return adminSession is not None + + else: + raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403) # Returns true if the given user uid is different from the current user's uid. @staticmethod @@ -32,14 +37,14 @@ class UserService(object): return UserService.has_user() and uid is not None and uid is not g.user.uid @staticmethod - def current_user(allow_admin_impersonate=False): + def current_user(allow_admin_impersonate=False) -> UserModel: if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) # Admins can pretend to be different users and act on a user's behalf in # some circumstances. 
- if allow_admin_impersonate and UserService.admin_is_impersonating() and 'impersonate_user' in g: - return g.impersonate_user + if UserService.user_is_admin() and allow_admin_impersonate and UserService.admin_is_impersonating(): + return UserService.get_admin_session_user() else: return g.user @@ -47,22 +52,42 @@ class UserService(object): # This method allows an admin user to start impersonating another user with the given uid. # Stops impersonating if the uid is None or invalid. @staticmethod - def impersonate(uid=None): - # if uid is None: - # # Clear out the current impersonating user. - # g.impersonate_user = None - # session.pop('admin_impersonate_uid', None) - + def start_impersonating(uid=None): if not UserService.has_user(): raise ApiError("logged_out", "You are no longer logged in.", status_code=401) + if not UserService.user_is_admin(): + raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403) + + if uid is None: + raise ApiError("invalid_uid", "Please provide a valid user uid.") + if not UserService.admin_is_impersonating() and UserService.is_different_user(uid): # Impersonate the user if the given uid is valid. - g.impersonate_user = db.session.query(UserModel).filter(UserModel.uid == uid).first() + impersonate_user = session.query(UserModel).filter(UserModel.uid == uid).first() - # Store the uid in the session. - if g.impersonate_user: - session['admin_impersonate_uid'] = uid + if impersonate_user is not None: + g.impersonate_user = impersonate_user + + # Store the uid and user session token. + session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid)) + session.commit() + else: + raise ApiError("invalid_uid", "The uid provided is not valid.") + + @staticmethod + def stop_impersonating(): + if not UserService.has_user(): + raise ApiError("logged_out", "You are no longer logged in.", status_code=401) + + # Clear out the current impersonating user. + if 'impersonate_user' in g: + del g.impersonate_user + + admin_session: AdminSessionModel = UserService.get_admin_session() + if admin_session: + session.delete(admin_session) + session.commit() @staticmethod def in_list(uids, allow_admin_impersonate=False): @@ -73,3 +98,20 @@ class UserService(object): if user.uid in uids: return True return False + + @staticmethod + def get_admin_session() -> AdminSessionModel: + if UserService.user_is_admin(): + return session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first() + else: + raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403) + + @staticmethod + def get_admin_session_user() -> UserModel: + if UserService.user_is_admin(): + admin_session = UserService.get_admin_session() + + if admin_session is not None: + return session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first() + else: + raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403) \ No newline at end of file diff --git a/migrations/versions/ab06a94e5d4c_.py b/migrations/versions/ab06a94e5d4c_.py new file mode 100644 index 00000000..5d9335dc --- /dev/null +++ b/migrations/versions/ab06a94e5d4c_.py @@ -0,0 +1,34 @@ +"""empty message + +Revision ID: ab06a94e5d4c +Revises: 2e7b377cbc7b +Create Date: 2020-07-30 11:23:46.601338 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'ab06a94e5d4c' +down_revision = '2e7b377cbc7b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('admin_session', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('token', sa.String(), nullable=True), + sa.Column('admin_impersonate_uid', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('token') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('admin_session') + # ### end Alembic commands ### diff --git a/tests/base_test.py b/tests/base_test.py index 747949f3..af1cace4 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -8,7 +8,7 @@ import json import unittest import urllib.parse import datetime -from flask import g, session as flask_session +from flask import g from sqlalchemy import Sequence from crc import app, db, session @@ -149,19 +149,19 @@ class BaseTest(unittest.TestCase): # If in production mode, only add the first user. if app.config['PRODUCTION']: - db.session.add(UserModel(**self.users[0])) + session.add(UserModel(**self.users[0])) else: for user_json in self.users: - db.session.add(UserModel(**user_json)) + session.add(UserModel(**user_json)) - db.session.commit() + session.commit() for study_json in self.studies: study_model = StudyModel(**study_json) - db.session.add(study_model) + session.add(study_model) StudyService._add_all_workflow_specs_to_study(study_model) - db.session.execute(Sequence(StudyModel.__tablename__ + '_id_seq')) - db.session.commit() - db.session.flush() + session.execute(Sequence(StudyModel.__tablename__ + '_id_seq')) + session.commit() + session.flush() specs = session.query(WorkflowSpecModel).all() self.assertIsNotNone(specs) @@ -185,8 +185,8 @@ class BaseTest(unittest.TestCase): """Loads a spec into the database based on a directory in /tests/data""" if category_id is None: category = WorkflowSpecCategoryModel(name="test", display_name="Test Workflows", display_order=0) - db.session.add(category) - db.session.commit() + session.add(category) + session.commit() category_id = category.id if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0: @@ -240,7 +240,7 @@ class BaseTest(unittest.TestCase): file = open(file_path, "rb") data = file.read() - file_model = db.session.query(FileModel).filter(FileModel.name == name).first() + file_model = session.query(FileModel).filter(FileModel.name == name).first() noise, file_extension = os.path.splitext(file_path) content_type = CONTENT_TYPES[file_extension[1:]] file_service.update_file(file_model, data, content_type) @@ -249,8 +249,8 @@ class BaseTest(unittest.TestCase): user = session.query(UserModel).filter(UserModel.uid == uid).first() if user is None: user = UserModel(uid=uid, email_address=email, display_name=display_name) - db.session.add(user) - db.session.commit() + session.add(user) + session.commit() return user def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", @@ -260,8 +260,8 @@ class BaseTest(unittest.TestCase): user = self.create_user(uid=uid) study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.active, user_uid=user.uid, primary_investigator_id=primary_investigator_id) - db.session.add(study) - db.session.commit() + session.add(study) + session.commit() return study def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, 
statuses, @@ -288,8 +288,8 @@ class BaseTest(unittest.TestCase): return full_study def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"): - db.session.flush() - spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first() + session.flush() + spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first() if spec is None: if display_name is None: display_name = workflow_name @@ -322,8 +322,8 @@ class BaseTest(unittest.TestCase): version = version or 1 approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, version=version) - db.session.add(approval) - db.session.commit() + session.add(approval) + session.commit() return approval def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"): @@ -415,17 +415,8 @@ class BaseTest(unittest.TestCase): return workflow def logout(self): - print("logout before 'user' in g", 'user' in g) - print('logout before flask_session', flask_session) - print("logout before 'impersonate_user' in g", 'impersonate_user' in g) - if 'user' in g: del g.user - flask_session.clear() if 'impersonate_user' in g: del g.impersonate_user - - print("logout after 'user' in g", 'user' in g) - print('logout after flask_session', flask_session) - print("logout after 'impersonate_user' in g", 'impersonate_user' in g) diff --git a/tests/test_authentication.py b/tests/test_authentication.py index 61f578a0..469885f4 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -5,7 +5,7 @@ from datetime import timezone, datetime, timedelta import jwt from tests.base_test import BaseTest -from crc import db, app +from crc import app, session from crc.api.common import ApiError from crc.models.protocol_builder import ProtocolBuilderStatus from crc.models.study import StudySchema, StudyModel @@ -62,7 +62,7 @@ class TestAuthentication(BaseTest): def test_non_production_auth_creates_user(self): new_uid = self.non_admin_uid ## Assure this user id is in the fake responses from ldap. self.load_example_data() - user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first() + user = session.query(UserModel).filter(UserModel.uid == new_uid).first() self.assertIsNone(user) user_info = {'uid': new_uid, 'first_name': 'Cordi', 'last_name': 'Nator', @@ -74,7 +74,7 @@ class TestAuthentication(BaseTest): self.assertTrue(rv_1.status_code == 302) self.assertTrue(str.startswith(rv_1.location, redirect_url)) - user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first() + user = session.query(UserModel).filter(UserModel.uid == new_uid).first() self.assertIsNotNone(user) self.assertIsNotNone(user.display_name) self.assertIsNotNone(user.email_address) @@ -91,7 +91,7 @@ class TestAuthentication(BaseTest): self.load_example_data() # User should not be in the system yet. 
- user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() + user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() self.assertIsNone(user) # Log in @@ -143,7 +143,7 @@ class TestAuthentication(BaseTest): self.assert_success(rv_add_study, 'Admin user should be able to add a study') new_admin_study = json.loads(rv_add_study.get_data(as_text=True)) - db_admin_study = db.session.query(StudyModel).filter_by(id=new_admin_study['id']).first() + db_admin_study = session.query(StudyModel).filter_by(id=new_admin_study['id']).first() self.assertIsNotNone(db_admin_study) rv_del_study = self.app.delete( @@ -176,7 +176,7 @@ class TestAuthentication(BaseTest): self.assert_success(rv_add_study, 'Non-admin user should be able to add a study') new_non_admin_study = json.loads(rv_add_study.get_data(as_text=True)) - db_non_admin_study = db.session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first() + db_non_admin_study = session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first() self.assertIsNotNone(db_non_admin_study) rv_non_admin_del_study = self.app.delete( @@ -197,7 +197,7 @@ class TestAuthentication(BaseTest): rv = self.app.get('/v1.0/user', headers=self.logged_in_headers()) self.assert_success(rv) - all_users = db.session.query(UserModel).all() + all_users = session.query(UserModel).all() rv = self.app.get('/v1.0/list_users', headers=self.logged_in_headers()) self.assert_success(rv) @@ -214,7 +214,7 @@ class TestAuthentication(BaseTest): admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) # User should not be in the system yet. - non_admin_user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() + non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() self.assertIsNone(non_admin_user) # Admin should not be able to impersonate non-existent user @@ -224,9 +224,7 @@ class TestAuthentication(BaseTest): headers=admin_token_headers, follow_redirects=False ) - self.assert_success(rv_1) - user_data_1 = json.loads(rv_1.get_data(as_text=True)) - self.assertEqual(user_data_1['uid'], self.admin_uid, 'Admin user should be logged in as themselves') + self.assert_failure(rv_1, 400) # Add the non-admin user now self.logout() @@ -290,7 +288,7 @@ class TestAuthentication(BaseTest): rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers) self.assert_success(rv) - admin_user = db.session.query(UserModel).filter(UserModel.uid == self.admin_uid).first() + admin_user = session.query(UserModel).filter(UserModel.uid == self.admin_uid).first() self.assertIsNotNone(admin_user) self.assertEqual(self.admin_uid, admin_user.uid) self.assertTrue(admin_user.is_admin()) @@ -310,7 +308,7 @@ class TestAuthentication(BaseTest): ) self.assert_success(rv) - user = db.session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() + user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() self.assertIsNotNone(user) self.assertFalse(user.is_admin()) self.assertIsNotNone(user) From 28d3f835e87065c3529613e25b359fb58124fcea Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Thu, 30 Jul 2020 13:21:50 -0400 Subject: [PATCH 28/31] Updates package hashes --- Pipfile.lock | 1 + 1 file changed, 1 insertion(+) diff --git a/Pipfile.lock b/Pipfile.lock index f86d6e78..bb4a7d1d 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -247,6 +247,7 @@ 
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74", "sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.10" }, "docutils": { From 9112be548de0c401412f4a8bce02686ba6807f4c Mon Sep 17 00:00:00 2001 From: Dan Funk Date: Thu, 30 Jul 2020 15:04:09 -0400 Subject: [PATCH 29/31] Adding box as a direct dependency. Really uncertain how this is working everwhere but in the actual deployment. --- Pipfile | 1 + Pipfile.lock | 10 +++++++++- setup.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Pipfile b/Pipfile index 56f3bc26..f16e89fa 100644 --- a/Pipfile +++ b/Pipfile @@ -46,6 +46,7 @@ werkzeug = "*" xlrd = "*" xlsxwriter = "*" pygithub = "*" +python-box = "*" [requires] python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 8ce6175a..7ca0d3d7 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "381d29428eb328ad6167774b510b9d818bd1505b95f50454a19f1564782326cc" + "sha256": "45dc348da1f583da4a7c76113456b3f0225736e79a5da05ba2af9ede7f8089e0" }, "pipfile-spec": 6, "requires": { @@ -659,6 +659,14 @@ ], "version": "==0.16.0" }, + "python-box": { + "hashes": [ + "sha256:bcb057e8960f4d888a4caf8f668eeca3c5c61ad349d8d81c4339414984fa9454", + "sha256:f02e059a299cac0515687aafec7543d401b12759d6578e53fae74154e0cbaa79" + ], + "index": "pypi", + "version": "==5.1.0" + }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", diff --git a/setup.py b/setup.py index 159a3d35..74c022d1 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,3 @@ from setuptools import setup -setup(setup_requires=["pbr"], pbr=True) +setup(setup_requires=["pbr"], pbr=True, install_requires=['box']) From 438a31c9ece37e57997f693d859eef7eb14fde96 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Fri, 31 Jul 2020 13:19:26 -0400 Subject: [PATCH 30/31] Updates packages --- Pipfile | 3 +-- Pipfile.lock | 47 +++++++++-------------------------------------- 2 files changed, 10 insertions(+), 40 deletions(-) diff --git a/Pipfile b/Pipfile index f16e89fa..2cc41e39 100644 --- a/Pipfile +++ b/Pipfile @@ -38,9 +38,8 @@ recommonmark = "*" requests = "*" sentry-sdk = {extras = ["flask"],version = "==0.14.4"} sphinx = "*" -spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"} -#spiffworkflow = {editable = true,path="/home/kelly/sartography/SpiffWorkflow/"} swagger-ui-bundle = "*" +spiffworkflow = {editable = true, git = "https://github.com/sartography/SpiffWorkflow.git", ref = "master"} webtest = "*" werkzeug = "*" xlrd = "*" diff --git a/Pipfile.lock b/Pipfile.lock index c955e731..634fea80 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "45dc348da1f583da4a7c76113456b3f0225736e79a5da05ba2af9ede7f8089e0" + "sha256": "096abf7ce152358489282a004ed634ca64730cb98276f3a513ed2d5b8a6635c6" }, "pipfile-spec": 6, "requires": { @@ -32,11 +32,11 @@ }, "amqp": { "hashes": [ - "sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b", - "sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139" + "sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21", + "sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.6.0" + "version": "==2.6.1" }, 
"aniso8601": { "hashes": [ @@ -247,6 +247,7 @@ "sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74", "sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.10" }, "docutils": { @@ -378,14 +379,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, - "importlib-metadata": { - "hashes": [ - "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83", - "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070" - ], - "markers": "python_version < '3.8'", - "version": "==1.7.0" - }, "inflection": { "hashes": [ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", @@ -955,6 +948,7 @@ "version": "==1.1.4" }, "spiffworkflow": { + "editable": true, "git": "https://github.com/sartography/SpiffWorkflow.git", "ref": "7c8d59e7b9a978795bc8d1f354002fdc89540672" }, @@ -1076,14 +1070,6 @@ ], "index": "pypi", "version": "==1.3.0" - }, - "zipp": { - "hashes": [ - "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", - "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" - ], - "markers": "python_version >= '3.6'", - "version": "==3.1.0" } }, "develop": { @@ -1135,19 +1121,12 @@ "index": "pypi", "version": "==5.2.1" }, - "importlib-metadata": { - "hashes": [ - "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83", - "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070" - ], - "markers": "python_version < '3.8'", - "version": "==1.7.0" - }, "iniconfig": { "hashes": [ - "sha256:aa0b40f50a00e72323cb5d41302f9c6165728fd764ac8822aa3fff00a40d56b4" + "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437", + "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69" ], - "version": "==1.0.0" + "version": "==1.0.1" }, "more-itertools": { "hashes": [ @@ -1219,14 +1198,6 @@ "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" ], "version": "==0.10.1" - }, - "zipp": { - "hashes": [ - "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", - "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" - ], - "markers": "python_version >= '3.6'", - "version": "==3.1.0" } } } From ca9ef332baece1b861c98c786dff8c9a24d2e153 Mon Sep 17 00:00:00 2001 From: Aaron Louie Date: Fri, 31 Jul 2020 13:19:37 -0400 Subject: [PATCH 31/31] Fixes failing test. --- crc/services/workflow_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crc/services/workflow_processor.py b/crc/services/workflow_processor.py index 29d2bf51..c58fa098 100644 --- a/crc/services/workflow_processor.py +++ b/crc/services/workflow_processor.py @@ -190,7 +190,7 @@ class WorkflowProcessor(object): bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only #try: - bpmn_workflow.do_engine_steps() + # bpmn_workflow.do_engine_steps() # except WorkflowException as we: # raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender) return bpmn_workflow