Merge pull request #22 from sartography/feature/workflow_spec_categories

Feature/workflow spec categories
Aaron Louie 2020-03-16 14:00:10 -04:00 committed by GitHub
commit 95f4592f71
23 changed files with 618 additions and 62 deletions

View File

@@ -15,6 +15,7 @@ connexion_app = connexion.FlaskApp(__name__)
app = connexion_app.app
app.config.from_object('config.default')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
auth = HTTPTokenAuth('Bearer')
if "TESTING" in os.environ and os.environ["TESTING"] == "true":

View File

@@ -443,11 +443,14 @@ paths:
content:
multipart/form-data:
schema:
x-body-name: file
type: object
properties:
file:
type: string
format: binary
required:
- file
responses:
'200':
description: Returns the actual file
@@ -716,10 +719,31 @@ components:
properties:
id:
type: string
name:
type: string
display_name:
type: string
description:
type: string
primary_process_id:
type: string
nullable: true
workflow_spec_category_id:
type: integer
nullable: true
workflow_spec_category:
$ref: "#/components/schemas/WorkflowSpecCategory"
is_status:
type: boolean
nullable: true
WorkflowSpecCategory:
properties:
id:
type: integer
name:
type: string
display_name:
type: string
File:
properties:
id:
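For orientation, here is a hypothetical WorkflowSpec object shaped by the extended schemas above, written as a Python dict; every value is invented for illustration and is not part of this commit.

# Hypothetical payload matching the WorkflowSpec / WorkflowSpecCategory schemas above.
workflow_spec_example = {
    "id": "enter_core_info",
    "name": "enter_core_info",
    "display_name": "Enter Core Info",
    "description": "Collects core study information.",
    "primary_process_id": None,
    "workflow_spec_category_id": 1,
    "workflow_spec_category": {"id": 1, "name": "training", "display_name": "Training"},
    "is_status": False,
}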

View File

@@ -23,16 +23,46 @@ def all_studies():
@auth.login_required
def add_study(body):
study = StudyModelSchema().load(body, session=session)
study: StudyModel = StudyModelSchema().load(body, session=session)
status_spec = __get_status_spec(study.status_spec_id)
# Get latest status spec version
if status_spec is not None:
study.status_spec_id = status_spec.id
study.status_spec_version = WorkflowProcessor.get_latest_version_string(status_spec.id)
session.add(study)
session.commit()
# FIXME: We need to ask the protocol builder what workflows to add to the study, not just add them all.
for spec in session.query(WorkflowSpecModel).all():
WorkflowProcessor.create(study.id, spec.id)
__add_study_workflows_from_status(study.id, status_spec)
return StudyModelSchema().dump(study)
def __get_status_spec(status_spec_id):
if status_spec_id is None:
return session.query(WorkflowSpecModel).filter_by(is_status=True).first()
else:
return session.query(WorkflowSpecModel).filter_by(id=status_spec_id).first()
def __add_study_workflows_from_status(study_id, status_spec):
all_specs = session.query(WorkflowSpecModel).all()
if status_spec is not None:
# Run status spec to get list of workflow specs applicable to this study
status_processor = WorkflowProcessor.create(study_id, status_spec)
status_processor.do_engine_steps()
status_data = status_processor.next_task().data
# Only add workflow specs listed in status spec
for spec in all_specs:
if spec.id in status_data and status_data[spec.id]:
WorkflowProcessor.create(study_id, spec.id)
else:
# No status spec. Just add all workflows.
for spec in all_specs:
WorkflowProcessor.create(study_id, spec.id)
@auth.login_required
def update_study(study_id, body):
if study_id is None:
@@ -130,23 +160,63 @@ def post_update_study_from_protocol_builder(study_id):
@auth.login_required
def get_study_workflows(study_id):
workflow_models = session.query(WorkflowModel).filter_by(study_id=study_id).all()
# Get study
study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
# Get study status spec
status_spec: WorkflowSpecModel = session.query(WorkflowSpecModel)\
.filter_by(is_status=True).first()
status_data = None
if status_spec is not None:
# Run status spec
status_workflow_model: WorkflowModel = session.query(WorkflowModel)\
.filter_by(study_id=study.id)\
.filter_by(workflow_spec_id=status_spec.id)\
.first()
status_processor = WorkflowProcessor(status_workflow_model)
# Get list of active workflow specs for study
status_processor.do_engine_steps()
status_data = status_processor.bpmn_workflow.last_task.data
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.filter(WorkflowModel.workflow_spec_id != status_spec.id)\
.all()
else:
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.all()
api_models = []
for workflow_model in workflow_models:
processor = WorkflowProcessor(workflow_model,
workflow_model.bpmn_workflow_json)
api_models.append(__get_workflow_api_model(processor))
api_models.append(__get_workflow_api_model(processor, status_data))
schema = WorkflowApiSchema(many=True)
return schema.dump(api_models)
@auth.login_required
def add_workflow_to_study(study_id, body):
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id=body["id"]).first()
workflow_spec_model: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=body["id"]).first()
if workflow_spec_model is None:
error = ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.')
return ApiErrorSchema.dump(error), 404
processor = WorkflowProcessor.create(study_id, workflow_spec_model.id)
# If workflow spec is a status spec, update the study status spec
if workflow_spec_model.is_status:
study = session.query(StudyModel).filter_by(id=study_id).first()
study.status_spec_id = workflow_spec_model.id
study.status_spec_version = processor.get_spec_version()
session.add(study)
session.commit()
return WorkflowApiSchema().dump(__get_workflow_api_model(processor))
@@ -193,3 +263,4 @@ def map_pb_study_to_study(pb_study):
study_info['inactive'] = False
return study_info
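Taken together, the study endpoints above apply one rule: when a status spec exists, the data left on its final task (a dict keyed by workflow spec id) decides which specs a study gets; when it does not, every spec applies. A minimal, self-contained sketch of that rule follows; the helper name and plain-dict inputs are illustrative only. It mirrors __add_study_workflows_from_status, while get_study_workflows instead defaults a spec to active when the status data does not mention it.

# Hypothetical helper illustrating the filtering rule used by the code above.
# status_data is the data dict from the status workflow's last task, mapping
# workflow spec ids to booleans; None means the study has no status spec.
def active_spec_ids(all_spec_ids, status_data=None):
    if status_data is None:
        # No status spec: every workflow spec applies to the study.
        return list(all_spec_ids)
    # Otherwise only specs the status workflow explicitly flagged True apply.
    return [spec_id for spec_id in all_spec_ids if status_data.get(spec_id, False)]

# Example: the status workflow enabled core info but not the data security plan.
print(active_spec_ids(["enter_core_info", "data_security_plan"],
                      {"enter_core_info": True, "data_security_plan": False}))
# -> ['enter_core_info']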

View File

@@ -17,7 +17,8 @@ def all_specifications():
@auth.login_required
def add_workflow_specification(body):
new_spec = WorkflowSpecModelSchema().load(body, session=session)
new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
new_spec.is_status = new_spec.id == 'status'
session.add(new_spec)
session.commit()
return WorkflowSpecModelSchema().dump(new_spec)
@@ -69,9 +70,14 @@ def delete_workflow_specification(spec_id):
session.commit()
def __get_workflow_api_model(processor: WorkflowProcessor):
def __get_workflow_api_model(processor: WorkflowProcessor, status_data=None):
spiff_tasks = processor.get_all_user_tasks()
user_tasks = list(map(Task.from_spiff, spiff_tasks))
is_active = True
if status_data is not None and processor.workflow_spec_id in status_data:
is_active = status_data[processor.workflow_spec_id]
workflow_api = WorkflowApi(
id=processor.get_workflow_id(),
status=processor.get_status(),
@@ -80,7 +86,8 @@ def __get_workflow_api_model(processor: WorkflowProcessor):
user_tasks=user_tasks,
workflow_spec_id=processor.workflow_spec_id,
spec_version=processor.get_spec_version(),
is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id)
is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id),
is_active=is_active
)
if processor.next_task():
workflow_api.next_task = Task.from_spiff(processor.next_task())
@@ -89,9 +96,8 @@ def __get_workflow_api_model(processor: WorkflowProcessor):
@auth.login_required
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
workflow_api_model = __get_workflow_api_model(processor)
update_workflow_stats(workflow_model, workflow_api_model)
return WorkflowApiSchema().dump(workflow_api_model)

View File

@@ -95,7 +95,8 @@ class TaskSchema(ma.Schema):
class WorkflowApi(object):
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version, is_latest_spec):
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version,
is_latest_spec, is_active):
self.id = id
self.status = status
self.user_tasks = user_tasks
@@ -104,13 +105,13 @@ class WorkflowApi(object):
self.workflow_spec_id = workflow_spec_id
self.spec_version = spec_version
self.is_latest_spec = is_latest_spec
self.is_active = is_active
class WorkflowApiSchema(ma.Schema):
class Meta:
model = WorkflowApi
fields = ["id", "status", "user_tasks", "last_task", "next_task",
"workflow_spec_id", "spec_version", "is_latest_spec",
"num_tasks_total", "num_tasks_complete", "num_tasks_incomplete"]
"workflow_spec_id", "spec_version", "is_latest_spec", "is_active"]
unknown = INCLUDE
status = EnumField(WorkflowStatus)
@@ -120,15 +121,7 @@ class WorkflowApiSchema(ma.Schema):
@marshmallow.post_load
def make_workflow(self, data, **kwargs):
keys = [
'id',
'status',
'user_tasks',
'last_task',
'next_task',
'workflow_spec_id',
'spec_version',
'is_latest_spec'
]
keys = ['id', 'status', 'user_tasks', 'last_task', 'next_task',
'workflow_spec_id', 'spec_version', 'is_latest_spec', "is_active"]
filtered_fields = {key: data[key] for key in keys}
return WorkflowApi(**filtered_fields)

View File

@@ -1,7 +1,7 @@
import enum
from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import ModelSchema
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func
from sqlalchemy.dialects.postgresql import UUID
@@ -71,6 +71,7 @@ class FileModel(db.Model):
name = db.Column(db.String)
type = db.Column(db.Enum(FileType))
primary = db.Column(db.Boolean)
is_status = db.Column(db.Boolean)
content_type = db.Column(db.String)
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
@@ -80,8 +81,10 @@ class FileModel(db.Model):
latest_version = db.Column(db.Integer, default=0)
class FileModelSchema(ModelSchema):
class FileModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = FileModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
type = EnumField(FileType)
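The same ModelSchema to SQLAlchemyAutoSchema swap repeats in the model modules below; it follows the marshmallow-sqlalchemy 0.22 rename, where the new auto schema no longer loads model instances, includes relationships, or emits foreign-key columns unless asked, hence the three Meta options. A minimal sketch with a hypothetical model (not from this repository):

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base          # SQLAlchemy 1.4+
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema

Base = declarative_base()

class ExampleModel(Base):                             # hypothetical model
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class ExampleModelSchema(SQLAlchemyAutoSchema):
    class Meta:
        model = ExampleModel
        load_instance = True           # ModelSchema loaded instances implicitly
        include_relationships = True   # relationships are now opt-in
        include_fk = True              # foreign key columns are now opt-in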

View File

@@ -1,4 +1,4 @@
from marshmallow_sqlalchemy import ModelSchema
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db
@@ -16,9 +16,11 @@ class WorkflowStatsModel(db.Model):
last_updated = db.Column(db.DateTime)
class WorkflowStatsModelSchema(ModelSchema):
class WorkflowStatsModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = WorkflowStatsModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
@@ -35,7 +37,9 @@ class TaskEventModel(db.Model):
date = db.Column(db.DateTime)
class TaskEventModelSchema(ModelSchema):
class TaskEventModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = TaskEventModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys

View File

@@ -1,5 +1,5 @@
from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import ModelSchema
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func
from crc import db
@@ -20,11 +20,15 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
inactive = db.Column(db.Boolean, default=False)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
status_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
status_spec_version = db.Column(db.String)
class StudyModelSchema(ModelSchema):
class StudyModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = StudyModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
protocol_builder_status = EnumField(ProtocolBuilderStatus)

View File

@@ -1,7 +1,7 @@
import datetime
import jwt
from marshmallow_sqlalchemy import ModelSchema
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db, app
from crc.api.common import ApiError
@@ -52,7 +52,9 @@ class UserModel(db.Model):
raise ApiError('token_invalid', 'The Authentication token you provided. You need a new token. ')
class UserModelSchema(ModelSchema):
class UserModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = UserModel
load_instance = True
include_relationships = True

View File

@@ -1,9 +1,22 @@
import enum
from marshmallow_sqlalchemy import ModelSchema
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db
class WorkflowSpecCategoryModel(db.Model):
__tablename__ = 'workflow_spec_category'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String)
display_name = db.Column(db.String)
class WorkflowSpecCategoryModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = WorkflowSpecCategoryModel
load_instance = True
include_relationships = True
class WorkflowSpecModel(db.Model):
__tablename__ = 'workflow_spec'
@@ -12,11 +25,17 @@ class WorkflowSpecModel(db.Model):
display_name = db.Column(db.String)
description = db.Column(db.Text)
primary_process_id = db.Column(db.String)
workflow_spec_category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
workflow_spec_category = db.relationship("WorkflowSpecCategoryModel")
is_status = db.Column(db.Boolean, default=False)
class WorkflowSpecModelSchema(ModelSchema):
class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = WorkflowSpecModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
class WorkflowStatus(enum.Enum):
@@ -34,4 +53,3 @@ class WorkflowModel(db.Model):
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
spec_version = db.Column(db.String)
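For context on how the new category table gets used, a rough sketch follows; it assumes the project's crc session, assumes example data is loaded so a spec row exists, and uses invented names (test_update_workflow_specification further down exercises the same path through the REST API):

from crc import session
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowSpecModel

# Create a category, then point an existing spec at it (ids/names are illustrative).
category = WorkflowSpecCategoryModel(id=1, name='training', display_name='Training Workflows')
session.add(category)
session.commit()

spec = session.query(WorkflowSpecModel).first()
spec.workflow_spec_category_id = category.id
session.add(spec)
session.commit()

# The relationship declared above resolves the full category object.
assert spec.workflow_spec_category.display_name == 'Training Workflows'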

View File

@@ -28,6 +28,11 @@ class CompleteTemplate(Script):
"the name of the docx template to use.")
file_name = args[0]
workflow_spec_model = self.find_spec_model_in_db(task.workflow)
task_study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
if task_study_id != study_id:
raise ApiError(code="invalid_argument",
message="The given task does not match the given study.")
if workflow_spec_model is None:
raise ApiError(code="workflow_model_error",
@@ -44,7 +49,6 @@ class CompleteTemplate(Script):
"within workflow specification '%s'") % (args[0], workflow_spec_model.id)
final_document_stream = self.make_template(BytesIO(file_data_model.data), task.data)
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
name=file_name,

View File

@@ -16,12 +16,13 @@ class FileService(object):
@staticmethod
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
name, content_type, binary_data, primary=False):
name, content_type, binary_data, primary=False, is_status=False):
"""Create a new file and associate it with a workflow spec."""
file_model = FileModel(
workflow_spec_id=workflow_spec.id,
name=name,
primary=primary
primary=primary,
is_status=is_status
)
if primary:
bpmn: ElementTree.Element = ElementTree.fromstring(binary_data)

View File

@@ -82,11 +82,11 @@ class ExampleDataLoader:
returns an array of data models to be added to the database."""
global file
file_service = FileService()
spec = WorkflowSpecModel(id=id,
name=name,
display_name=display_name,
description=description)
description=description,
is_status=id == 'status')
db.session.add(spec)
db.session.commit()
if not filepath:
@@ -95,13 +95,15 @@ class ExampleDataLoader:
for file_path in files:
noise, file_extension = os.path.splitext(file_path)
filename = os.path.basename(file_path)
is_primary = filename.lower() == id + ".bpmn"
is_status = filename.lower() == 'status.bpmn'
is_primary = filename.lower() == id + '.bpmn'
try:
file = open(file_path, "rb")
file = open(file_path, 'rb')
data = file.read()
content_type = CONTENT_TYPES[file_extension[1:]]
file_service.add_workflow_spec_file(workflow_spec=spec, name=filename, content_type=content_type,
binary_data=data, primary=is_primary)
binary_data=data, primary=is_primary, is_status=is_status)
except IsADirectoryError as de:
# Ignore sub directories
pass

View File

@@ -0,0 +1,41 @@
"""empty message
Revision ID: 5f06108116ae
Revises: 90dd63672e0a
Create Date: 2020-03-13 14:55:32.214380
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5f06108116ae'
down_revision = '90dd63672e0a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('workflow_spec_category',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('display_name', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.add_column('file', sa.Column('is_status', sa.Boolean(), nullable=True))
op.add_column('workflow_spec', sa.Column('is_status', sa.Boolean(), nullable=True))
op.add_column('workflow_spec', sa.Column('workflow_spec_category_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'workflow_spec', 'workflow_spec_category', ['workflow_spec_category_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'workflow_spec', type_='foreignkey')
op.drop_column('workflow_spec', 'workflow_spec_category_id')
op.drop_column('workflow_spec', 'is_status')
op.drop_column('file', 'is_status')
op.drop_table('workflow_spec_category')
# ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""empty message
Revision ID: 65f3fce6031a
Revises: 5f06108116ae
Create Date: 2020-03-15 12:40:42.314190
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '65f3fce6031a'
down_revision = '5f06108116ae'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('study', sa.Column('status_spec_id', sa.String(), nullable=True))
op.add_column('study', sa.Column('status_spec_version', sa.String(), nullable=True))
op.create_foreign_key(None, 'study', 'workflow_spec', ['status_spec_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'study', type_='foreignkey')
op.drop_column('study', 'status_spec_version')
op.drop_column('study', 'status_spec_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_data_security_plan" name="should_enable_crc2_training_session_data_security_plan">
<extensionElements>
<biodi:bounds x="190" y="80" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_1mjqwlv">
<input id="InputClause_18pwfqu" label="some_input">
<inputExpression id="LiteralExpression_1y84stb" typeRef="boolean" expressionLanguage="feel" />
</input>
<input id="InputClause_0ahp5b9">
<inputExpression id="LiteralExpression_1ptkp9l" typeRef="string">
<text></text>
</inputExpression>
</input>
<output id="OutputClause_05y0j7c" label="crc2_training_session_data_security_plan" name="crc2_training_session_data_security_plan" typeRef="boolean" />
<rule id="DecisionRule_17xsr74">
<inputEntry id="UnaryTests_05ldcq4">
<text>false</text>
</inputEntry>
<inputEntry id="UnaryTests_1ebers6">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_09oao3s">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0zavblw">
<inputEntry id="UnaryTests_09xdkib">
<text>true</text>
</inputEntry>
<inputEntry id="UnaryTests_1hqb6bs">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_0y2v9zc">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_enter_core_info" name="should_enable_crc2_training_session_enter_core_info">
<extensionElements>
<biodi:bounds x="170" y="60" width="180" height="80" />
</extensionElements>
<decisionTable id="decisionTable_1">
<input id="input_1" label="some_input">
<inputExpression id="inputExpression_1" typeRef="boolean" expressionLanguage="FEEL">
<text></text>
</inputExpression>
</input>
<output id="output_1" label="crc2_training_session_enter_core_info" name="crc2_training_session_enter_core_info" typeRef="boolean" />
<rule id="DecisionRule_10oo3ms">
<inputEntry id="UnaryTests_1ozg74s">
<text>false</text>
</inputEntry>
<outputEntry id="LiteralExpression_1d9565g">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_16ptqq8">
<inputEntry id="UnaryTests_1er4nj8">
<text>true</text>
</inputEntry>
<outputEntry id="LiteralExpression_1f34b59">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_sponsor_funding_source" name="should_enable_crc2_training_session_sponsor_funding_source">
<extensionElements>
<biodi:bounds x="190" y="70" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="some_input">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="boolean" expressionLanguage="feel" />
</input>
<output id="OutputClause_1ybi1ud" label="crc2_training_session_sponsor_funding_source" name="crc2_training_session_sponsor_funding_source" typeRef="boolean" />
<rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2">
<text>false</text>
</inputEntry>
<outputEntry id="LiteralExpression_1pweuqc">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0j9wah0">
<inputEntry id="UnaryTests_0dzq6w4">
<text>true</text>
</inputEntry>
<outputEntry id="LiteralExpression_119lstj">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@@ -0,0 +1,121 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kudwnk" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_0jhpidf" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1ees8ka</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1ees8ka" sourceRef="StartEvent_1" targetRef="Activity_00rh8pw" />
<bpmn:businessRuleTask id="Activity_1k5eeun" name="Enable crc2_training_session_data_security_plan" camunda:decisionRef="should_enable_crc2_training_session_data_security_plan">
<bpmn:incoming>Flow_1nimppb</bpmn:incoming>
<bpmn:outgoing>Flow_1txrak2</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enable crc2_training_session_enter_core_info" camunda:decisionRef="should_enable_crc2_training_session_enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
<bpmn:outgoing>Flow_1sggkit</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:businessRuleTask id="Activity_16cm213" name="Enable crc2_training_session_sponsor_funding_source" camunda:decisionRef="should_enable_crc2_training_session_sponsor_funding_source">
<bpmn:incoming>Flow_18pl92p</bpmn:incoming>
<bpmn:outgoing>Flow_0x9580l</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:parallelGateway id="Gateway_1nta7st">
<bpmn:incoming>Flow_024q2cw</bpmn:incoming>
<bpmn:outgoing>Flow_1m8285h</bpmn:outgoing>
<bpmn:outgoing>Flow_1nimppb</bpmn:outgoing>
<bpmn:outgoing>Flow_18pl92p</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_1m8285h" sourceRef="Gateway_1nta7st" targetRef="Activity_1yqy50i" />
<bpmn:sequenceFlow id="Flow_1nimppb" sourceRef="Gateway_1nta7st" targetRef="Activity_1k5eeun" />
<bpmn:sequenceFlow id="Flow_18pl92p" sourceRef="Gateway_1nta7st" targetRef="Activity_16cm213" />
<bpmn:parallelGateway id="Gateway_12tpgcy">
<bpmn:incoming>Flow_1txrak2</bpmn:incoming>
<bpmn:incoming>Flow_1sggkit</bpmn:incoming>
<bpmn:incoming>Flow_0x9580l</bpmn:incoming>
<bpmn:outgoing>Flow_0pwtiqm</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:endEvent id="Event_135x8jg">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0pwtiqm" sourceRef="Gateway_12tpgcy" targetRef="Event_135x8jg" />
<bpmn:sequenceFlow id="Flow_0x9580l" sourceRef="Activity_16cm213" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_1txrak2" sourceRef="Activity_1k5eeun" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_1sggkit" sourceRef="Activity_1yqy50i" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_024q2cw" sourceRef="Activity_00rh8pw" targetRef="Gateway_1nta7st" />
<bpmn:userTask id="Activity_00rh8pw" name="Enable specs" camunda:formKey="form_key">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="some_input" label="some_input" type="boolean" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>Flow_024q2cw</bpmn:outgoing>
</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0jhpidf">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="211" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1ees8ka_di" bpmnElement="SequenceFlow_1ees8ka">
<di:waypoint x="188" y="229" />
<di:waypoint x="230" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1k5eeun_di" bpmnElement="Activity_1k5eeun">
<dc:Bounds x="460" y="189" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yqy50i_di" bpmnElement="Activity_1yqy50i">
<dc:Bounds x="460" y="80" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_16cm213_di" bpmnElement="Activity_16cm213">
<dc:Bounds x="460" y="300" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1m22g4p_di" bpmnElement="Gateway_1nta7st">
<dc:Bounds x="378" y="204" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1m8285h_di" bpmnElement="Flow_1m8285h">
<di:waypoint x="403" y="204" />
<di:waypoint x="403" y="120" />
<di:waypoint x="460" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1nimppb_di" bpmnElement="Flow_1nimppb">
<di:waypoint x="428" y="229" />
<di:waypoint x="460" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_18pl92p_di" bpmnElement="Flow_18pl92p">
<di:waypoint x="403" y="254" />
<di:waypoint x="403" y="340" />
<di:waypoint x="460" y="340" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1kk6x70_di" bpmnElement="Gateway_12tpgcy">
<dc:Bounds x="595" y="204" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_135x8jg_di" bpmnElement="Event_135x8jg">
<dc:Bounds x="682" y="211" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0pwtiqm_di" bpmnElement="Flow_0pwtiqm">
<di:waypoint x="645" y="229" />
<di:waypoint x="682" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0x9580l_di" bpmnElement="Flow_0x9580l">
<di:waypoint x="560" y="340" />
<di:waypoint x="620" y="340" />
<di:waypoint x="620" y="254" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1txrak2_di" bpmnElement="Flow_1txrak2">
<di:waypoint x="560" y="229" />
<di:waypoint x="595" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1sggkit_di" bpmnElement="Flow_1sggkit">
<di:waypoint x="560" y="120" />
<di:waypoint x="620" y="120" />
<di:waypoint x="620" y="204" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_024q2cw_di" bpmnElement="Flow_024q2cw">
<di:waypoint x="330" y="229" />
<di:waypoint x="378" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0vfs7g0_di" bpmnElement="Activity_00rh8pw">
<dc:Bounds x="230" y="189" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -3,7 +3,7 @@ from datetime import datetime, timezone
from unittest.mock import patch, Mock
from crc import session
from crc.models.api_models import WorkflowApiSchema
from crc.models.api_models import WorkflowApiSchema, WorkflowApi
from crc.models.study import StudyModel, StudyModelSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
ProtocolBuilderStudySchema
@@ -160,9 +160,6 @@ class TestStudyApi(BaseTest):
rv = self.app.delete('/v1.0/study/%i' % study.id)
self.assert_failure(rv, error_code="study_integrity_error")
def test_delete_workflow(self):
self.load_example_data()
study = session.query(StudyModel).first()
@@ -207,3 +204,79 @@ class TestStudyApi(BaseTest):
json_data_after = json.loads(response_after.get_data(as_text=True))
workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
self.assertEqual(1, len(workflows_after))
"""
Workflow specs that have been made available (or not) to a particular study via the status.bpmn
workflow should be flagged as active (or not) when the study's list of workflows is retrieved.
"""
def test_workflow_spec_status(self):
self.load_example_data()
study = session.query(StudyModel).first()
study_id = study.id
# Add status workflow
self.load_test_spec('status')
# Add status workflow to the study
status_spec = session.query(WorkflowSpecModel).filter_by(is_status=True).first()
add_status_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(status_spec)))
self.assert_success(add_status_response)
json_data_status = json.loads(add_status_response.get_data(as_text=True))
status_workflow: WorkflowApi = WorkflowApiSchema().load(json_data_status)
self.assertIsNotNone(status_workflow)
self.assertIsNotNone(status_workflow.workflow_spec_id)
self.assertIsNotNone(status_workflow.spec_version)
self.assertIsNotNone(status_workflow.next_task)
self.assertIsNotNone(status_workflow.next_task['id'])
status_task_id = status_workflow.next_task['id']
# Verify that the study status spec is populated
updated_study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
self.assertIsNotNone(updated_study)
self.assertIsNotNone(updated_study.status_spec_id)
self.assertIsNotNone(updated_study.status_spec_version)
self.assertEqual(updated_study.status_spec_id, status_workflow.workflow_spec_id)
self.assertEqual(updated_study.status_spec_version, status_workflow.spec_version)
# Add all available non-status workflows to the study
specs = session.query(WorkflowSpecModel).filter_by(is_status=False).all()
for spec in specs:
add_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(add_response)
for is_active in [False, True]:
# Set all workflow specs to inactive|active
update_status_response = self.app.put('/v1.0/workflow/%i/task/%s/data' % (status_workflow.id, status_task_id),
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps({'some_input': is_active}))
self.assert_success(update_status_response)
json_workflow_api = json.loads(update_status_response.get_data(as_text=True))
updated_workflow_api: WorkflowApi = WorkflowApiSchema().load(json_workflow_api)
self.assertIsNotNone(updated_workflow_api)
self.assertEqual(updated_workflow_api.status, WorkflowStatus.complete)
self.assertIsNotNone(updated_workflow_api.last_task)
self.assertIsNotNone(updated_workflow_api.last_task['data'])
self.assertIsNotNone(updated_workflow_api.last_task['data']['some_input'])
self.assertEqual(updated_workflow_api.last_task['data']['some_input'], is_active)
# List workflows for study
response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(response_after)
json_data_after = json.loads(response_after.get_data(as_text=True))
workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
self.assertEqual(len(specs), len(workflows_after))
# All workflows should be inactive|active
for workflow in workflows_after:
self.assertEqual(workflow.is_active, is_active)

View File

@@ -159,7 +159,7 @@ class TestTasksApi(BaseTest):
}
workflow_api = self.complete_form(workflow, tasks[0], data)
self.assertIsNotNone(workflow_api.next_task)
self.assertEquals("EndEvent_0evb22x", workflow_api.next_task['name'])
self.assertEqual("EndEvent_0evb22x", workflow_api.next_task['name'])
self.assertTrue(workflow_api.status == WorkflowStatus.complete)
rv = self.app.get('/v1.0/file?workflow_id=%i' % workflow.id, headers=self.logged_in_headers())
self.assert_success(rv)

View File

@@ -165,7 +165,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
with self.assertRaises(ApiError) as context:
WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.assertEquals("workflow_validation_error", context.exception.code)
self.assertEqual("workflow_validation_error", context.exception.code)
self.assertTrue("bpmn:startEvent" in context.exception.message)
def test_workflow_spec_key_error(self):
@@ -311,14 +311,14 @@ class TestWorkflowProcessor(BaseTest):
# Setting up another processor should not error out, but doesn't pick up the update.
workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(workflow_model)
self.assertEquals("Step 1", processor2.bpmn_workflow.last_task.task_spec.description)
self.assertNotEquals("# This is some documentation I wanted to add.",
self.assertEqual("Step 1", processor2.bpmn_workflow.last_task.task_spec.description)
self.assertNotEqual("# This is some documentation I wanted to add.",
processor2.bpmn_workflow.last_task.task_spec.documentation)
# You can do a soft update and get the right response.
processor3 = WorkflowProcessor(workflow_model, soft_reset=True)
self.assertEquals("Step 1", processor3.bpmn_workflow.last_task.task_spec.description)
self.assertEquals("# This is some documentation I wanted to add.",
self.assertEqual("Step 1", processor3.bpmn_workflow.last_task.task_spec.description)
self.assertEqual("# This is some documentation I wanted to add.",
processor3.bpmn_workflow.last_task.task_spec.documentation)
@@ -336,7 +336,7 @@ class TestWorkflowProcessor(BaseTest):
task.data = {"color": "blue"}
processor.complete_task(task)
next_task = processor.next_task()
self.assertEquals("Step 2", next_task.task_spec.description)
self.assertEqual("Step 2", next_task.task_spec.description)
# Modify the specification, with a major change that alters the flow and can't be serialized effectively.
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn')
@@ -349,13 +349,40 @@ class TestWorkflowProcessor(BaseTest):
# Do a hard reset, which should bring us back to the beginning, but retain the data.
processor3 = WorkflowProcessor(workflow_model, hard_reset=True)
self.assertEquals("Step 1", processor3.next_task().task_spec.description)
self.assertEquals({"color": "blue"}, processor3.next_task().data)
self.assertEqual("Step 1", processor3.next_task().task_spec.description)
self.assertEqual({"color": "blue"}, processor3.next_task().data)
processor3.complete_task(processor3.next_task())
self.assertEquals("New Step", processor3.next_task().task_spec.description)
self.assertEquals({"color": "blue"}, processor3.next_task().data)
self.assertEqual("New Step", processor3.next_task().task_spec.description)
self.assertEqual({"color": "blue"}, processor3.next_task().data)
def test_get_latest_spec_version(self):
workflow_spec_model = self.load_test_spec("two_forms")
version = WorkflowProcessor.get_latest_version_string("two_forms")
self.assertTrue(version.startswith("v1 "))
def test_status_bpmn(self):
self.load_example_data()
specs = session.query(WorkflowSpecModel).all()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("status")
for enabled in [True, False]:
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
task = processor.next_task()
# Turn all specs on or off
task.data = {"some_input": enabled}
processor.complete_task(task)
# Finish out rest of workflow
while processor.get_status() == WorkflowStatus.waiting:
task = processor.next_task()
processor.complete_task(task)
self.assertEqual(processor.get_status(), WorkflowStatus.complete)
# Enabled status of all specs should match the value set in the first task
for spec in specs:
self.assertEqual(task.data[spec.id], enabled)

View File

@@ -2,7 +2,7 @@ import json
from crc import session
from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from tests.base_test import BaseTest
@@ -25,7 +25,7 @@ class TestWorkflowSpec(BaseTest):
def test_add_new_workflow_specification(self):
self.load_example_data();
num_before = session.query(WorkflowSpecModel).count()
spec = WorkflowSpecModel(id='make_cookies', display_name='Cooooookies',
spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies',
description='Om nom nom delicious cookies')
rv = self.app.post('/v1.0/workflow-specification',
headers=self.logged_in_headers(),
@@ -46,6 +46,32 @@ class TestWorkflowSpec(BaseTest):
api_spec = WorkflowSpecModelSchema().load(json_data, session=session)
self.assertEqual(db_spec, api_spec)
def test_update_workflow_specification(self):
self.load_example_data()
category = WorkflowSpecCategoryModel(id=0, name='trap', display_name="It's a trap!")
session.add(category)
session.commit()
db_spec_before: WorkflowSpecModel = session.query(WorkflowSpecModel).first()
spec_id = db_spec_before.id
self.assertIsNone(db_spec_before.workflow_spec_category_id)
db_spec_before.workflow_spec_category_id = 0
rv = self.app.put('/v1.0/workflow-specification/%s' % spec_id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(db_spec_before)))
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
api_spec = WorkflowSpecModelSchema().load(json_data, session=session)
self.assertEqual(db_spec_before, api_spec)
db_spec_after: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
self.assertIsNotNone(db_spec_after.workflow_spec_category_id)
self.assertIsNotNone(db_spec_after.workflow_spec_category)
self.assertEqual(db_spec_after.workflow_spec_category.display_name, category.display_name)
def test_delete_workflow_specification(self):
self.load_example_data()
spec_id = 'random_fact'