Merge remote-tracking branch 'origin/master' into feature/reference_files

This commit is contained in:
Dan Funk 2020-03-18 15:16:34 -04:00
commit 02be8ede75
27 changed files with 885 additions and 73 deletions

View File

@ -15,6 +15,7 @@ connexion_app = connexion.FlaskApp(__name__)
app = connexion_app.app app = connexion_app.app
app.config.from_object('config.default') app.config.from_object('config.default')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
auth = HTTPTokenAuth('Bearer') auth = HTTPTokenAuth('Bearer')
if "TESTING" in os.environ and os.environ["TESTING"] == "true": if "TESTING" in os.environ and os.environ["TESTING"] == "true":

View File

@ -443,11 +443,14 @@ paths:
content: content:
multipart/form-data: multipart/form-data:
schema: schema:
x-body-name: file
type: object type: object
properties: properties:
file: file:
type: string type: string
format: binary format: binary
required:
- file
responses: responses:
'200': '200':
description: Returns the actual file description: Returns the actual file
@ -561,6 +564,27 @@ paths:
responses: responses:
'204': '204':
description: The workflow was removed description: The workflow was removed
/workflow/{workflow_id}/stats:
parameters:
- name: workflow_id
in: path
required: true
description: The id of the workflow
schema:
type: integer
format: int32
get:
operationId: crc.api.stats.get_workflow_stats
summary: Stats about a given workflow
tags:
- Workflows and Tasks
responses:
'200':
description: Returns counts of complete, incomplete, and total number of tasks
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
# /v1.0/workflow/0/task/0 # /v1.0/workflow/0/task/0
/workflow/{workflow_id}/task/{task_id}: /workflow/{workflow_id}/task/{task_id}:
parameters: parameters:
@ -756,10 +780,31 @@ components:
properties: properties:
id: id:
type: string type: string
name:
type: string
display_name: display_name:
type: string type: string
description: description:
type: string type: string
primary_process_id:
type: string
nullable: true
workflow_spec_category_id:
type: integer
nullable: true
workflow_spec_category:
$ref: "#/components/schemas/WorkflowSpecCategory"
is_status:
type: boolean
nullable: true
WorkflowSpecCategory:
properties:
id:
type: integer
name:
type: string
display_name:
type: string
File: File:
properties: properties:
id: id:
@ -810,6 +855,12 @@ components:
type: string type: string
is_latest_spec: is_latest_spec:
type: boolean type: boolean
num_tasks_total:
type: integer
num_tasks_complete:
type: integer
num_tasks_incomplete:
type: integer
example: example:
id: 291234 id: 291234

59
crc/api/stats.py Normal file
View File

@ -0,0 +1,59 @@
from datetime import datetime
from flask import g
from crc import session, auth
from crc.models.stats import WorkflowStatsModel, WorkflowStatsModelSchema, TaskEventModel, TaskEventModelSchema
@auth.login_required
def get_workflow_stats(workflow_id):
    """Return the serialized stats record for the given workflow id.

    Fetches the first WorkflowStatsModel row whose workflow_id matches.
    NOTE(review): if no row exists, `None` is handed to the schema's dump --
    confirm the caller is happy with that payload.
    """
    stats_record = (
        session.query(WorkflowStatsModel)
        .filter(WorkflowStatsModel.workflow_id == workflow_id)
        .first()
    )
    schema = WorkflowStatsModelSchema()
    return schema.dump(stats_record)
@auth.login_required
def update_workflow_stats(workflow_model, workflow_api_model):
    """Create or refresh the stats row for one (study, workflow, spec, version).

    Recounts the workflow's user tasks, stamps the update time, persists the
    row, and returns its serialized form.
    """
    # The four columns that identify a unique stats row.
    lookup = dict(
        study_id=workflow_model.study_id,
        workflow_id=workflow_model.id,
        workflow_spec_id=workflow_model.workflow_spec_id,
        spec_version=workflow_model.spec_version,
    )
    stats = session.query(WorkflowStatsModel).filter_by(**lookup).first()
    if stats is None:
        # First time we've seen this exact spec version for this workflow.
        stats = WorkflowStatsModel(**lookup)

    # States counted as done vs. still pending.  NOTE(review): CANCELLED is
    # treated as complete here -- confirm that is the intended semantics.
    complete_states = {'CANCELLED', 'COMPLETED'}
    incomplete_states = {'MAYBE', 'LIKELY', 'FUTURE', 'WAITING', 'READY'}

    task_list = list(workflow_api_model.user_tasks)
    stats.num_tasks_total = len(task_list)
    stats.num_tasks_complete = len([t for t in task_list if t.state in complete_states])
    stats.num_tasks_incomplete = len([t for t in task_list if t.state in incomplete_states])
    stats.last_updated = datetime.now()

    session.add(stats)
    session.commit()
    return WorkflowStatsModelSchema().dump(stats)
@auth.login_required
def log_task_complete(workflow_model, task_id):
    """Record a COMPLETE task event for the current user and return its dump.

    The acting user is read from flask.g (populated by the auth layer).
    """
    event_fields = {
        'study_id': workflow_model.study_id,
        'user_uid': g.user.uid,
        'workflow_id': workflow_model.id,
        'workflow_spec_id': workflow_model.workflow_spec_id,
        'spec_version': workflow_model.spec_version,
        'task_id': task_id,
        'task_state': 'COMPLETE',
        'date': datetime.now(),
    }
    event = TaskEventModel(**event_fields)
    session.add(event)
    session.commit()
    return TaskEventModelSchema().dump(event)

View File

@ -23,16 +23,45 @@ def all_studies():
@auth.login_required @auth.login_required
def add_study(body): def add_study(body):
study = StudyModelSchema().load(body, session=session) study: StudyModel = StudyModelSchema().load(body, session=session)
status_spec = __get_status_spec(study.status_spec_id)
# Get latest status spec version
if status_spec is not None:
study.status_spec_id = status_spec.id
study.status_spec_version = WorkflowProcessor.get_latest_version_string(status_spec.id)
session.add(study) session.add(study)
session.commit() session.commit()
# FIXME: We need to ask the protocol builder what workflows to add to the study, not just add them all. __add_study_workflows_from_status(study.id, status_spec)
for spec in session.query(WorkflowSpecModel).all():
WorkflowProcessor.create(study.id, spec.id)
return StudyModelSchema().dump(study) return StudyModelSchema().dump(study)
def __get_status_spec(status_spec_id):
if status_spec_id is None:
return session.query(WorkflowSpecModel).filter_by(is_status=True).first()
else:
return session.query(WorkflowSpecModel).filter_by(id=status_spec_id).first()
def __add_study_workflows_from_status(study_id, status_spec):
all_specs = session.query(WorkflowSpecModel).all()
if status_spec is not None:
# Run status spec to get list of workflow specs applicable to this study
status_processor = WorkflowProcessor.create(study_id, status_spec)
status_processor.do_engine_steps()
status_data = status_processor.next_task().data
# Only add workflow specs listed in status spec
for spec in all_specs:
if spec.id in status_data and status_data[spec.id]:
WorkflowProcessor.create(study_id, spec.id)
else:
# No status spec. Just add all workflows.
for spec in all_specs:
WorkflowProcessor.create(study_id, spec.id)
@auth.login_required @auth.login_required
def update_study(study_id, body): def update_study(study_id, body):
@ -131,23 +160,63 @@ def post_update_study_from_protocol_builder(study_id):
@auth.login_required @auth.login_required
def get_study_workflows(study_id): def get_study_workflows(study_id):
workflow_models = session.query(WorkflowModel).filter_by(study_id=study_id).all()
# Get study
study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
# Get study status spec
status_spec: WorkflowSpecModel = session.query(WorkflowSpecModel)\
.filter_by(is_status=True).first()
status_data = None
if status_spec is not None:
# Run status spec
status_workflow_model: WorkflowModel = session.query(WorkflowModel)\
.filter_by(study_id=study.id)\
.filter_by(workflow_spec_id=status_spec.id)\
.first()
status_processor = WorkflowProcessor(status_workflow_model)
# Get list of active workflow specs for study
status_processor.do_engine_steps()
status_data = status_processor.bpmn_workflow.last_task.data
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.filter(WorkflowModel.workflow_spec_id != status_spec.id)\
.all()
else:
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.all()
api_models = [] api_models = []
for workflow_model in workflow_models: for workflow_model in workflow_models:
processor = WorkflowProcessor(workflow_model, processor = WorkflowProcessor(workflow_model,
workflow_model.bpmn_workflow_json) workflow_model.bpmn_workflow_json)
api_models.append(__get_workflow_api_model(processor)) api_models.append(__get_workflow_api_model(processor, status_data))
schema = WorkflowApiSchema(many=True) schema = WorkflowApiSchema(many=True)
return schema.dump(api_models) return schema.dump(api_models)
@auth.login_required @auth.login_required
def add_workflow_to_study(study_id, body): def add_workflow_to_study(study_id, body):
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id=body["id"]).first() workflow_spec_model: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=body["id"]).first()
if workflow_spec_model is None: if workflow_spec_model is None:
error = ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.') error = ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.')
return ApiErrorSchema.dump(error), 404 return ApiErrorSchema.dump(error), 404
processor = WorkflowProcessor.create(study_id, workflow_spec_model.id) processor = WorkflowProcessor.create(study_id, workflow_spec_model.id)
# If workflow spec is a status spec, update the study status spec
if workflow_spec_model.is_status:
study = session.query(StudyModel).filter_by(id=study_id).first()
study.status_spec_id = workflow_spec_model.id
study.status_spec_version = processor.get_spec_version()
session.add(study)
session.commit()
return WorkflowApiSchema().dump(__get_workflow_api_model(processor)) return WorkflowApiSchema().dump(__get_workflow_api_model(processor))
@ -194,3 +263,4 @@ def map_pb_study_to_study(pb_study):
study_info['inactive'] = False study_info['inactive'] = False
return study_info return study_info

View File

@ -1,12 +1,13 @@
import uuid import uuid
from crc.api.file import delete_file from crc.api.stats import update_workflow_stats, log_task_complete
from crc import session from crc import session, auth
from crc.api.common import ApiError, ApiErrorSchema from crc.api.common import ApiError, ApiErrorSchema
from crc.api.file import delete_file
from crc.models.api_models import Task, WorkflowApi, WorkflowApiSchema from crc.models.api_models import Task, WorkflowApi, WorkflowApiSchema
from crc.models.file import FileModel
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel
from crc.services.workflow_processor import WorkflowProcessor from crc.services.workflow_processor import WorkflowProcessor
from crc.models.file import FileModel
def all_specifications(): def all_specifications():
@ -14,13 +15,16 @@ def all_specifications():
return schema.dump(session.query(WorkflowSpecModel).all()) return schema.dump(session.query(WorkflowSpecModel).all())
@auth.login_required
def add_workflow_specification(body): def add_workflow_specification(body):
new_spec = WorkflowSpecModelSchema().load(body, session=session) new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
new_spec.is_status = new_spec.id == 'status'
session.add(new_spec) session.add(new_spec)
session.commit() session.commit()
return WorkflowSpecModelSchema().dump(new_spec) return WorkflowSpecModelSchema().dump(new_spec)
@auth.login_required
def get_workflow_specification(spec_id): def get_workflow_specification(spec_id):
if spec_id is None: if spec_id is None:
error = ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.') error = ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
@ -35,6 +39,7 @@ def get_workflow_specification(spec_id):
return WorkflowSpecModelSchema().dump(spec) return WorkflowSpecModelSchema().dump(spec)
@auth.login_required
def update_workflow_specification(spec_id, body): def update_workflow_specification(spec_id, body):
spec = WorkflowSpecModelSchema().load(body, session=session) spec = WorkflowSpecModelSchema().load(body, session=session)
spec.id = spec_id spec.id = spec_id
@ -43,6 +48,7 @@ def update_workflow_specification(spec_id, body):
return WorkflowSpecModelSchema().dump(spec) return WorkflowSpecModelSchema().dump(spec)
@auth.login_required
def delete_workflow_specification(spec_id): def delete_workflow_specification(spec_id):
if spec_id is None: if spec_id is None:
error = ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.') error = ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
@ -61,13 +67,17 @@ def delete_workflow_specification(spec_id):
session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).delete() session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).delete()
session.query(WorkflowSpecModel).filter_by(id=spec_id).delete() session.query(WorkflowSpecModel).filter_by(id=spec_id).delete()
session.commit() session.commit()
def __get_workflow_api_model(processor: WorkflowProcessor): def __get_workflow_api_model(processor: WorkflowProcessor, status_data=None):
spiff_tasks = processor.get_all_user_tasks() spiff_tasks = processor.get_all_user_tasks()
user_tasks = map(Task.from_spiff, spiff_tasks) user_tasks = list(map(Task.from_spiff, spiff_tasks))
is_active = True
if status_data is not None and processor.workflow_spec_id in status_data:
is_active = status_data[processor.workflow_spec_id]
workflow_api = WorkflowApi( workflow_api = WorkflowApi(
id=processor.get_workflow_id(), id=processor.get_workflow_id(),
status=processor.get_status(), status=processor.get_status(),
@ -76,29 +86,36 @@ def __get_workflow_api_model(processor: WorkflowProcessor):
user_tasks=user_tasks, user_tasks=user_tasks,
workflow_spec_id=processor.workflow_spec_id, workflow_spec_id=processor.workflow_spec_id,
spec_version=processor.get_spec_version(), spec_version=processor.get_spec_version(),
is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id) is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id),
is_active=is_active
) )
if processor.next_task(): if processor.next_task():
workflow_api.next_task = Task.from_spiff(processor.next_task()) workflow_api.next_task = Task.from_spiff(processor.next_task())
return workflow_api return workflow_api
@auth.login_required
def get_workflow(workflow_id, soft_reset=False, hard_reset=False): def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
schema = WorkflowApiSchema() workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset) processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
return schema.dump(__get_workflow_api_model(processor)) workflow_api_model = __get_workflow_api_model(processor)
update_workflow_stats(workflow_model, workflow_api_model)
return WorkflowApiSchema().dump(workflow_api_model)
@auth.login_required
def delete(workflow_id): def delete(workflow_id):
session.query(WorkflowModel).filter_by(id=workflow_id).delete() session.query(WorkflowModel).filter_by(id=workflow_id).delete()
session.commit() session.commit()
@auth.login_required
def get_task(workflow_id, task_id): def get_task(workflow_id, task_id):
workflow = session.query(WorkflowModel).filter_by(id=workflow_id).first() workflow = session.query(WorkflowModel).filter_by(id=workflow_id).first()
return workflow.bpmn_workflow().get_task(task_id) return workflow.bpmn_workflow().get_task(task_id)
@auth.login_required
def update_task(workflow_id, task_id, body): def update_task(workflow_id, task_id, body):
workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first() workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model) processor = WorkflowProcessor(workflow_model)
@ -111,5 +128,8 @@ def update_task(workflow_id, task_id, body):
workflow_model.bpmn_workflow_json = processor.serialize() workflow_model.bpmn_workflow_json = processor.serialize()
session.add(workflow_model) session.add(workflow_model)
session.commit() session.commit()
return WorkflowApiSchema().dump(__get_workflow_api_model(processor)
) workflow_api_model = __get_workflow_api_model(processor)
update_workflow_stats(workflow_model, workflow_api_model)
log_task_complete(workflow_model, task_id)
return WorkflowApiSchema().dump(workflow_api_model)

View File

@ -95,7 +95,8 @@ class TaskSchema(ma.Schema):
class WorkflowApi(object): class WorkflowApi(object):
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version, is_latest_spec): def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version,
is_latest_spec, is_active):
self.id = id self.id = id
self.status = status self.status = status
self.user_tasks = user_tasks self.user_tasks = user_tasks
@ -104,15 +105,14 @@ class WorkflowApi(object):
self.workflow_spec_id = workflow_spec_id self.workflow_spec_id = workflow_spec_id
self.spec_version = spec_version self.spec_version = spec_version
self.is_latest_spec = is_latest_spec self.is_latest_spec = is_latest_spec
self.is_active = is_active
class WorkflowApiSchema(ma.Schema): class WorkflowApiSchema(ma.Schema):
class Meta: class Meta:
model = WorkflowApi model = WorkflowApi
fields = ["id", "status", fields = ["id", "status", "user_tasks", "last_task", "next_task",
"user_tasks", "last_task", "next_task", "workflow_spec_id", "spec_version", "is_latest_spec", "is_active"]
"workflow_spec_id",
"spec_version", "is_latest_spec"]
unknown = INCLUDE unknown = INCLUDE
status = EnumField(WorkflowStatus) status = EnumField(WorkflowStatus)
@ -122,4 +122,7 @@ class WorkflowApiSchema(ma.Schema):
@marshmallow.post_load @marshmallow.post_load
def make_workflow(self, data, **kwargs): def make_workflow(self, data, **kwargs):
return WorkflowApi(**data) keys = ['id', 'status', 'user_tasks', 'last_task', 'next_task',
'workflow_spec_id', 'spec_version', 'is_latest_spec', "is_active"]
filtered_fields = {key: data[key] for key in keys}
return WorkflowApi(**filtered_fields)

View File

@ -1,7 +1,7 @@
import enum import enum
from marshmallow_enum import EnumField from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import ModelSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func from sqlalchemy import func
from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.dialects.postgresql import UUID
@ -76,6 +76,8 @@ class FileModel(db.Model):
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String) name = db.Column(db.String)
type = db.Column(db.Enum(FileType)) type = db.Column(db.Enum(FileType))
primary = db.Column(db.Boolean)
is_status = db.Column(db.Boolean)
content_type = db.Column(db.String) content_type = db.Column(db.String)
is_reference = db.Column(db.Boolean, nullable=False, default=False) # A global reference file. is_reference = db.Column(db.Boolean, nullable=False, default=False) # A global reference file.
primary = db.Column(db.Boolean) # Is this the primary BPMN in a workflow? primary = db.Column(db.Boolean) # Is this the primary BPMN in a workflow?
@ -87,8 +89,10 @@ class FileModel(db.Model):
latest_version = db.Column(db.Integer, default=0) latest_version = db.Column(db.Integer, default=0)
class FileModelSchema(ModelSchema): class FileModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:
model = FileModel model = FileModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys include_fk = True # Includes foreign keys
type = EnumField(FileType) type = EnumField(FileType)

45
crc/models/stats.py Normal file
View File

@ -0,0 +1,45 @@
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db
class WorkflowStatsModel(db.Model):
    """Aggregated task counts for one (study, workflow, spec, version) tuple.

    Rows are created/refreshed by crc.api.stats.update_workflow_stats.
    """
    __tablename__ = 'workflow_stats'
    id = db.Column(db.Integer, primary_key=True)
    study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=False)
    workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False)
    workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
    # Spec version string the counts below were computed against.
    spec_version = db.Column(db.String)
    num_tasks_total = db.Column(db.Integer)       # all user tasks in the workflow
    num_tasks_complete = db.Column(db.Integer)    # tasks in a complete state
    num_tasks_incomplete = db.Column(db.Integer)  # tasks still pending
    last_updated = db.Column(db.DateTime)         # timestamp of the last recount
class WorkflowStatsModelSchema(SQLAlchemyAutoSchema):
    """Marshmallow auto-schema for WorkflowStatsModel (load/dump)."""
    class Meta:
        model = WorkflowStatsModel
        load_instance = True
        include_relationships = True
        include_fk = True  # Includes foreign keys
class TaskEventModel(db.Model):
    """Audit record of a single task-state event performed by a user.

    Written by crc.api.stats.log_task_complete when a task is completed.
    """
    __tablename__ = 'task_event'
    id = db.Column(db.Integer, primary_key=True)
    study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=False)
    user_uid = db.Column(db.String, db.ForeignKey('user.uid'), nullable=False)
    workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False)
    workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
    spec_version = db.Column(db.String)
    task_id = db.Column(db.String)     # id of the task within the workflow
    task_state = db.Column(db.String)  # e.g. 'COMPLETE' at time of logging
    date = db.Column(db.DateTime)      # when the event was recorded
class TaskEventModelSchema(SQLAlchemyAutoSchema):
    """Marshmallow auto-schema for TaskEventModel (load/dump)."""
    class Meta:
        model = TaskEventModel
        load_instance = True
        include_relationships = True
        include_fk = True  # Includes foreign keys

View File

@ -1,5 +1,5 @@
from marshmallow_enum import EnumField from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import ModelSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func from sqlalchemy import func
from crc import db from crc import db
@ -20,13 +20,15 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True) investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
inactive = db.Column(db.Boolean, default=False) inactive = db.Column(db.Boolean, default=False)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True) requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
status_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
status_spec_version = db.Column(db.String)
class StudyModelSchema(ModelSchema): class StudyModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:
model = StudyModel model = StudyModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys include_fk = True # Includes foreign keys
protocol_builder_status = EnumField(ProtocolBuilderStatus) protocol_builder_status = EnumField(ProtocolBuilderStatus)

View File

@ -1,7 +1,7 @@
import datetime import datetime
import jwt import jwt
from marshmallow_sqlalchemy import ModelSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db, app from crc import db, app
from crc.api.common import ApiError from crc.api.common import ApiError
@ -52,7 +52,9 @@ class UserModel(db.Model):
raise ApiError('token_invalid', 'The Authentication token you provided. You need a new token. ') raise ApiError('token_invalid', 'The Authentication token you provided. You need a new token. ')
class UserModelSchema(ModelSchema): class UserModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:
model = UserModel model = UserModel
load_instance = True
include_relationships = True

View File

@ -1,9 +1,22 @@
import enum import enum
from marshmallow_sqlalchemy import ModelSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db from crc import db
class WorkflowSpecCategoryModel(db.Model):
__tablename__ = 'workflow_spec_category'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String)
display_name = db.Column(db.String)
class WorkflowSpecCategoryModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = WorkflowSpecCategoryModel
load_instance = True
include_relationships = True
class WorkflowSpecModel(db.Model): class WorkflowSpecModel(db.Model):
__tablename__ = 'workflow_spec' __tablename__ = 'workflow_spec'
@ -12,11 +25,17 @@ class WorkflowSpecModel(db.Model):
display_name = db.Column(db.String) display_name = db.Column(db.String)
description = db.Column(db.Text) description = db.Column(db.Text)
primary_process_id = db.Column(db.String) primary_process_id = db.Column(db.String)
workflow_spec_category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
workflow_spec_category = db.relationship("WorkflowSpecCategoryModel")
is_status = db.Column(db.Boolean, default=False)
class WorkflowSpecModelSchema(ModelSchema): class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:
model = WorkflowSpecModel model = WorkflowSpecModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
class WorkflowStatus(enum.Enum): class WorkflowStatus(enum.Enum):
@ -34,4 +53,3 @@ class WorkflowModel(db.Model):
study_id = db.Column(db.Integer, db.ForeignKey('study.id')) study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id')) workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
spec_version = db.Column(db.String) spec_version = db.Column(db.String)

View File

@ -28,6 +28,11 @@ class CompleteTemplate(Script):
"the name of the docx template to use.") "the name of the docx template to use.")
file_name = args[0] file_name = args[0]
workflow_spec_model = self.find_spec_model_in_db(task.workflow) workflow_spec_model = self.find_spec_model_in_db(task.workflow)
task_study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
if task_study_id != study_id:
raise ApiError(code="invalid_argument",
message="The given task does not match the given study.")
if workflow_spec_model is None: if workflow_spec_model is None:
raise ApiError(code="workflow_model_error", raise ApiError(code="workflow_model_error",
@ -44,7 +49,6 @@ class CompleteTemplate(Script):
"within workflow specification '%s'") % (args[0], workflow_spec_model.id) "within workflow specification '%s'") % (args[0], workflow_spec_model.id)
final_document_stream = self.make_template(BytesIO(file_data_model.data), task.data) final_document_stream = self.make_template(BytesIO(file_data_model.data), task.data)
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id, FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
name=file_name, name=file_name,

View File

@ -44,7 +44,8 @@ class StudyInfo(Script):
def get_required_docs(self, study_id): def get_required_docs(self, study_id):
required_docs = self.pb.get_required_docs(study_id) required_docs = self.pb.get_required_docs(study_id)
return required_docs

View File

@ -15,12 +15,13 @@ class FileService(object):
@staticmethod @staticmethod
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel, def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
name, content_type, binary_data, primary=False): name, content_type, binary_data, primary=False, is_status=False):
"""Create a new file and associate it with a workflow spec.""" """Create a new file and associate it with a workflow spec."""
file_model = FileModel( file_model = FileModel(
workflow_spec_id=workflow_spec.id, workflow_spec_id=workflow_spec.id,
name=name, name=name,
primary=primary primary=primary,
is_status=is_status
) )
if primary: if primary:
bpmn: ElementTree.Element = ElementTree.fromstring(binary_data) bpmn: ElementTree.Element = ElementTree.fromstring(binary_data)

View File

@ -204,7 +204,8 @@ class WorkflowProcessor(object):
dmn: ElementTree.Element = ElementTree.fromstring(file_data.data) dmn: ElementTree.Element = ElementTree.fromstring(file_data.data)
parser.add_dmn_xml(dmn, filename=file_data.file_model.name) parser.add_dmn_xml(dmn, filename=file_data.file_model.name)
if process_id is None: if process_id is None:
raise(Exception("There is no primary BPMN model defined for workflow %s" % workflow_spec_id)) raise(ApiError(code="no_primary_bpmn_error",
message="There is no primary BPMN model defined for workflow %s" % workflow_spec_id))
try: try:
spec = parser.get_spec(process_id) spec = parser.get_spec(process_id)
except ValidationException as ve: except ValidationException as ve:
@ -237,7 +238,7 @@ class WorkflowProcessor(object):
session.add(workflow_model) session.add(workflow_model)
session.commit() session.commit()
# Need to commit twice, first to get a unique id for the workflow model, and # Need to commit twice, first to get a unique id for the workflow model, and
# a second time to store the serilaization so we can maintain this link within # a second time to store the serialization so we can maintain this link within
# the spiff-workflow process. # the spiff-workflow process.
bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
@ -337,7 +338,7 @@ class WorkflowProcessor(object):
process_elements.append(child) process_elements.append(child)
if len(process_elements) == 0: if len(process_elements) == 0:
raise Exception('No executable process tag found') raise ValidationException('No executable process tag found')
# There are multiple root elements # There are multiple root elements
if len(process_elements) > 1: if len(process_elements) > 1:
@ -349,6 +350,6 @@ class WorkflowProcessor(object):
if child_element.tag.endswith('startEvent'): if child_element.tag.endswith('startEvent'):
return this_element.attrib['id'] return this_element.attrib['id']
raise Exception('No start event found in %s' % et_root.attrib['id']) raise ValidationException('No start event found in %s' % et_root.attrib['id'])
return process_elements[0].attrib['id'] return process_elements[0].attrib['id']

View File

@ -82,11 +82,11 @@ class ExampleDataLoader:
returns an array of data models to be added to the database.""" returns an array of data models to be added to the database."""
global file global file
file_service = FileService() file_service = FileService()
spec = WorkflowSpecModel(id=id, spec = WorkflowSpecModel(id=id,
name=name, name=name,
display_name=display_name, display_name=display_name,
description=description) description=description,
is_status=id == 'status')
db.session.add(spec) db.session.add(spec)
db.session.commit() db.session.commit()
if not filepath: if not filepath:
@ -95,13 +95,15 @@ class ExampleDataLoader:
for file_path in files: for file_path in files:
noise, file_extension = os.path.splitext(file_path) noise, file_extension = os.path.splitext(file_path)
filename = os.path.basename(file_path) filename = os.path.basename(file_path)
is_primary = filename.lower() == id + ".bpmn"
is_status = filename.lower() == 'status.bpmn'
is_primary = filename.lower() == id + '.bpmn'
try: try:
file = open(file_path, "rb") file = open(file_path, 'rb')
data = file.read() data = file.read()
content_type = CONTENT_TYPES[file_extension[1:]] content_type = CONTENT_TYPES[file_extension[1:]]
file_service.add_workflow_spec_file(workflow_spec=spec, name=filename, content_type=content_type, file_service.add_workflow_spec_file(workflow_spec=spec, name=filename, content_type=content_type,
binary_data=data, primary=is_primary) binary_data=data, primary=is_primary, is_status=is_status)
except IsADirectoryError as de: except IsADirectoryError as de:
# Ignore sub directories # Ignore sub directories
pass pass

View File

@ -0,0 +1,41 @@
"""empty message
Revision ID: 5f06108116ae
Revises: 90dd63672e0a
Create Date: 2020-03-13 14:55:32.214380
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5f06108116ae'
down_revision = '90dd63672e0a'
branch_labels = None
depends_on = None
def upgrade():
    """Create workflow_spec_category and add is_status / category columns.

    Adds:
      * workflow_spec_category table (id, name, display_name)
      * file.is_status and workflow_spec.is_status boolean flags
      * workflow_spec.workflow_spec_category_id FK to the new table
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'workflow_spec_category',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('display_name', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column('file', sa.Column('is_status', sa.Boolean(), nullable=True))
    op.add_column('workflow_spec', sa.Column('is_status', sa.Boolean(), nullable=True))
    op.add_column('workflow_spec', sa.Column('workflow_spec_category_id', sa.Integer(), nullable=True))
    # Name the FK explicitly: the autogenerated "None" name left downgrade()
    # unable to drop it — op.drop_constraint requires a constraint name.
    op.create_foreign_key('fk_workflow_spec_category_id', 'workflow_spec', 'workflow_spec_category',
                          ['workflow_spec_category_id'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade(): drop the FK, the added columns, and the category table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_workflow_spec_category_id', 'workflow_spec', type_='foreignkey')
    op.drop_column('workflow_spec', 'workflow_spec_category_id')
    op.drop_column('workflow_spec', 'is_status')
    op.drop_column('file', 'is_status')
    op.drop_table('workflow_spec_category')
    # ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""empty message
Revision ID: 65f3fce6031a
Revises: 5f06108116ae
Create Date: 2020-03-15 12:40:42.314190
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '65f3fce6031a'
down_revision = '5f06108116ae'
branch_labels = None
depends_on = None
def upgrade():
    """Add status-spec tracking columns to study, linked to workflow_spec.

    Adds study.status_spec_id / study.status_spec_version and an FK from
    status_spec_id to workflow_spec.id.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('study', sa.Column('status_spec_id', sa.String(), nullable=True))
    op.add_column('study', sa.Column('status_spec_version', sa.String(), nullable=True))
    # Name the FK explicitly: passing None here leaves downgrade() with no
    # constraint name, and op.drop_constraint requires one.
    op.create_foreign_key('fk_study_status_spec_id', 'study', 'workflow_spec',
                          ['status_spec_id'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade(): drop the FK and the status-spec columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_study_status_spec_id', 'study', type_='foreignkey')
    op.drop_column('study', 'status_spec_version')
    op.drop_column('study', 'status_spec_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,59 @@
"""empty message
Revision ID: 90dd63672e0a
Revises: 8856126b6658
Create Date: 2020-03-10 21:16:38.827156
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '90dd63672e0a'
down_revision = '8856126b6658'
branch_labels = None
depends_on = None
def upgrade():
    """Create the task_event and workflow_stats tables.

    task_event records one row per task state change (who, which workflow,
    which spec version, when); workflow_stats holds per-workflow task counts.
    Both reference study, workflow, and workflow_spec by FK.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('task_event',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('study_id', sa.Integer(), nullable=False),
                    sa.Column('user_uid', sa.String(), nullable=False),
                    sa.Column('workflow_id', sa.Integer(), nullable=False),
                    # spec id/version are nullable: events may outlive a spec.
                    sa.Column('workflow_spec_id', sa.String(), nullable=True),
                    sa.Column('spec_version', sa.String(), nullable=True),
                    sa.Column('task_id', sa.String(), nullable=True),
                    sa.Column('task_state', sa.String(), nullable=True),
                    sa.Column('date', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['study_id'], ['study.id'], ),
                    sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
                    sa.ForeignKeyConstraint(['workflow_id'], ['workflow.id'], ),
                    sa.ForeignKeyConstraint(['workflow_spec_id'], ['workflow_spec.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('workflow_stats',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('study_id', sa.Integer(), nullable=False),
                    sa.Column('workflow_id', sa.Integer(), nullable=False),
                    sa.Column('workflow_spec_id', sa.String(), nullable=True),
                    sa.Column('spec_version', sa.String(), nullable=True),
                    sa.Column('num_tasks_total', sa.Integer(), nullable=True),
                    sa.Column('num_tasks_complete', sa.Integer(), nullable=True),
                    sa.Column('num_tasks_incomplete', sa.Integer(), nullable=True),
                    sa.Column('last_updated', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['study_id'], ['study.id'], ),
                    sa.ForeignKeyConstraint(['workflow_id'], ['workflow.id'], ),
                    sa.ForeignKeyConstraint(['workflow_spec_id'], ['workflow_spec.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade(): drop both tables (FKs go with them)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('workflow_stats')
    op.drop_table('task_event')
    # ### end Alembic commands ###

View File

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_data_security_plan" name="should_enable_crc2_training_session_data_security_plan">
<extensionElements>
<biodi:bounds x="190" y="80" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_1mjqwlv">
<input id="InputClause_18pwfqu" label="some_input">
<inputExpression id="LiteralExpression_1y84stb" typeRef="boolean" expressionLanguage="feel" />
</input>
<input id="InputClause_0ahp5b9">
<inputExpression id="LiteralExpression_1ptkp9l" typeRef="string">
<text></text>
</inputExpression>
</input>
<output id="OutputClause_05y0j7c" label="crc2_training_session_data_security_plan" name="crc2_training_session_data_security_plan" typeRef="boolean" />
<rule id="DecisionRule_17xsr74">
<inputEntry id="UnaryTests_05ldcq4">
<text>false</text>
</inputEntry>
<inputEntry id="UnaryTests_1ebers6">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_09oao3s">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0zavblw">
<inputEntry id="UnaryTests_09xdkib">
<text>true</text>
</inputEntry>
<inputEntry id="UnaryTests_1hqb6bs">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_0y2v9zc">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_enter_core_info" name="should_enable_crc2_training_session_enter_core_info">
<extensionElements>
<biodi:bounds x="170" y="60" width="180" height="80" />
</extensionElements>
<decisionTable id="decisionTable_1">
<input id="input_1" label="some_input">
<inputExpression id="inputExpression_1" typeRef="boolean" expressionLanguage="FEEL">
<text></text>
</inputExpression>
</input>
<output id="output_1" label="crc2_training_session_enter_core_info" name="crc2_training_session_enter_core_info" typeRef="boolean" />
<rule id="DecisionRule_10oo3ms">
<inputEntry id="UnaryTests_1ozg74s">
<text>false</text>
</inputEntry>
<outputEntry id="LiteralExpression_1d9565g">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_16ptqq8">
<inputEntry id="UnaryTests_1er4nj8">
<text>true</text>
</inputEntry>
<outputEntry id="LiteralExpression_1f34b59">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="should_enable_crc2_training_session_sponsor_funding_source" name="should_enable_crc2_training_session_sponsor_funding_source">
<extensionElements>
<biodi:bounds x="190" y="70" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="some_input">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="boolean" expressionLanguage="feel" />
</input>
<output id="OutputClause_1ybi1ud" label="crc2_training_session_sponsor_funding_source" name="crc2_training_session_sponsor_funding_source" typeRef="boolean" />
<rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2">
<text>false</text>
</inputEntry>
<outputEntry id="LiteralExpression_1pweuqc">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0j9wah0">
<inputEntry id="UnaryTests_0dzq6w4">
<text>true</text>
</inputEntry>
<outputEntry id="LiteralExpression_119lstj">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,121 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kudwnk" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_0jhpidf" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1ees8ka</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1ees8ka" sourceRef="StartEvent_1" targetRef="Activity_00rh8pw" />
<bpmn:businessRuleTask id="Activity_1k5eeun" name="Enable crc2_training_session_data_security_plan" camunda:decisionRef="should_enable_crc2_training_session_data_security_plan">
<bpmn:incoming>Flow_1nimppb</bpmn:incoming>
<bpmn:outgoing>Flow_1txrak2</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enable crc2_training_session_enter_core_info" camunda:decisionRef="should_enable_crc2_training_session_enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
<bpmn:outgoing>Flow_1sggkit</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:businessRuleTask id="Activity_16cm213" name="Enable crc2_training_session_sponsor_funding_source" camunda:decisionRef="should_enable_crc2_training_session_sponsor_funding_source">
<bpmn:incoming>Flow_18pl92p</bpmn:incoming>
<bpmn:outgoing>Flow_0x9580l</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:parallelGateway id="Gateway_1nta7st">
<bpmn:incoming>Flow_024q2cw</bpmn:incoming>
<bpmn:outgoing>Flow_1m8285h</bpmn:outgoing>
<bpmn:outgoing>Flow_1nimppb</bpmn:outgoing>
<bpmn:outgoing>Flow_18pl92p</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_1m8285h" sourceRef="Gateway_1nta7st" targetRef="Activity_1yqy50i" />
<bpmn:sequenceFlow id="Flow_1nimppb" sourceRef="Gateway_1nta7st" targetRef="Activity_1k5eeun" />
<bpmn:sequenceFlow id="Flow_18pl92p" sourceRef="Gateway_1nta7st" targetRef="Activity_16cm213" />
<bpmn:parallelGateway id="Gateway_12tpgcy">
<bpmn:incoming>Flow_1txrak2</bpmn:incoming>
<bpmn:incoming>Flow_1sggkit</bpmn:incoming>
<bpmn:incoming>Flow_0x9580l</bpmn:incoming>
<bpmn:outgoing>Flow_0pwtiqm</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:endEvent id="Event_135x8jg">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0pwtiqm" sourceRef="Gateway_12tpgcy" targetRef="Event_135x8jg" />
<bpmn:sequenceFlow id="Flow_0x9580l" sourceRef="Activity_16cm213" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_1txrak2" sourceRef="Activity_1k5eeun" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_1sggkit" sourceRef="Activity_1yqy50i" targetRef="Gateway_12tpgcy" />
<bpmn:sequenceFlow id="Flow_024q2cw" sourceRef="Activity_00rh8pw" targetRef="Gateway_1nta7st" />
<bpmn:userTask id="Activity_00rh8pw" name="Enable specs" camunda:formKey="form_key">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="some_input" label="some_input" type="boolean" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>Flow_024q2cw</bpmn:outgoing>
</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0jhpidf">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="211" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1ees8ka_di" bpmnElement="SequenceFlow_1ees8ka">
<di:waypoint x="188" y="229" />
<di:waypoint x="230" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1k5eeun_di" bpmnElement="Activity_1k5eeun">
<dc:Bounds x="460" y="189" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yqy50i_di" bpmnElement="Activity_1yqy50i">
<dc:Bounds x="460" y="80" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_16cm213_di" bpmnElement="Activity_16cm213">
<dc:Bounds x="460" y="300" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1m22g4p_di" bpmnElement="Gateway_1nta7st">
<dc:Bounds x="378" y="204" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1m8285h_di" bpmnElement="Flow_1m8285h">
<di:waypoint x="403" y="204" />
<di:waypoint x="403" y="120" />
<di:waypoint x="460" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1nimppb_di" bpmnElement="Flow_1nimppb">
<di:waypoint x="428" y="229" />
<di:waypoint x="460" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_18pl92p_di" bpmnElement="Flow_18pl92p">
<di:waypoint x="403" y="254" />
<di:waypoint x="403" y="340" />
<di:waypoint x="460" y="340" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1kk6x70_di" bpmnElement="Gateway_12tpgcy">
<dc:Bounds x="595" y="204" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_135x8jg_di" bpmnElement="Event_135x8jg">
<dc:Bounds x="682" y="211" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0pwtiqm_di" bpmnElement="Flow_0pwtiqm">
<di:waypoint x="645" y="229" />
<di:waypoint x="682" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0x9580l_di" bpmnElement="Flow_0x9580l">
<di:waypoint x="560" y="340" />
<di:waypoint x="620" y="340" />
<di:waypoint x="620" y="254" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1txrak2_di" bpmnElement="Flow_1txrak2">
<di:waypoint x="560" y="229" />
<di:waypoint x="595" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1sggkit_di" bpmnElement="Flow_1sggkit">
<di:waypoint x="560" y="120" />
<di:waypoint x="620" y="120" />
<di:waypoint x="620" y="204" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_024q2cw_di" bpmnElement="Flow_024q2cw">
<di:waypoint x="330" y="229" />
<di:waypoint x="378" y="229" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0vfs7g0_di" bpmnElement="Activity_00rh8pw">
<dc:Bounds x="230" y="189" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -3,7 +3,7 @@ from datetime import datetime, timezone
from unittest.mock import patch, Mock from unittest.mock import patch, Mock
from crc import session from crc import session
from crc.models.api_models import WorkflowApiSchema from crc.models.api_models import WorkflowApiSchema, WorkflowApi
from crc.models.study import StudyModel, StudyModelSchema from crc.models.study import StudyModel, StudyModelSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \ from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
ProtocolBuilderStudySchema ProtocolBuilderStudySchema
@ -160,9 +160,6 @@ class TestStudyApi(BaseTest):
rv = self.app.delete('/v1.0/study/%i' % study.id) rv = self.app.delete('/v1.0/study/%i' % study.id)
self.assert_failure(rv, error_code="study_integrity_error") self.assert_failure(rv, error_code="study_integrity_error")
def test_delete_workflow(self): def test_delete_workflow(self):
self.load_example_data() self.load_example_data()
study = session.query(StudyModel).first() study = session.query(StudyModel).first()
@ -174,7 +171,7 @@ class TestStudyApi(BaseTest):
self.assertEqual(1, session.query(WorkflowModel).count()) self.assertEqual(1, session.query(WorkflowModel).count())
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
workflow = WorkflowApiSchema().load(json_data) workflow = WorkflowApiSchema().load(json_data)
rv = self.app.delete('/v1.0/workflow/%i' % workflow.id) rv = self.app.delete('/v1.0/workflow/%i' % workflow.id, headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
self.assertEqual(0, session.query(WorkflowModel).count()) self.assertEqual(0, session.query(WorkflowModel).count())
@ -207,3 +204,79 @@ class TestStudyApi(BaseTest):
json_data_after = json.loads(response_after.get_data(as_text=True)) json_data_after = json.loads(response_after.get_data(as_text=True))
workflows_after = WorkflowApiSchema(many=True).load(json_data_after) workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
self.assertEqual(1, len(workflows_after)) self.assertEqual(1, len(workflows_after))
"""
Workflow Specs that have been made available (or not) to a particular study via the status.bpmn should be flagged
as available (or not) when the list of a study's workflows is retrieved.
"""
def test_workflow_spec_status(self):
    """End-to-end: completing the status workflow toggles is_active on every
    non-status workflow listed for the study.

    Flow: add the 'status' spec to a study, verify the study records that
    spec's id/version, add all other specs, then set some_input to False and
    True in turn and assert every listed workflow reflects that flag.
    """
    self.load_example_data()
    study = session.query(StudyModel).first()
    study_id = study.id

    # Add status workflow
    self.load_test_spec('status')

    # Add status workflow to the study
    status_spec = session.query(WorkflowSpecModel).filter_by(is_status=True).first()
    add_status_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
                                        content_type="application/json",
                                        headers=self.logged_in_headers(),
                                        data=json.dumps(WorkflowSpecModelSchema().dump(status_spec)))
    self.assert_success(add_status_response)
    json_data_status = json.loads(add_status_response.get_data(as_text=True))
    status_workflow: WorkflowApi = WorkflowApiSchema().load(json_data_status)
    self.assertIsNotNone(status_workflow)
    self.assertIsNotNone(status_workflow.workflow_spec_id)
    self.assertIsNotNone(status_workflow.spec_version)
    self.assertIsNotNone(status_workflow.next_task)
    self.assertIsNotNone(status_workflow.next_task['id'])
    status_task_id = status_workflow.next_task['id']

    # Verify that the study status spec is populated
    updated_study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
    self.assertIsNotNone(updated_study)
    self.assertIsNotNone(updated_study.status_spec_id)
    self.assertIsNotNone(updated_study.status_spec_version)
    self.assertEqual(updated_study.status_spec_id, status_workflow.workflow_spec_id)
    self.assertEqual(updated_study.status_spec_version, status_workflow.spec_version)

    # Add all available non-status workflows to the study
    specs = session.query(WorkflowSpecModel).filter_by(is_status=False).all()
    for spec in specs:
        add_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
                                     content_type="application/json",
                                     headers=self.logged_in_headers(),
                                     data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
        self.assert_success(add_response)

    for is_active in [False, True]:
        # Set all workflow specs to inactive|active by answering the status
        # workflow's single form question.
        update_status_response = self.app.put('/v1.0/workflow/%i/task/%s/data' % (status_workflow.id, status_task_id),
                                              headers=self.logged_in_headers(),
                                              content_type="application/json",
                                              data=json.dumps({'some_input': is_active}))
        self.assert_success(update_status_response)
        json_workflow_api = json.loads(update_status_response.get_data(as_text=True))
        updated_workflow_api: WorkflowApi = WorkflowApiSchema().load(json_workflow_api)
        self.assertIsNotNone(updated_workflow_api)
        # Submitting the one form completes the status workflow outright.
        self.assertEqual(updated_workflow_api.status, WorkflowStatus.complete)
        self.assertIsNotNone(updated_workflow_api.last_task)
        self.assertIsNotNone(updated_workflow_api.last_task['data'])
        self.assertIsNotNone(updated_workflow_api.last_task['data']['some_input'])
        self.assertEqual(updated_workflow_api.last_task['data']['some_input'], is_active)

        # List workflows for study
        response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
                                      content_type="application/json",
                                      headers=self.logged_in_headers())
        self.assert_success(response_after)
        json_data_after = json.loads(response_after.get_data(as_text=True))
        workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
        # NOTE(review): only the non-status specs are expected in the listing
        # — presumably the status workflow itself is excluded; verify in API.
        self.assertEqual(len(specs), len(workflows_after))

        # All workflows should be inactive|active
        for workflow in workflows_after:
            self.assertEqual(workflow.is_active, is_active)

View File

@ -4,6 +4,7 @@ import os
from crc import session, app from crc import session, app
from crc.models.api_models import WorkflowApiSchema, Task from crc.models.api_models import WorkflowApiSchema, Task
from crc.models.file import FileModelSchema from crc.models.file import FileModelSchema
from crc.models.stats import WorkflowStatsModel, TaskEventModel
from crc.models.study import StudyModel from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus from crc.models.workflow import WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus
from crc.services.workflow_processor import WorkflowProcessor from crc.services.workflow_processor import WorkflowProcessor
@ -28,6 +29,7 @@ class TestTasksApi(BaseTest):
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False): def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False):
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' % rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
(workflow.id, str(soft_reset), str(hard_reset)), (workflow.id, str(soft_reset), str(hard_reset)),
headers=self.logged_in_headers(),
content_type="application/json") content_type="application/json")
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
@ -37,10 +39,24 @@ class TestTasksApi(BaseTest):
def complete_form(self, workflow, task, dict_data): def complete_form(self, workflow, task, dict_data):
rv = self.app.put('/v1.0/workflow/%i/task/%s/data' % (workflow.id, task.id), rv = self.app.put('/v1.0/workflow/%i/task/%s/data' % (workflow.id, task.id),
headers=self.logged_in_headers(),
content_type="application/json", content_type="application/json",
data=json.dumps(dict_data)) data=json.dumps(dict_data))
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
num_stats = session.query(WorkflowStatsModel) \
.filter_by(workflow_id=workflow.id) \
.filter_by(workflow_spec_id=workflow.workflow_spec_id) \
.count()
self.assertGreater(num_stats, 0)
num_task_events = session.query(TaskEventModel) \
.filter_by(workflow_id=workflow.id) \
.filter_by(task_id=task.id) \
.count()
self.assertGreater(num_task_events, 0)
workflow = WorkflowApiSchema().load(json_data) workflow = WorkflowApiSchema().load(json_data)
return workflow return workflow
@ -143,9 +159,9 @@ class TestTasksApi(BaseTest):
} }
workflow_api = self.complete_form(workflow, tasks[0], data) workflow_api = self.complete_form(workflow, tasks[0], data)
self.assertIsNotNone(workflow_api.next_task) self.assertIsNotNone(workflow_api.next_task)
self.assertEquals("EndEvent_0evb22x", workflow_api.next_task['name']) self.assertEqual("EndEvent_0evb22x", workflow_api.next_task['name'])
self.assertTrue(workflow_api.status == WorkflowStatus.complete) self.assertTrue(workflow_api.status == WorkflowStatus.complete)
rv = self.app.get('/v1.0/file?workflow_id=%i' % workflow.id) rv = self.app.get('/v1.0/file?workflow_id=%i' % workflow.id, headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
files = FileModelSchema(many=True).load(json_data, session=session) files = FileModelSchema(many=True).load(json_data, session=session)
@ -245,10 +261,38 @@ class TestTasksApi(BaseTest):
# perform a soft reset returns an error # perform a soft reset returns an error
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' % rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
(workflow.id, "true", "false"), (workflow.id, "true", "false"),
content_type="application/json") content_type="application/json",
headers=self.logged_in_headers())
self.assert_failure(rv, error_code="unexpected_workflow_structure") self.assert_failure(rv, error_code="unexpected_workflow_structure")
# Try again without a soft reset, and we are still ok, and on the original version. # Try again without a soft reset, and we are still ok, and on the original version.
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertTrue(workflow_api.spec_version.startswith("v1 ")) self.assertTrue(workflow_api.spec_version.startswith("v1 "))
self.assertFalse(workflow_api.is_latest_spec) self.assertFalse(workflow_api.is_latest_spec)
def test_get_workflow_stats(self):
    """The stats endpoint responds before any task runs, but a
    WorkflowStatsModel row only appears after a task is completed, with
    total == complete + incomplete."""
    self.load_example_data()
    workflow = self.create_workflow('exclusive_gateway')

    # Endpoint succeeds even before any task has been completed...
    response_before = self.app.get('/v1.0/workflow/%i/stats' % workflow.id,
                                   content_type="application/json",
                                   headers=self.logged_in_headers())
    self.assert_success(response_before)

    # ...but no stats row has been persisted yet.
    db_stats_before = session.query(WorkflowStatsModel).filter_by(workflow_id=workflow.id).first()
    self.assertIsNone(db_stats_before)

    # Start the workflow.
    tasks = self.get_workflow_api(workflow).user_tasks
    self.complete_form(workflow, tasks[0], {"has_bananas": True})

    response_after = self.app.get('/v1.0/workflow/%i/stats' % workflow.id,
                                  content_type="application/json",
                                  headers=self.logged_in_headers())
    self.assert_success(response_after)

    # Completing a task should have created a stats row with consistent counts.
    db_stats_after = session.query(WorkflowStatsModel).filter_by(workflow_id=workflow.id).first()
    self.assertIsNotNone(db_stats_after)
    self.assertGreater(db_stats_after.num_tasks_complete, 0)
    self.assertGreater(db_stats_after.num_tasks_incomplete, 0)
    self.assertGreater(db_stats_after.num_tasks_total, 0)
    self.assertEqual(db_stats_after.num_tasks_total,
                     db_stats_after.num_tasks_complete + db_stats_after.num_tasks_incomplete)

View File

@ -165,7 +165,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first() study = session.query(StudyModel).first()
with self.assertRaises(ApiError) as context: with self.assertRaises(ApiError) as context:
WorkflowProcessor.create(study.id, workflow_spec_model.id) WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.assertEquals("workflow_validation_error", context.exception.code) self.assertEqual("workflow_validation_error", context.exception.code)
self.assertTrue("bpmn:startEvent" in context.exception.message) self.assertTrue("bpmn:startEvent" in context.exception.message)
def test_workflow_spec_key_error(self): def test_workflow_spec_key_error(self):
@ -311,14 +311,14 @@ class TestWorkflowProcessor(BaseTest):
# Setting up another processor should not error out, but doesn't pick up the update. # Setting up another processor should not error out, but doesn't pick up the update.
workflow_model.bpmn_workflow_json = processor.serialize() workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(workflow_model) processor2 = WorkflowProcessor(workflow_model)
self.assertEquals("Step 1", processor2.bpmn_workflow.last_task.task_spec.description) self.assertEqual("Step 1", processor2.bpmn_workflow.last_task.task_spec.description)
self.assertNotEquals("# This is some documentation I wanted to add.", self.assertNotEqual("# This is some documentation I wanted to add.",
processor2.bpmn_workflow.last_task.task_spec.documentation) processor2.bpmn_workflow.last_task.task_spec.documentation)
# You can do a soft update and get the right response. # You can do a soft update and get the right response.
processor3 = WorkflowProcessor(workflow_model, soft_reset=True) processor3 = WorkflowProcessor(workflow_model, soft_reset=True)
self.assertEquals("Step 1", processor3.bpmn_workflow.last_task.task_spec.description) self.assertEqual("Step 1", processor3.bpmn_workflow.last_task.task_spec.description)
self.assertEquals("# This is some documentation I wanted to add.", self.assertEqual("# This is some documentation I wanted to add.",
processor3.bpmn_workflow.last_task.task_spec.documentation) processor3.bpmn_workflow.last_task.task_spec.documentation)
@ -336,7 +336,7 @@ class TestWorkflowProcessor(BaseTest):
task.data = {"color": "blue"} task.data = {"color": "blue"}
processor.complete_task(task) processor.complete_task(task)
next_task = processor.next_task() next_task = processor.next_task()
self.assertEquals("Step 2", next_task.task_spec.description) self.assertEqual("Step 2", next_task.task_spec.description)
# Modify the specification, with a major change that alters the flow and can't be serialized effectively. # Modify the specification, with a major change that alters the flow and can't be serialized effectively.
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn') file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn')
@ -349,13 +349,40 @@ class TestWorkflowProcessor(BaseTest):
# Do a hard reset, which should bring us back to the beginning, but retain the data. # Do a hard reset, which should bring us back to the beginning, but retain the data.
processor3 = WorkflowProcessor(workflow_model, hard_reset=True) processor3 = WorkflowProcessor(workflow_model, hard_reset=True)
self.assertEquals("Step 1", processor3.next_task().task_spec.description) self.assertEqual("Step 1", processor3.next_task().task_spec.description)
self.assertEquals({"color": "blue"}, processor3.next_task().data) self.assertEqual({"color": "blue"}, processor3.next_task().data)
processor3.complete_task(processor3.next_task()) processor3.complete_task(processor3.next_task())
self.assertEquals("New Step", processor3.next_task().task_spec.description) self.assertEqual("New Step", processor3.next_task().task_spec.description)
self.assertEquals({"color": "blue"}, processor3.next_task().data) self.assertEqual({"color": "blue"}, processor3.next_task().data)
def test_get_latest_spec_version(self): def test_get_latest_spec_version(self):
workflow_spec_model = self.load_test_spec("two_forms") workflow_spec_model = self.load_test_spec("two_forms")
version = WorkflowProcessor.get_latest_version_string("two_forms") version = WorkflowProcessor.get_latest_version_string("two_forms")
self.assertTrue(version.startswith("v1 ")) self.assertTrue(version.startswith("v1 "))
def test_status_bpmn(self):
    """Running the status workflow end-to-end sets, in the final task data,
    one boolean per known spec id matching the value given for some_input."""
    self.load_example_data()

    specs = session.query(WorkflowSpecModel).all()
    study = session.query(StudyModel).first()
    workflow_spec_model = self.load_test_spec("status")

    for enabled in [True, False]:
        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
        task = processor.next_task()

        # Turn all specs on or off via the single form input.
        task.data = {"some_input": enabled}
        processor.complete_task(task)

        # Finish out rest of workflow (business-rule tasks run to completion).
        while processor.get_status() == WorkflowStatus.waiting:
            task = processor.next_task()
            processor.complete_task(task)

        self.assertEqual(processor.get_status(), WorkflowStatus.complete)

        # Enabled status of all specs should match the value set in the first task
        for spec in specs:
            self.assertEqual(task.data[spec.id], enabled)

View File

@ -2,7 +2,7 @@ import json
from crc import session from crc import session
from crc.models.file import FileModel from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from tests.base_test import BaseTest from tests.base_test import BaseTest
@ -25,9 +25,11 @@ class TestWorkflowSpec(BaseTest):
def test_add_new_workflow_specification(self): def test_add_new_workflow_specification(self):
self.load_example_data(); self.load_example_data();
num_before = session.query(WorkflowSpecModel).count() num_before = session.query(WorkflowSpecModel).count()
spec = WorkflowSpecModel(id='make_cookies', display_name='Cooooookies', spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies',
description='Om nom nom delicious cookies') description='Om nom nom delicious cookies')
rv = self.app.post('/v1.0/workflow-specification', content_type="application/json", rv = self.app.post('/v1.0/workflow-specification',
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps(WorkflowSpecModelSchema().dump(spec))) data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(rv) self.assert_success(rv)
db_spec = session.query(WorkflowSpecModel).filter_by(id='make_cookies').first() db_spec = session.query(WorkflowSpecModel).filter_by(id='make_cookies').first()
@ -38,12 +40,38 @@ class TestWorkflowSpec(BaseTest):
def test_get_workflow_specification(self):
    """GET on a spec's API endpoint should round-trip to the DB record:
    the deserialized response must compare equal to the stored model."""
    self.load_example_data()
    stored = session.query(WorkflowSpecModel).first()
    response = self.app.get('/v1.0/workflow-specification/%s' % stored.id,
                            headers=self.logged_in_headers())
    self.assert_success(response)
    payload = json.loads(response.get_data(as_text=True))
    loaded = WorkflowSpecModelSchema().load(payload, session=session)
    self.assertEqual(stored, loaded)
def test_update_workflow_specification(self):
    """PUT an updated spec (assigning it to a new category) and verify the
    API response and the persisted row both carry the category association."""
    self.load_example_data()
    # Create a category to assign the spec to; id=0 is referenced below.
    category = WorkflowSpecCategoryModel(id=0, name='trap', display_name="It's a trap!")
    session.add(category)
    session.commit()

    db_spec_before: WorkflowSpecModel = session.query(WorkflowSpecModel).first()
    spec_id = db_spec_before.id
    # Sanity check: the spec starts out uncategorized.
    self.assertIsNone(db_spec_before.workflow_spec_category_id)
    db_spec_before.workflow_spec_category_id = 0

    rv = self.app.put('/v1.0/workflow-specification/%s' % spec_id,
                      content_type="application/json",
                      headers=self.logged_in_headers(),
                      data=json.dumps(WorkflowSpecModelSchema().dump(db_spec_before)))
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))
    # The API response should deserialize back to the updated model.
    api_spec = WorkflowSpecModelSchema().load(json_data, session=session)
    self.assertEqual(db_spec_before, api_spec)

    # Re-query to confirm the category assignment was persisted.
    db_spec_after: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    self.assertIsNotNone(db_spec_after.workflow_spec_category_id)
    self.assertIsNotNone(db_spec_after.workflow_spec_category)
    self.assertEqual(db_spec_after.workflow_spec_category.display_name, category.display_name)
def test_delete_workflow_specification(self): def test_delete_workflow_specification(self):
self.load_example_data() self.load_example_data()
spec_id = 'random_fact' spec_id = 'random_fact'
@ -55,7 +83,7 @@ class TestWorkflowSpec(BaseTest):
num_workflows_before = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count() num_workflows_before = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count()
self.assertGreater(num_files_before + num_workflows_before, 0) self.assertGreater(num_files_before + num_workflows_before, 0)
rv = self.app.delete('/v1.0/workflow-specification/' + spec_id) rv = self.app.delete('/v1.0/workflow-specification/' + spec_id, headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
num_specs_after = session.query(WorkflowSpecModel).filter_by(id=spec_id).count() num_specs_after = session.query(WorkflowSpecModel).filter_by(id=spec_id).count()