Merge pull request #17 from sartography/feature/pb_services
Workflow Versioning
This commit is contained in:
commit
c3b244fc7a
46
crc/api.yml
46
crc/api.yml
|
@ -533,6 +533,19 @@ paths:
|
|||
get:
|
||||
operationId: crc.api.workflow.get_workflow
|
||||
summary: Detailed information for a specific workflow instance
|
||||
parameters:
|
||||
- name: soft_reset
|
||||
in: query
|
||||
required: false
|
||||
description: Set this to true to use the latest workflow specification to load minor modifications to the spec.
|
||||
schema:
|
||||
type: boolean
|
||||
- name: hard_reset
|
||||
in: query
|
||||
required: false
|
||||
description: Set this to true to reset the workflow
|
||||
schema:
|
||||
type: boolean
|
||||
tags:
|
||||
- Workflows and Tasks
|
||||
responses:
|
||||
|
@ -791,31 +804,34 @@ components:
|
|||
status:
|
||||
type: enum
|
||||
enum: ['new','user_input_required','waiting','complete']
|
||||
last_task:
|
||||
$ref: "#/components/schemas/Task"
|
||||
next_task:
|
||||
$ref: "#/components/schemas/Task"
|
||||
user_tasks:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Task"
|
||||
last_task:
|
||||
$ref: "#/components/schemas/Task"
|
||||
next_task:
|
||||
$ref: "#/components/schemas/Task"
|
||||
workflow_spec_id:
|
||||
type: string
|
||||
spec_version:
|
||||
type: string
|
||||
is_latest_spec:
|
||||
type: boolean
|
||||
|
||||
example:
|
||||
id: 291234
|
||||
status: 'user_input_required'
|
||||
current_task_ids: ['study_identification','Detailed Reports']
|
||||
workflow_spec:
|
||||
id: 'prot_def'
|
||||
display_name: 'Protocol Definition'
|
||||
description: 'Collect some additional information about your protocol to complete forms and processes.'
|
||||
bpmn_url: 'https://crconnect.viriginia.edu/prot_def.bpmn'
|
||||
svg_url: 'https://crconnect.viriginia.edu/prot_def.svg'
|
||||
messages: [
|
||||
"Protocol Builder reports that the protocol process is complete for this study.",
|
||||
"IDS Submission Template was generated successfully."
|
||||
]
|
||||
workflow_spec_id: 'random_fact'
|
||||
spec_version: 'v1.1 [22,23]'
|
||||
is_latest_spec: True
|
||||
next_task:
|
||||
id: study_identification
|
||||
name: Study Identification
|
||||
title: IRB Review
|
||||
documentation: "# Heading 1\n\nMarkdown documentation text goes here"
|
||||
type: form
|
||||
state: ready
|
||||
Task:
|
||||
properties:
|
||||
id:
|
||||
|
|
|
@ -8,6 +8,7 @@ from flask import send_file
|
|||
from crc import session
|
||||
from crc.api.common import ApiErrorSchema, ApiError
|
||||
from crc.models.file import FileModelSchema, FileModel, FileDataModel, FileType
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from crc.services.file_service import FileService
|
||||
|
||||
|
||||
|
@ -34,7 +35,8 @@ def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=Non
|
|||
|
||||
file = connexion.request.files['file']
|
||||
if workflow_spec_id:
|
||||
file_model = FileService.add_workflow_spec_file(workflow_spec_id, file.filename, file.content_type, file.stream.read())
|
||||
workflow_spec = session.query(WorkflowSpecModel).filter_by(id=workflow_spec_id).first()
|
||||
file_model = FileService.add_workflow_spec_file(workflow_spec, file.filename, file.content_type, file.stream.read())
|
||||
else:
|
||||
file_model = FileService.add_form_field_file(study_id, workflow_id, task_id, form_field_key, file.filename, file.content_type, file.stream.read())
|
||||
|
||||
|
|
|
@ -6,9 +6,10 @@ from flask import g
|
|||
from crc import session, auth
|
||||
from crc.api.common import ApiError, ApiErrorSchema
|
||||
from crc.api.workflow import __get_workflow_api_model
|
||||
from crc.models.api_models import WorkflowApiSchema
|
||||
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudy
|
||||
from crc.models.study import StudyModelSchema, StudyModel
|
||||
from crc.models.workflow import WorkflowModel, WorkflowApiSchema, WorkflowSpecModel, WorkflowApi
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecModel
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.protocol_builder import ProtocolBuilderService
|
||||
|
||||
|
@ -120,7 +121,7 @@ def get_study_workflows(study_id):
|
|||
workflow_models = session.query(WorkflowModel).filter_by(study_id=study_id).all()
|
||||
api_models = []
|
||||
for workflow_model in workflow_models:
|
||||
processor = WorkflowProcessor(workflow_model.workflow_spec_id,
|
||||
processor = WorkflowProcessor(workflow_model,
|
||||
workflow_model.bpmn_workflow_json)
|
||||
api_models.append(__get_workflow_api_model(processor))
|
||||
schema = WorkflowApiSchema(many=True)
|
||||
|
|
|
@ -3,8 +3,8 @@ import uuid
|
|||
from crc.api.file import delete_file
|
||||
from crc import session
|
||||
from crc.api.common import ApiError, ApiErrorSchema
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, \
|
||||
Task, WorkflowApiSchema, WorkflowApi
|
||||
from crc.models.api_models import Task, WorkflowApi, WorkflowApiSchema
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.models.file import FileModel
|
||||
|
||||
|
@ -74,17 +74,19 @@ def __get_workflow_api_model(processor: WorkflowProcessor):
|
|||
last_task=Task.from_spiff(processor.bpmn_workflow.last_task),
|
||||
next_task=None,
|
||||
user_tasks=user_tasks,
|
||||
workflow_spec_id=processor.workflow_spec_id
|
||||
workflow_spec_id=processor.workflow_spec_id,
|
||||
spec_version=processor.get_spec_version(),
|
||||
is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id)
|
||||
)
|
||||
if(processor.next_task()):
|
||||
if processor.next_task():
|
||||
workflow_api.next_task = Task.from_spiff(processor.next_task())
|
||||
return workflow_api
|
||||
|
||||
def get_workflow(workflow_id):
|
||||
|
||||
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
|
||||
schema = WorkflowApiSchema()
|
||||
workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
|
||||
processor = WorkflowProcessor(workflow_model.workflow_spec_id,
|
||||
workflow_model.bpmn_workflow_json)
|
||||
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
|
||||
return schema.dump(__get_workflow_api_model(processor))
|
||||
|
||||
|
||||
|
@ -99,7 +101,7 @@ def get_task(workflow_id, task_id):
|
|||
|
||||
def update_task(workflow_id, task_id, body):
|
||||
workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
|
||||
processor = WorkflowProcessor(workflow_model.workflow_spec_id, workflow_model.bpmn_workflow_json)
|
||||
processor = WorkflowProcessor(workflow_model)
|
||||
task_id = uuid.UUID(task_id)
|
||||
task = processor.bpmn_workflow.get_task(task_id)
|
||||
task.data = body
|
||||
|
|
|
@ -0,0 +1,124 @@
|
|||
import jinja2
|
||||
import marshmallow
|
||||
from jinja2 import Template
|
||||
from marshmallow import INCLUDE
|
||||
from marshmallow_enum import EnumField
|
||||
|
||||
from crc import ma
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.workflow import WorkflowStatus
|
||||
|
||||
|
||||
class Task(object):
|
||||
def __init__(self, id, name, title, type, state, form, documentation, data):
|
||||
self.id = id
|
||||
self.name = name
|
||||
self.title = title
|
||||
self.type = type
|
||||
self.state = state
|
||||
self.form = form
|
||||
self.documentation = documentation
|
||||
self.data = data
|
||||
|
||||
@classmethod
|
||||
def from_spiff(cls, spiff_task):
|
||||
documentation = spiff_task.task_spec.documentation if hasattr(spiff_task.task_spec, "documentation") else ""
|
||||
instance = cls(spiff_task.id,
|
||||
spiff_task.task_spec.name,
|
||||
spiff_task.task_spec.description,
|
||||
spiff_task.task_spec.__class__.__name__,
|
||||
spiff_task.get_state_name(),
|
||||
None,
|
||||
documentation,
|
||||
spiff_task.data)
|
||||
if hasattr(spiff_task.task_spec, "form"):
|
||||
instance.form = spiff_task.task_spec.form
|
||||
if documentation != "" and documentation is not None:
|
||||
|
||||
instance.process_documentation(documentation)
|
||||
return instance
|
||||
|
||||
def process_documentation(self, documentation):
|
||||
'''Runs markdown documentation through the Jinja2 processor to inject data
|
||||
create loops, etc...'''
|
||||
|
||||
template = Template(documentation)
|
||||
try:
|
||||
self.documentation = template.render(**self.data)
|
||||
except jinja2.exceptions.UndefinedError as ue:
|
||||
raise ApiError(code="template_error", message="Error processing template for task %s: %s" %
|
||||
(self.name, str(ue)), status_code=500)
|
||||
|
||||
class OptionSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
class ValidationSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["name", "config"]
|
||||
|
||||
|
||||
class PropertiesSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "value"]
|
||||
|
||||
|
||||
class FormFieldSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = [
|
||||
"id", "type", "label", "default_value", "options", "validation", "properties", "value"
|
||||
]
|
||||
|
||||
default_value = marshmallow.fields.String(required=False, allow_none=True)
|
||||
options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema))
|
||||
validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema))
|
||||
properties = marshmallow.fields.List(marshmallow.fields.Nested(PropertiesSchema))
|
||||
|
||||
|
||||
class FormSchema(ma.Schema):
|
||||
key = marshmallow.fields.String(required=True, allow_none=False)
|
||||
fields = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldSchema))
|
||||
|
||||
|
||||
class TaskSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "name", "title", "type", "state", "form", "documentation", "data"]
|
||||
|
||||
documentation = marshmallow.fields.String(required=False, allow_none=True)
|
||||
form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
|
||||
title = marshmallow.fields.String(required=False, allow_none=True)
|
||||
|
||||
@marshmallow.post_load
|
||||
def make_task(self, data, **kwargs):
|
||||
return Task(**data)
|
||||
|
||||
|
||||
class WorkflowApi(object):
|
||||
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version, is_latest_spec):
|
||||
self.id = id
|
||||
self.status = status
|
||||
self.user_tasks = user_tasks
|
||||
self.last_task = last_task
|
||||
self.next_task = next_task
|
||||
self.workflow_spec_id = workflow_spec_id
|
||||
self.spec_version = spec_version
|
||||
self.is_latest_spec = is_latest_spec
|
||||
|
||||
class WorkflowApiSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowApi
|
||||
fields = ["id", "status",
|
||||
"user_tasks", "last_task", "next_task",
|
||||
"workflow_spec_id",
|
||||
"spec_version", "is_latest_spec"]
|
||||
unknown = INCLUDE
|
||||
|
||||
status = EnumField(WorkflowStatus)
|
||||
user_tasks = marshmallow.fields.List(marshmallow.fields.Nested(TaskSchema, dump_only=True))
|
||||
last_task = marshmallow.fields.Nested(TaskSchema, dump_only=True)
|
||||
next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)
|
||||
|
||||
@marshmallow.post_load
|
||||
def make_workflow(self, data, **kwargs):
|
||||
return WorkflowApi(**data)
|
|
@ -3,6 +3,7 @@ import enum
|
|||
from marshmallow_enum import EnumField
|
||||
from marshmallow_sqlalchemy import ModelSchema
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
from crc import db
|
||||
|
||||
|
@ -30,10 +31,36 @@ class FileType(enum.Enum):
|
|||
zip = 'zip'
|
||||
|
||||
|
||||
CONTENT_TYPES = {
|
||||
"bpmn": "text/xml",
|
||||
"csv": "text/csv",
|
||||
"dmn": "text/xml",
|
||||
"doc": "application/msword",
|
||||
"docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||
"gif": "image/gif",
|
||||
"jpg": "image/jpeg",
|
||||
"md" : "text/plain",
|
||||
"pdf": "application/pdf",
|
||||
"png": "image/png",
|
||||
"ppt": "application/vnd.ms-powerpoint",
|
||||
"pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
|
||||
"rtf": "application/rtf",
|
||||
"svg": "image/svg+xml",
|
||||
"svg_xml": "image/svg+xml",
|
||||
"txt": "text/plain",
|
||||
"xls": "application/vnd.ms-excel",
|
||||
"xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
||||
"xml": "application/xml",
|
||||
"zip": "application/zip"
|
||||
}
|
||||
|
||||
class FileDataModel(db.Model):
|
||||
__tablename__ = 'file_data'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
md5_hash = db.Column(UUID(as_uuid=True), unique=False, nullable=False)
|
||||
data = db.Column(db.LargeBinary)
|
||||
version = db.Column(db.Integer, default=0)
|
||||
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
|
||||
file_model_id = db.Column(db.Integer, db.ForeignKey('file.id'))
|
||||
file_model = db.relationship("FileModel")
|
||||
|
||||
|
@ -42,8 +69,6 @@ class FileModel(db.Model):
|
|||
__tablename__ = 'file'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
name = db.Column(db.String)
|
||||
version = db.Column(db.Integer, default=0)
|
||||
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
|
||||
type = db.Column(db.Enum(FileType))
|
||||
primary = db.Column(db.Boolean)
|
||||
content_type = db.Column(db.String)
|
||||
|
@ -52,6 +77,7 @@ class FileModel(db.Model):
|
|||
study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=True)
|
||||
task_id = db.Column(db.String, nullable=True)
|
||||
form_field_key = db.Column(db.String, nullable=True)
|
||||
latest_version = db.Column(db.Integer, default=0)
|
||||
|
||||
|
||||
class FileModelSchema(ModelSchema):
|
||||
|
|
|
@ -1,12 +1,8 @@
|
|||
import enum
|
||||
|
||||
import marshmallow
|
||||
from jinja2 import Environment, BaseLoader, Undefined, Template
|
||||
from marshmallow import INCLUDE
|
||||
from marshmallow_enum import EnumField
|
||||
from marshmallow_sqlalchemy import ModelSchema
|
||||
|
||||
from crc import db, ma
|
||||
from crc import db
|
||||
|
||||
|
||||
class WorkflowSpecModel(db.Model):
|
||||
|
@ -37,110 +33,5 @@ class WorkflowModel(db.Model):
|
|||
status = db.Column(db.Enum(WorkflowStatus))
|
||||
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
|
||||
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
|
||||
spec_version = db.Column(db.String)
|
||||
|
||||
class Task(object):
|
||||
def __init__(self, id, name, title, type, state, form, documentation, data):
|
||||
self.id = id
|
||||
self.name = name
|
||||
self.title = title
|
||||
self.type = type
|
||||
self.state = state
|
||||
self.form = form
|
||||
self.documentation = documentation
|
||||
self.data = data
|
||||
|
||||
@classmethod
|
||||
def from_spiff(cls, spiff_task):
|
||||
documentation = spiff_task.task_spec.documentation if hasattr(spiff_task.task_spec, "documentation") else ""
|
||||
instance = cls(spiff_task.id,
|
||||
spiff_task.task_spec.name,
|
||||
spiff_task.task_spec.description,
|
||||
spiff_task.task_spec.__class__.__name__,
|
||||
spiff_task.get_state_name(),
|
||||
None,
|
||||
documentation,
|
||||
spiff_task.data)
|
||||
if hasattr(spiff_task.task_spec, "form"):
|
||||
instance.form = spiff_task.task_spec.form
|
||||
if documentation != "" and documentation is not None:
|
||||
|
||||
instance.process_documentation(documentation)
|
||||
return instance
|
||||
|
||||
def process_documentation(self, documentation):
|
||||
'''Runs markdown documentation through the Jinja2 processor to inject data
|
||||
create loops, etc...'''
|
||||
|
||||
template = Template(documentation)
|
||||
self.documentation = template.render(**self.data)
|
||||
|
||||
|
||||
class OptionSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
class ValidationSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["name", "config"]
|
||||
|
||||
|
||||
class PropertiesSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "value"]
|
||||
|
||||
|
||||
class FormFieldSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = [
|
||||
"id", "type", "label", "default_value", "options", "validation", "properties", "value"
|
||||
]
|
||||
|
||||
default_value = marshmallow.fields.String(required=False, allow_none=True)
|
||||
options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema))
|
||||
validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema))
|
||||
properties = marshmallow.fields.List(marshmallow.fields.Nested(PropertiesSchema))
|
||||
|
||||
|
||||
class FormSchema(ma.Schema):
|
||||
key = marshmallow.fields.String(required=True, allow_none=False)
|
||||
fields = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldSchema))
|
||||
|
||||
|
||||
class TaskSchema(ma.Schema):
|
||||
class Meta:
|
||||
fields = ["id", "name", "title", "type", "state", "form", "documentation", "data"]
|
||||
|
||||
documentation = marshmallow.fields.String(required=False, allow_none=True)
|
||||
form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
|
||||
title = marshmallow.fields.String(required=False, allow_none=True)
|
||||
|
||||
@marshmallow.post_load
|
||||
def make_task(self, data, **kwargs):
|
||||
return Task(**data)
|
||||
|
||||
|
||||
class WorkflowApi(object):
|
||||
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id):
|
||||
self.id = id
|
||||
self.status = status
|
||||
self.user_tasks = user_tasks
|
||||
self.last_task = last_task
|
||||
self.next_task = next_task
|
||||
self.workflow_spec_id = workflow_spec_id
|
||||
|
||||
|
||||
class WorkflowApiSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowApi
|
||||
fields = ["id", "status", "user_tasks", "last_task", "next_task", "workflow_spec_id"]
|
||||
unknown = INCLUDE
|
||||
|
||||
status = EnumField(WorkflowStatus)
|
||||
user_tasks = marshmallow.fields.List(marshmallow.fields.Nested(TaskSchema, dump_only=True))
|
||||
last_task = marshmallow.fields.Nested(TaskSchema, dump_only=True)
|
||||
next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)
|
||||
|
||||
@marshmallow.post_load
|
||||
def make_workflow(self, data, **kwargs):
|
||||
return WorkflowApi(**data)
|
||||
|
|
|
@ -4,7 +4,7 @@ from jinja2 import UndefinedError
|
|||
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from docxtpl import DocxTemplate
|
||||
import jinja2
|
||||
|
@ -48,7 +48,7 @@ class CompleteTemplate(Script):
|
|||
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
|
||||
name=file_name,
|
||||
content_type=FileService.DOCX_MIME,
|
||||
content_type=CONTENT_TYPES['docx'],
|
||||
binary_data=final_document_stream.read())
|
||||
|
||||
print("Complete Task was called with %s" % str(args))
|
||||
|
|
|
@ -1,24 +1,33 @@
|
|||
import os
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
from xml.etree import ElementTree
|
||||
|
||||
from crc import session
|
||||
from crc.api.common import ApiErrorSchema, ApiError
|
||||
from crc.models.file import FileType, FileDataModel, FileModelSchema, FileModel
|
||||
from crc.models.file import FileType, FileDataModel, FileModelSchema, FileModel, CONTENT_TYPES
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
import hashlib
|
||||
|
||||
|
||||
class FileService(object):
|
||||
"""Provides consistent management and rules for storing, retrieving and processing files."""
|
||||
|
||||
DOCX_MIME = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
||||
|
||||
@staticmethod
|
||||
def add_workflow_spec_file(workflow_spec_id, name, content_type, binary_data):
|
||||
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
|
||||
name, content_type, binary_data, primary=False):
|
||||
"""Create a new file and associate it with a workflow spec."""
|
||||
file_model = FileModel(
|
||||
version=0,
|
||||
workflow_spec_id=workflow_spec_id,
|
||||
workflow_spec_id=workflow_spec.id,
|
||||
name=name,
|
||||
primary=primary
|
||||
)
|
||||
if primary:
|
||||
bpmn: ElementTree.Element = ElementTree.fromstring(binary_data)
|
||||
workflow_spec.primary_process_id = WorkflowProcessor.get_process_id(bpmn)
|
||||
print("Locating Process Id for " + name + " " + workflow_spec.primary_process_id)
|
||||
|
||||
return FileService.update_file(file_model, binary_data, content_type)
|
||||
|
||||
@staticmethod
|
||||
|
@ -38,7 +47,6 @@ class FileService(object):
|
|||
def add_task_file(study_id, workflow_id, task_id, name, content_type, binary_data):
|
||||
"""Create a new file and associate it with an executing task within a workflow."""
|
||||
file_model = FileModel(
|
||||
version=0,
|
||||
study_id=study_id,
|
||||
workflow_id=workflow_id,
|
||||
task_id=task_id,
|
||||
|
@ -49,9 +57,14 @@ class FileService(object):
|
|||
@staticmethod
|
||||
def update_file(file_model, binary_data, content_type):
|
||||
|
||||
file_model.version = file_model.version + 1
|
||||
file_model.last_updated = datetime.now()
|
||||
file_model.content_type = content_type
|
||||
file_data_model = session.query(FileDataModel).\
|
||||
filter_by(file_model_id=file_model.id,
|
||||
version=file_model.latest_version
|
||||
).with_for_update().first()
|
||||
md5_checksum = UUID(hashlib.md5(binary_data).hexdigest())
|
||||
if(file_data_model is not None and md5_checksum == file_data_model.md5_hash):
|
||||
# This file does not need to be updated, it's the same file.
|
||||
return file_model
|
||||
|
||||
# Verify the extension
|
||||
basename, file_extension = os.path.splitext(file_model.name)
|
||||
|
@ -62,12 +75,16 @@ class FileService(object):
|
|||
file_extension)), 404
|
||||
else:
|
||||
file_model.type = FileType[file_extension]
|
||||
file_model.content_type = content_type
|
||||
|
||||
file_data_model = session.query(FileDataModel).filter_by(file_model_id=file_model.id).with_for_update().first()
|
||||
if file_data_model is None:
|
||||
file_data_model = FileDataModel(data=binary_data, file_model=file_model)
|
||||
version = 1
|
||||
else:
|
||||
file_data_model.data = binary_data
|
||||
version = file_data_model.version + 1
|
||||
|
||||
file_model.latest_version = version
|
||||
file_data_model = FileDataModel(data=binary_data, file_model=file_model, version=version,
|
||||
md5_hash=md5_checksum)
|
||||
|
||||
session.add_all([file_model, file_data_model])
|
||||
session.commit()
|
||||
|
@ -94,4 +111,8 @@ class FileService(object):
|
|||
@staticmethod
|
||||
def get_file_data(file_id):
|
||||
"""Returns the file_data that is associated with the file model id"""
|
||||
return session.query(FileDataModel).filter(FileDataModel.file_model_id == file_id).first()
|
||||
file_model = session.query(FileModel).filter(FileModel.id == file_id).first()
|
||||
return session.query(FileDataModel)\
|
||||
.filter(FileDataModel.file_model_id == file_id)\
|
||||
.filter(FileDataModel.version == file_model.latest_version)\
|
||||
.first()
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import json
|
||||
import re
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
|
@ -10,7 +11,7 @@ from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
|
|||
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
|
||||
from SpiffWorkflow.operators import Operator
|
||||
|
||||
from crc import session
|
||||
from crc import session, db
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileDataModel, FileModel, FileType
|
||||
from crc.models.workflow import WorkflowStatus, WorkflowModel
|
||||
|
@ -90,24 +91,97 @@ class WorkflowProcessor(object):
|
|||
WORKFLOW_ID_KEY = "workflow_id"
|
||||
STUDY_ID_KEY = "study_id"
|
||||
|
||||
def __init__(self, workflow_spec_id, bpmn_json):
|
||||
wf_spec = self.get_spec(workflow_spec_id)
|
||||
self.workflow_spec_id = workflow_spec_id
|
||||
self.bpmn_workflow = self._serializer.deserialize_workflow(bpmn_json, workflow_spec=wf_spec)
|
||||
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False):
|
||||
"""Create a Workflow Processor based on the serialized information available in the workflow model.
|
||||
If soft_reset is set to true, it will try to use the latest version of the workflow specification.
|
||||
If hard_reset is set to true, it will create a new Workflow, but embed the data from the last
|
||||
completed task in the previous workflow.
|
||||
If neither flag is set, it will use the same version of the specification that was used to originally
|
||||
create the workflow model. """
|
||||
if soft_reset:
|
||||
spec = self.get_spec(workflow_model.workflow_spec_id)
|
||||
workflow_model.spec_version = spec.description
|
||||
else:
|
||||
spec = self.get_spec(workflow_model.workflow_spec_id, workflow_model.spec_version)
|
||||
|
||||
self.workflow_spec_id = workflow_model.workflow_spec_id
|
||||
self.bpmn_workflow = self._serializer.deserialize_workflow(workflow_model.bpmn_workflow_json, workflow_spec=spec)
|
||||
self.bpmn_workflow.script_engine = self._script_engine
|
||||
|
||||
if hard_reset:
|
||||
# Now that the spec is loaded, get the data and rebuild the bpmn with the new details
|
||||
workflow_model.spec_version = self.hard_reset()
|
||||
|
||||
@staticmethod
|
||||
def get_parser():
|
||||
parser = MyCustomParser()
|
||||
return parser
|
||||
|
||||
@staticmethod
|
||||
def get_spec(workflow_spec_id):
|
||||
def get_latest_version_string(workflow_spec_id):
|
||||
"""Version is in the format v[VERSION] (FILE_ID_LIST)
|
||||
For example, a single bpmn file with only one version would be
|
||||
v1 (12) Where 12 is the id of the file data model that is used to create the
|
||||
specification. If multiple files exist, they are added on in
|
||||
dot notation to both the version number and the file list. So
|
||||
a Spec that includes a BPMN, DMN, an a Word file all on the first
|
||||
version would be v1.1.1 (12.45.21)"""
|
||||
|
||||
# this could potentially become expensive to load all the data in the data models.
|
||||
# in which case we might consider using a deferred loader for the actual data, but
|
||||
# trying not to pre-optimize.
|
||||
file_data_models = WorkflowProcessor.__get_latest_file_models(workflow_spec_id)
|
||||
major_version = 0 # The version of the primary file.
|
||||
minor_version = [] # The versions of the minor files if any.
|
||||
file_ids = []
|
||||
for file_data in file_data_models:
|
||||
file_ids.append(file_data.id)
|
||||
if file_data.file_model.primary:
|
||||
major_version = file_data.version
|
||||
else:
|
||||
minor_version.append(file_data.version)
|
||||
minor_version.insert(0, major_version) # Add major version to beginning.
|
||||
version = ".".join(str(x) for x in minor_version)
|
||||
files = ".".join(str(x) for x in file_ids)
|
||||
full_version = "v%s (%s)" % (version, files)
|
||||
return full_version
|
||||
|
||||
@staticmethod
|
||||
def __get_file_models_for_version(workflow_spec_id, version):
|
||||
file_id_strings = re.findall('\((.*)\)', version)[0].split(".")
|
||||
file_ids = [int(i) for i in file_id_strings]
|
||||
files = session.query(FileDataModel)\
|
||||
.join(FileModel) \
|
||||
.filter(FileModel.workflow_spec_id == workflow_spec_id)\
|
||||
.filter(FileDataModel.id.in_(file_ids)).all()
|
||||
if len(files) != len(file_ids):
|
||||
raise ApiError("invalid_version",
|
||||
"The version '%s' of workflow specification '%s' is invalid. Unable to locate the correct files to recreate it." %
|
||||
(version, workflow_spec_id))
|
||||
return files
|
||||
|
||||
@staticmethod
|
||||
def __get_latest_file_models(workflow_spec_id):
|
||||
"""Returns all the latest files related to a workflow specification"""
|
||||
return session.query(FileDataModel) \
|
||||
.join(FileModel) \
|
||||
.filter(FileModel.workflow_spec_id == workflow_spec_id)\
|
||||
.filter(FileDataModel.version == FileModel.latest_version)\
|
||||
.order_by(FileModel.id)\
|
||||
.all()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_spec(workflow_spec_id, version=None):
|
||||
"""Returns the requested version of the specification,
|
||||
or the lastest version if none is specified."""
|
||||
parser = WorkflowProcessor.get_parser()
|
||||
process_id = None
|
||||
file_data_models = session.query(FileDataModel) \
|
||||
.join(FileModel) \
|
||||
.filter(FileModel.workflow_spec_id == workflow_spec_id).all()
|
||||
if version is None:
|
||||
file_data_models = WorkflowProcessor.__get_latest_file_models(workflow_spec_id)
|
||||
version = WorkflowProcessor.get_latest_version_string(workflow_spec_id)
|
||||
else:
|
||||
file_data_models = WorkflowProcessor.__get_file_models_for_version(workflow_spec_id, version)
|
||||
for file_data in file_data_models:
|
||||
if file_data.file_model.type == FileType.bpmn:
|
||||
bpmn: ElementTree.Element = ElementTree.fromstring(file_data.data)
|
||||
|
@ -119,8 +193,19 @@ class WorkflowProcessor(object):
|
|||
parser.add_dmn_xml(dmn, filename=file_data.file_model.name)
|
||||
if process_id is None:
|
||||
raise(Exception("There is no primary BPMN model defined for workflow %s" % workflow_spec_id))
|
||||
return parser.get_spec(process_id)
|
||||
spec = parser.get_spec(process_id)
|
||||
spec.description = version
|
||||
return spec
|
||||
|
||||
@staticmethod
|
||||
def status_of(bpmn_workflow):
|
||||
if bpmn_workflow.is_completed():
|
||||
return WorkflowStatus.complete
|
||||
user_tasks = bpmn_workflow.get_ready_user_tasks()
|
||||
if len(user_tasks) > 0:
|
||||
return WorkflowStatus.user_input_required
|
||||
else:
|
||||
return WorkflowStatus.waiting
|
||||
|
||||
@classmethod
|
||||
def create(cls, study_id, workflow_spec_id):
|
||||
|
@ -128,30 +213,45 @@ class WorkflowProcessor(object):
|
|||
bpmn_workflow = BpmnWorkflow(spec, script_engine=cls._script_engine)
|
||||
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study_id
|
||||
bpmn_workflow.do_engine_steps()
|
||||
json = cls._serializer.serialize_workflow(bpmn_workflow)
|
||||
processor = cls(workflow_spec_id, json)
|
||||
workflow_model = WorkflowModel(status=processor.get_status(),
|
||||
workflow_model = WorkflowModel(status=WorkflowProcessor.status_of(bpmn_workflow),
|
||||
study_id=study_id,
|
||||
workflow_spec_id=workflow_spec_id)
|
||||
workflow_spec_id=workflow_spec_id,
|
||||
spec_version=spec.description)
|
||||
session.add(workflow_model)
|
||||
session.commit()
|
||||
# Need to commit twice, first to get a unique id for the workflow model, and
|
||||
# a second time to store the serilaization so we can maintain this link within
|
||||
# the spiff-workflow process.
|
||||
processor.bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
|
||||
workflow_model.bpmn_workflow_json = processor.serialize()
|
||||
bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
|
||||
|
||||
workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(bpmn_workflow)
|
||||
session.add(workflow_model)
|
||||
session.commit()
|
||||
processor = cls(workflow_model)
|
||||
return processor
|
||||
|
||||
def hard_reset(self):
|
||||
"""Recreate this workflow, but keep the data from the last completed task and add it back into the first task.
|
||||
This may be useful when a workflow specification changes, and users need to review all the
|
||||
prior steps, but don't need to reenter all the previous data.
|
||||
|
||||
Returns the new version.
|
||||
"""
|
||||
spec = WorkflowProcessor.get_spec(self.workflow_spec_id)
|
||||
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
|
||||
bpmn_workflow.data = self.bpmn_workflow.data
|
||||
for task in bpmn_workflow.get_tasks(SpiffTask.READY):
|
||||
task.data = self.bpmn_workflow.last_task.data
|
||||
bpmn_workflow.do_engine_steps()
|
||||
self.bpmn_workflow = bpmn_workflow
|
||||
return spec.description
|
||||
|
||||
def get_status(self):
|
||||
if self.bpmn_workflow.is_completed():
|
||||
return WorkflowStatus.complete
|
||||
user_tasks = self.bpmn_workflow.get_ready_user_tasks()
|
||||
if len(user_tasks) > 0:
|
||||
return WorkflowStatus.user_input_required
|
||||
else:
|
||||
return WorkflowStatus.waiting
|
||||
return self.status_of(self.bpmn_workflow)
|
||||
|
||||
def get_spec_version(self):
|
||||
"""We use the spec's descrption field to store the version information"""
|
||||
return self.bpmn_workflow.spec.description
|
||||
|
||||
def do_engine_steps(self):
|
||||
self.bpmn_workflow.do_engine_steps()
|
||||
|
|
108
example_data.py
108
example_data.py
|
@ -4,16 +4,24 @@ import os
|
|||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
from crc import app, db, session
|
||||
from crc.models.file import FileType, FileModel, FileDataModel
|
||||
from crc.models.file import FileType, FileModel, FileDataModel, CONTENT_TYPES
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.user import UserModel
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.models.protocol_builder import ProtocolBuilderStatus
|
||||
|
||||
|
||||
class ExampleDataLoader:
|
||||
def make_data(self):
|
||||
@staticmethod
|
||||
def clean_db():
|
||||
session.flush() # Clear out any transactions before deleting it all to avoid spurious errors.
|
||||
for table in reversed(db.metadata.sorted_tables):
|
||||
session.execute(table.delete())
|
||||
session.flush()
|
||||
|
||||
def load_all(self):
|
||||
users = [
|
||||
UserModel(
|
||||
uid='dhf8r',
|
||||
|
@ -26,6 +34,8 @@ class ExampleDataLoader:
|
|||
title='SOFTWARE ENGINEER V'
|
||||
)
|
||||
]
|
||||
db.session.add_all(users)
|
||||
db.session.commit()
|
||||
|
||||
studies = [
|
||||
StudyModel(
|
||||
|
@ -49,89 +59,53 @@ class ExampleDataLoader:
|
|||
user_uid='dhf8r'
|
||||
),
|
||||
]
|
||||
db.session.add_all(studies)
|
||||
db.session.commit()
|
||||
|
||||
self.create_spec(id="crc2_training_session_enter_core_info",
|
||||
name="crc2_training_session_enter_core_info",
|
||||
display_name="CR Connect2 - Training Session - Core Info",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
self.create_spec(id="crc2_training_session_data_security_plan",
|
||||
name="crc2_training_session_data_security_plan",
|
||||
display_name="CR Connect2 - Training Session - Data Security Plan",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
self.create_spec(id="crc2_training_session_sponsor_funding_source",
|
||||
name="crc2_training_session_sponsor_funding_source",
|
||||
display_name="CR Connect2 - Training Session - Sponsor and/or Funding Source",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
|
||||
workflow_specifications = \
|
||||
self.create_spec(id="crc2_training_session_enter_core_info",
|
||||
name="crc2_training_session_enter_core_info",
|
||||
display_name="CR Connect2 - Training Session - Core Info",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
workflow_specifications += \
|
||||
self.create_spec(id="crc2_training_session_data_security_plan",
|
||||
name="crc2_training_session_data_security_plan",
|
||||
display_name="CR Connect2 - Training Session - Data Security Plan",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
workflow_specifications += \
|
||||
self.create_spec(id="crc2_training_session_sponsor_funding_source",
|
||||
name="crc2_training_session_sponsor_funding_source",
|
||||
display_name="CR Connect2 - Training Session - Sponsor and/or Funding Source",
|
||||
description='Part of Milestone 3 Deliverable')
|
||||
# workflow_specifications += \
|
||||
# self.create_spec(id="m2_demo",
|
||||
# name="m2_demo",
|
||||
# display_name="Milestone 2 Demo",
|
||||
# description='A simplified CR Connect workflow for demonstration purposes.')
|
||||
# workflow_specifications += \
|
||||
# self.create_spec(id="crc_study_workflow",
|
||||
# name="crc_study_workflow",
|
||||
# display_name="CR Connect Study Workflow",
|
||||
# description='Draft workflow for CR Connect studies.')
|
||||
all_data = users + studies + workflow_specifications
|
||||
return all_data
|
||||
|
||||
def create_spec(self, id, name, display_name="", description="", filepath=None):
|
||||
"""Assumes that a directory exists in static/bpmn with the same name as the given id.
|
||||
further assumes that the [id].bpmn is the primary file for the workflow.
|
||||
returns an array of data models to be added to the database."""
|
||||
models = []
|
||||
global file
|
||||
file_service = FileService()
|
||||
|
||||
spec = WorkflowSpecModel(id=id,
|
||||
name=name,
|
||||
display_name=display_name,
|
||||
description=description)
|
||||
models.append(spec)
|
||||
db.session.add(spec)
|
||||
db.session.commit()
|
||||
if not filepath:
|
||||
filepath = os.path.join(app.root_path, 'static', 'bpmn', id, "*")
|
||||
files = glob.glob(filepath)
|
||||
for file_path in files:
|
||||
noise, file_extension = os.path.splitext(file_path)
|
||||
filename = os.path.basename(file_path)
|
||||
if file_extension.lower() == '.bpmn':
|
||||
type = FileType.bpmn
|
||||
elif file_extension.lower() == '.dmn':
|
||||
type = FileType.dmn
|
||||
elif file_extension.lower() == '.svg':
|
||||
type = FileType.svg
|
||||
elif file_extension.lower() == '.docx':
|
||||
type = FileType.docx
|
||||
else:
|
||||
raise Exception("Unsupported file type:" + file_path)
|
||||
continue
|
||||
|
||||
is_primary = filename.lower() == id + ".bpmn"
|
||||
file_model = FileModel(name=filename, type=type, content_type='text/xml', version="1",
|
||||
last_updated=datetime.datetime.now(), primary=is_primary,
|
||||
workflow_spec_id=id)
|
||||
models.append(file_model)
|
||||
try:
|
||||
file = open(file_path, "rb")
|
||||
data = file.read()
|
||||
if (is_primary):
|
||||
bpmn: ElementTree.Element = ElementTree.fromstring(data)
|
||||
spec.primary_process_id = WorkflowProcessor.get_process_id(bpmn)
|
||||
print("Locating Process Id for " + filename + " " + spec.primary_process_id)
|
||||
models.append(FileDataModel(data=data, file_model=file_model))
|
||||
content_type = CONTENT_TYPES[file_extension[1:]]
|
||||
file_service.add_workflow_spec_file(workflow_spec=spec, name=filename, content_type=content_type,
|
||||
binary_data=data, primary=is_primary)
|
||||
except IsADirectoryError as de:
|
||||
# Ignore sub directories
|
||||
pass
|
||||
finally:
|
||||
file.close()
|
||||
return models
|
||||
|
||||
@staticmethod
|
||||
def clean_db():
|
||||
session.flush() # Clear out any transactions before deleting it all to avoid spurious errors.
|
||||
for table in reversed(db.metadata.sorted_tables):
|
||||
session.execute(table.delete())
|
||||
session.flush()
|
||||
|
||||
def load_all(self):
|
||||
for data in self.make_data():
|
||||
session.add(data)
|
||||
session.commit()
|
||||
session.flush()
|
||||
if file:
|
||||
file.close()
|
||||
return spec
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 1c6e4e179f8e
|
||||
Revision ID: 8856126b6658
|
||||
Revises:
|
||||
Create Date: 2020-03-03 15:51:45.550681
|
||||
Create Date: 2020-03-06 09:49:37.872516
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1c6e4e179f8e'
|
||||
revision = '8856126b6658'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
@ -61,6 +61,7 @@ def upgrade():
|
|||
sa.Column('status', sa.Enum('new', 'user_input_required', 'waiting', 'complete', name='workflowstatus'), nullable=True),
|
||||
sa.Column('study_id', sa.Integer(), nullable=True),
|
||||
sa.Column('workflow_spec_id', sa.String(), nullable=True),
|
||||
sa.Column('spec_version', sa.String(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['study_id'], ['study.id'], ),
|
||||
sa.ForeignKeyConstraint(['workflow_spec_id'], ['workflow_spec.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
|
@ -68,8 +69,6 @@ def upgrade():
|
|||
op.create_table('file',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(), nullable=True),
|
||||
sa.Column('version', sa.Integer(), nullable=True),
|
||||
sa.Column('last_updated', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('type', sa.Enum('bpmn', 'csv', 'dmn', 'doc', 'docx', 'gif', 'jpg', 'md', 'pdf', 'png', 'ppt', 'pptx', 'rtf', 'svg', 'svg_xml', 'txt', 'xls', 'xlsx', 'xml', 'zip', name='filetype'), nullable=True),
|
||||
sa.Column('primary', sa.Boolean(), nullable=True),
|
||||
sa.Column('content_type', sa.String(), nullable=True),
|
||||
|
@ -78,6 +77,7 @@ def upgrade():
|
|||
sa.Column('study_id', sa.Integer(), nullable=True),
|
||||
sa.Column('task_id', sa.String(), nullable=True),
|
||||
sa.Column('form_field_key', sa.String(), nullable=True),
|
||||
sa.Column('latest_version', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['study_id'], ['study.id'], ),
|
||||
sa.ForeignKeyConstraint(['workflow_id'], ['workflow.id'], ),
|
||||
sa.ForeignKeyConstraint(['workflow_spec_id'], ['workflow_spec.id'], ),
|
||||
|
@ -85,7 +85,10 @@ def upgrade():
|
|||
)
|
||||
op.create_table('file_data',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('md5_hash', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('data', sa.LargeBinary(), nullable=True),
|
||||
sa.Column('version', sa.Integer(), nullable=True),
|
||||
sa.Column('last_updated', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('file_model_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['file_model_id'], ['file.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
|
@ -5,9 +5,11 @@ import os
|
|||
import unittest
|
||||
import urllib.parse
|
||||
|
||||
from crc.services.file_service import FileService
|
||||
|
||||
os.environ["TESTING"] = "true"
|
||||
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from crc.models.user import UserModel
|
||||
|
||||
|
@ -92,15 +94,7 @@ class BaseTest(unittest.TestCase):
|
|||
if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
|
||||
return
|
||||
filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
|
||||
models = ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath)
|
||||
spec = None
|
||||
for model in models:
|
||||
if isinstance(model, WorkflowSpecModel):
|
||||
spec = model
|
||||
session.add(model)
|
||||
session.commit()
|
||||
session.flush()
|
||||
return spec
|
||||
return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath)
|
||||
|
||||
@staticmethod
|
||||
def protocol_builder_response(file_name):
|
||||
|
@ -137,3 +131,13 @@ class BaseTest(unittest.TestCase):
|
|||
return '?%s' % '&'.join(query_string_list)
|
||||
|
||||
|
||||
def replace_file(self, name, file_path):
|
||||
"""Replaces a stored file with the given name with the contents of the file at the given path."""
|
||||
file_service = FileService()
|
||||
file = open(file_path, "rb")
|
||||
data = file.read()
|
||||
|
||||
file_model = db.session.query(FileModel).filter(FileModel.name == name).first()
|
||||
noise, file_extension = os.path.splitext(file_path)
|
||||
content_type = CONTENT_TYPES[file_extension[1:]]
|
||||
file_service.update_file(file_model, data, content_type)
|
||||
|
|
|
@ -0,0 +1,71 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_06g9dcb" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
|
||||
<bpmn:process id="Process_1giz8il" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_0myefwb</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0myefwb" sourceRef="StartEvent_1" targetRef="StepOne" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_00p5po6" sourceRef="StepOne" targetRef="Task_1i59nh4" />
|
||||
<bpmn:endEvent id="EndEvent_1gsujvg">
|
||||
<bpmn:incoming>SequenceFlow_17ggqu2</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:userTask id="StepOne" name="Step 1" camunda:formKey="StepOneForm">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="color" label="What is your favorite color?" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0myefwb</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_00p5po6</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_17ggqu2" sourceRef="Task_1i59nh4" targetRef="EndEvent_1gsujvg" />
|
||||
<bpmn:userTask id="Task_1i59nh4" name="New Step" camunda:formKey="MyNewForm">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_01vbdk5" label="I forgot to ask you about this, what is your quest?" type="string" defaultValue="To seak the holy grail!" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_00p5po6</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_17ggqu2</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:textAnnotation id="TextAnnotation_1haj11l">
|
||||
<bpmn:text>We have a test that replaces tow_forms with this file, which adds a new step to the process. A breaking change.</bpmn:text>
|
||||
</bpmn:textAnnotation>
|
||||
<bpmn:association id="Association_02qm351" sourceRef="Task_1i59nh4" targetRef="TextAnnotation_1haj11l" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1giz8il">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="279" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0myefwb_di" bpmnElement="SequenceFlow_0myefwb">
|
||||
<di:waypoint x="215" y="297" />
|
||||
<di:waypoint x="270" y="297" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_00p5po6_di" bpmnElement="SequenceFlow_00p5po6">
|
||||
<di:waypoint x="370" y="297" />
|
||||
<di:waypoint x="420" y="297" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="EndEvent_1gsujvg_di" bpmnElement="EndEvent_1gsujvg">
|
||||
<dc:Bounds x="712" y="279" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1xakn8i_di" bpmnElement="StepOne">
|
||||
<dc:Bounds x="270" y="257" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_17ggqu2_di" bpmnElement="SequenceFlow_17ggqu2">
|
||||
<di:waypoint x="520" y="297" />
|
||||
<di:waypoint x="712" y="297" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="UserTask_1tw6x6h_di" bpmnElement="Task_1i59nh4">
|
||||
<dc:Bounds x="420" y="257" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="TextAnnotation_1haj11l_di" bpmnElement="TextAnnotation_1haj11l">
|
||||
<dc:Bounds x="540" y="80" width="169" height="136" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Association_02qm351_di" bpmnElement="Association_02qm351">
|
||||
<di:waypoint x="511" y="257" />
|
||||
<di:waypoint x="554" y="216" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -0,0 +1,72 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_06g9dcb" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
|
||||
<bpmn:process id="Process_1giz8il" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_0myefwb</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0myefwb" sourceRef="StartEvent_1" targetRef="StepOne" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_00p5po6" sourceRef="StepOne" targetRef="StepTwo" />
|
||||
<bpmn:endEvent id="EndEvent_1gsujvg">
|
||||
<bpmn:incoming>SequenceFlow_0huye14</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0huye14" sourceRef="StepTwo" targetRef="EndEvent_1gsujvg" />
|
||||
<bpmn:userTask id="StepOne" name="Step 1" camunda:formKey="StepOneForm">
|
||||
<bpmn:documentation># This is some documentation I wanted to add.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="color" label="What is your favorite color?" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0myefwb</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_00p5po6</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="StepTwo" name="Step 2" camunda:formKey="StepTwoForm">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="capital" label="What is the capital of Assyria?" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_00p5po6</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0huye14</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:textAnnotation id="TextAnnotation_0uiis6p">
|
||||
<bpmn:text>There is a minor text change to the description here.</bpmn:text>
|
||||
</bpmn:textAnnotation>
|
||||
<bpmn:association id="Association_1nt50pu" sourceRef="StepOne" targetRef="TextAnnotation_0uiis6p" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1giz8il">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="219" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0myefwb_di" bpmnElement="SequenceFlow_0myefwb">
|
||||
<di:waypoint x="215" y="237" />
|
||||
<di:waypoint x="270" y="237" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_00p5po6_di" bpmnElement="SequenceFlow_00p5po6">
|
||||
<di:waypoint x="370" y="237" />
|
||||
<di:waypoint x="430" y="237" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="EndEvent_1gsujvg_di" bpmnElement="EndEvent_1gsujvg">
|
||||
<dc:Bounds x="592" y="219" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0huye14_di" bpmnElement="SequenceFlow_0huye14">
|
||||
<di:waypoint x="530" y="237" />
|
||||
<di:waypoint x="592" y="237" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="UserTask_1xakn8i_di" bpmnElement="StepOne">
|
||||
<dc:Bounds x="270" y="197" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0fltcd6_di" bpmnElement="StepTwo">
|
||||
<dc:Bounds x="430" y="197" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="TextAnnotation_0uiis6p_di" bpmnElement="TextAnnotation_0uiis6p">
|
||||
<dc:Bounds x="370" y="80" width="100" height="82" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Association_1nt50pu_di" bpmnElement="Association_1nt50pu">
|
||||
<di:waypoint x="354" y="197" />
|
||||
<di:waypoint x="385" y="162" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -25,7 +25,7 @@ class TestFilesApi(BaseTest):
|
|||
def test_list_multiple_files_for_workflow_spec(self):
|
||||
self.load_example_data()
|
||||
spec = session.query(WorkflowSpecModel).first()
|
||||
svgFile = FileModel(name="test.svg", type=FileType.svg, version=1, last_updated=datetime.now(),
|
||||
svgFile = FileModel(name="test.svg", type=FileType.svg,
|
||||
primary=False, workflow_spec_id=spec.id)
|
||||
session.add(svgFile)
|
||||
session.flush()
|
||||
|
@ -47,7 +47,6 @@ class TestFilesApi(BaseTest):
|
|||
self.assertIsNotNone(rv.get_data())
|
||||
json_data = json.loads(rv.get_data(as_text=True))
|
||||
file = FileModelSchema().load(json_data, session=session)
|
||||
self.assertEqual(1, file.version)
|
||||
self.assertEqual(FileType.svg, file.type)
|
||||
self.assertFalse(file.primary)
|
||||
self.assertEqual("image/svg+xml", file.content_type)
|
||||
|
@ -89,13 +88,36 @@ class TestFilesApi(BaseTest):
|
|||
self.assertIsNotNone(rv.get_data())
|
||||
json_data = json.loads(rv.get_data(as_text=True))
|
||||
file = FileModelSchema().load(json_data, session=session)
|
||||
self.assertEqual(2, file.version)
|
||||
self.assertEqual(2, file.latest_version)
|
||||
self.assertEqual(FileType.bpmn, file.type)
|
||||
self.assertEqual("application/octet-stream", file.content_type)
|
||||
self.assertEqual(spec.id, file.workflow_spec_id)
|
||||
|
||||
data_model = session.query(FileDataModel).filter_by(file_model_id=file.id).first()
|
||||
self.assertEqual(b"hijklim", data_model.data)
|
||||
rv = self.app.get('/v1.0/file/%i/data' % file.id)
|
||||
self.assert_success(rv)
|
||||
data = rv.get_data()
|
||||
self.assertIsNotNone(data)
|
||||
self.assertEqual(b"hijklim", data)
|
||||
|
||||
def test_update_with_same_exact_data_does_not_increment_version(self):
|
||||
self.load_example_data()
|
||||
spec = session.query(WorkflowSpecModel).first()
|
||||
data = {}
|
||||
data['file'] = io.BytesIO(b"abcdef"), 'my_new_file.bpmn'
|
||||
rv = self.app.post('/v1.0/file?workflow_spec_id=%s' % spec.id, data=data, follow_redirects=True,
|
||||
content_type='multipart/form-data')
|
||||
self.assertIsNotNone(rv.get_data())
|
||||
json_data = json.loads(rv.get_data(as_text=True))
|
||||
file = FileModelSchema().load(json_data, session=session)
|
||||
self.assertEqual(1, file.latest_version)
|
||||
data['file'] = io.BytesIO(b"abcdef"), 'my_new_file.bpmn'
|
||||
rv = self.app.put('/v1.0/file/%i/data' % file.id, data=data, follow_redirects=True,
|
||||
content_type='multipart/form-data')
|
||||
self.assertIsNotNone(rv.get_data())
|
||||
json_data = json.loads(rv.get_data(as_text=True))
|
||||
file = FileModelSchema().load(json_data, session=session)
|
||||
self.assertEqual(1, file.latest_version)
|
||||
|
||||
|
||||
def test_get_file(self):
|
||||
self.load_example_data()
|
||||
|
|
|
@ -3,11 +3,11 @@ from datetime import datetime, timezone
|
|||
from unittest.mock import patch, Mock
|
||||
|
||||
from crc import session
|
||||
from crc.models.api_models import WorkflowApiSchema
|
||||
from crc.models.study import StudyModel, StudyModelSchema
|
||||
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
|
||||
ProtocolBuilderStudySchema
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus, \
|
||||
WorkflowApiSchema
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus
|
||||
from tests.base_test import BaseTest
|
||||
|
||||
|
||||
|
@ -155,3 +155,33 @@ class TestStudyApi(BaseTest):
|
|||
rv = self.app.delete('/v1.0/workflow/%i' % workflow.id)
|
||||
self.assert_success(rv)
|
||||
self.assertEqual(0, session.query(WorkflowModel).count())
|
||||
|
||||
def test_get_study_workflows(self):
|
||||
self.load_example_data()
|
||||
|
||||
# Should have no workflows to start
|
||||
study = session.query(StudyModel).first()
|
||||
response_before = self.app.get('/v1.0/study/%i/workflows' % study.id,
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers())
|
||||
self.assert_success(response_before)
|
||||
json_data_before = json.loads(response_before.get_data(as_text=True))
|
||||
workflows_before = WorkflowApiSchema(many=True).load(json_data_before)
|
||||
self.assertEqual(0, len(workflows_before))
|
||||
|
||||
# Add a workflow
|
||||
spec = session.query(WorkflowSpecModel).first()
|
||||
add_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(),
|
||||
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
|
||||
self.assert_success(add_response)
|
||||
|
||||
# Should have one workflow now
|
||||
response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers())
|
||||
self.assert_success(response_after)
|
||||
json_data_after = json.loads(response_after.get_data(as_text=True))
|
||||
workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
|
||||
self.assertEqual(1, len(workflows_after))
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import json
|
||||
from unittest.mock import patch
|
||||
import os
|
||||
|
||||
from crc import session
|
||||
from crc import session, app
|
||||
from crc.models.api_models import WorkflowApiSchema, Task
|
||||
from crc.models.file import FileModelSchema
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, \
|
||||
WorkflowApiSchema, WorkflowStatus, Task
|
||||
from crc.models.workflow import WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from tests.base_test import BaseTest
|
||||
|
||||
|
@ -25,8 +25,10 @@ class TestTasksApi(BaseTest):
|
|||
workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
|
||||
return workflow
|
||||
|
||||
def get_workflow_api(self, workflow):
|
||||
rv = self.app.get('/v1.0/workflow/%i' % workflow.id, content_type="application/json")
|
||||
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False):
|
||||
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
|
||||
(workflow.id, str(soft_reset), str(hard_reset)),
|
||||
content_type="application/json")
|
||||
json_data = json.loads(rv.get_data(as_text=True))
|
||||
workflow_api = WorkflowApiSchema().load(json_data)
|
||||
self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
|
||||
|
@ -203,6 +205,24 @@ class TestTasksApi(BaseTest):
|
|||
self.assertIsNotNone(workflow_api.next_task['documentation'])
|
||||
self.assertTrue("norris" in workflow_api.next_task['documentation'])
|
||||
|
||||
def test_load_workflow_from_outdated_spec(self):
|
||||
|
||||
# Start the basic two_forms workflow and complete a task.
|
||||
self.load_example_data()
|
||||
workflow = self.create_workflow('two_forms')
|
||||
workflow_api = self.get_workflow_api(workflow)
|
||||
self.complete_form(workflow, workflow_api.user_tasks[0], {"color": "blue"})
|
||||
self.assertTrue(workflow_api.is_latest_spec)
|
||||
|
||||
# response = ProtocolBuilderService.get_study_details(self.test_study_id)
|
||||
# Modify the specification, with a major change that alters the flow and can't be deserialized
|
||||
# effectively, if it uses the latest spec files.
|
||||
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn')
|
||||
self.replace_file("two_forms.bpmn", file_path)
|
||||
|
||||
workflow_api = self.get_workflow_api(workflow)
|
||||
self.assertTrue(workflow_api.spec_version.startswith("v1 "))
|
||||
self.assertFalse(workflow_api.is_latest_spec)
|
||||
|
||||
workflow_api = self.get_workflow_api(workflow, hard_reset=True)
|
||||
self.assertTrue(workflow_api.spec_version.startswith("v2 "))
|
||||
self.assertTrue(workflow_api.is_latest_spec)
|
||||
|
|
|
@ -1,12 +1,7 @@
|
|||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from crc import session, app
|
||||
from crc.models.study import StudyModel, StudyModelSchema
|
||||
from crc.models.protocol_builder import ProtocolBuilderStatus
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus, \
|
||||
WorkflowApiSchema
|
||||
from crc import app
|
||||
from tests.base_test import BaseTest
|
||||
|
||||
|
||||
|
|
|
@ -1,14 +1,16 @@
|
|||
import os
|
||||
import string
|
||||
import random
|
||||
from unittest.mock import patch
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
|
||||
|
||||
from crc import session
|
||||
from crc import session, db, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus, WorkflowModel
|
||||
from crc.services.file_service import FileService
|
||||
from tests.base_test import BaseTest
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
|
||||
|
@ -214,3 +216,110 @@ class TestWorkflowProcessor(BaseTest):
|
|||
self.assertIn("last_updated", task.data["study"]["info"])
|
||||
self.assertIn("sponsor", task.data["study"]["info"])
|
||||
|
||||
def test_spec_versioning(self):
|
||||
self.load_example_data()
|
||||
study = session.query(StudyModel).first()
|
||||
workflow_spec_model = self.load_test_spec("decision_table")
|
||||
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
|
||||
self.assertTrue(processor.get_spec_version().startswith('v1.1'))
|
||||
file_service = FileService()
|
||||
|
||||
file_service.add_workflow_spec_file(workflow_spec_model, "new_file.txt", "txt", b'blahblah')
|
||||
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
|
||||
self.assertTrue(processor.get_spec_version().startswith('v1.1.1'))
|
||||
|
||||
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'docx', 'docx.bpmn')
|
||||
file = open(file_path, "rb")
|
||||
data = file.read()
|
||||
|
||||
file_model = db.session.query(FileModel).filter(FileModel.name == "decision_table.bpmn").first()
|
||||
file_service.update_file(file_model, data, "txt")
|
||||
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
|
||||
self.assertTrue(processor.get_spec_version().startswith('v2.1.1'))
|
||||
|
||||
def test_restart_workflow(self):
    """After completing a task, a hard reset puts the workflow back at its
    first task while preserving the data entered so far."""
    self.load_example_data()
    first_study = session.query(StudyModel).first()
    spec = self.load_test_spec("two_forms")
    processor = WorkflowProcessor.create(first_study.id, spec.id)
    workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == first_study.id).first()
    self.assertEqual(workflow_model.workflow_spec_id, spec.id)

    # Advance past the first task, entering some data along the way.
    task = processor.next_task()
    task.data = {"key": "Value"}
    processor.complete_task(task)
    task_before_restart = processor.next_task()

    # Reset, then see where the workflow landed.
    processor.hard_reset()
    task_after_restart = processor.next_task()

    # We moved off the first task before the reset, and back onto it after,
    # with the entered data intact.
    self.assertNotEqual(task.get_name(), task_before_restart.get_name())
    self.assertEqual(task.get_name(), task_after_restart.get_name())
    self.assertEqual(task.data, task_after_restart.data)
|
||||
|
||||
def test_soft_reset(self):
    """A soft reset picks up minor (text-only) spec modifications that a
    plain deserialization of the saved workflow would not."""
    self.load_example_data()

    # Start the two_forms workflow, and enter some data in the first form.
    study = session.query(StudyModel).first()
    workflow_spec_model = self.load_test_spec("two_forms")
    processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
    workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
    self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
    task = processor.next_task()
    task.data = {"color": "blue"}
    processor.complete_task(task)

    # Modify the specification, with a minor text change.
    file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_text_mod.bpmn')
    self.replace_file("two_forms.bpmn", file_path)

    # Setting up another processor should not error out, but doesn't pick up the update.
    # (assertEquals/assertNotEquals are deprecated aliases removed in Python 3.12;
    # use assertEqual/assertNotEqual.)
    workflow_model.bpmn_workflow_json = processor.serialize()
    processor2 = WorkflowProcessor(workflow_model)
    self.assertEqual("Step 1", processor2.bpmn_workflow.last_task.task_spec.description)
    self.assertNotEqual("# This is some documentation I wanted to add.",
                        processor2.bpmn_workflow.last_task.task_spec.documentation)

    # You can do a soft update and get the right response.
    processor3 = WorkflowProcessor(workflow_model, soft_reset=True)
    self.assertEqual("Step 1", processor3.bpmn_workflow.last_task.task_spec.description)
    self.assertEqual("# This is some documentation I wanted to add.",
                     processor3.bpmn_workflow.last_task.task_spec.documentation)
|
||||
|
||||
|
||||
|
||||
def test_hard_reset(self):
    """A hard reset rebuilds the workflow from the latest spec — even after a
    structural change — returning to the first step but retaining task data."""
    self.load_example_data()

    # Start the two_forms workflow, and enter some data in the first form.
    study = session.query(StudyModel).first()
    workflow_spec_model = self.load_test_spec("two_forms")
    processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
    workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
    self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
    task = processor.next_task()
    task.data = {"color": "blue"}
    processor.complete_task(task)
    next_task = processor.next_task()
    # (assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.)
    self.assertEqual("Step 2", next_task.task_spec.description)

    # Modify the specification, with a major change that alters the flow and can't be serialized effectively.
    file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn')
    self.replace_file("two_forms.bpmn", file_path)

    # Assure that creating a new processor doesn't cause any issues, and maintains the spec version.
    workflow_model.bpmn_workflow_json = processor.serialize()
    processor2 = WorkflowProcessor(workflow_model)
    self.assertTrue(processor2.get_spec_version().startswith("v1 "))  # Still at version 1.

    # Do a hard reset, which should bring us back to the beginning, but retain the data.
    processor3 = WorkflowProcessor(workflow_model, hard_reset=True)
    self.assertEqual("Step 1", processor3.next_task().task_spec.description)
    self.assertEqual({"color": "blue"}, processor3.next_task().data)
    processor3.complete_task(processor3.next_task())
    self.assertEqual("New Step", processor3.next_task().task_spec.description)
    self.assertEqual({"color": "blue"}, processor3.next_task().data)
|
||||
|
||||
def test_get_latest_spec_version(self):
    """get_latest_version_string reports the version of a loaded spec."""
    # The spec must be loaded first; the returned model itself is unused,
    # so don't bind it (the original left an unused local).
    self.load_test_spec("two_forms")
    version = WorkflowProcessor.get_latest_version_string("two_forms")
    self.assertTrue(version.startswith("v1 "))
|
||||
|
|
Loading…
Reference in New Issue