Merge branch 'dev' into chore/variables-in-forms-550

alicia pritchett 2022-02-17 15:04:41 -05:00
commit a5acd9b07e
25 changed files with 285 additions and 168 deletions

Pipfile.lock generated
View File

@@ -1266,7 +1266,7 @@
},
"spiffworkflow": {
"git": "https://github.com/sartography/SpiffWorkflow",
"ref": "f857886e718043619807cc3ec836152dff7d31ac"
"ref": "747b0a9cafeb2900264dbc5235c01c2386c55bd1"
},
"sqlalchemy": {
"hashes": [

View File

@@ -79,11 +79,11 @@ GITHUB_REPO = environ.get('GITHUB_REPO', None)
TARGET_BRANCH = environ.get('TARGET_BRANCH', None)
# Git settings, used by git_service
# The above GitHub settings, used in file_service, will likely be deprecated.
# You can override these settings in instance/config
GIT_REMOTE_PATH = environ.get('GIT_REMOTE_PATH', None)
GIT_BRANCH = environ.get('GIT_BRANCH', None)
GIT_MERGE_BRANCH = environ.get('GIT_MERGE_BRANCH', None) # Developers can set this to 'all' in instance.config
# Among other things, we use these to build a remote URL like https://username:password@host/path.git
GIT_REMOTE_SERVER = environ.get('GIT_REMOTE_SERVER', None) # example: 'github.com'
GIT_REMOTE_PATH = environ.get('GIT_REMOTE_PATH', None) # example: 'sartography/crconnect-workflow-specs'
GIT_BRANCH = environ.get('GIT_BRANCH', None) # example: 'main'
GIT_MERGE_BRANCH = environ.get('GIT_MERGE_BRANCH', None) # example: 'staging'
# Email configuration
DEFAULT_SENDER = 'uvacrconnect@virginia.edu'
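
Taken together, these Git settings are what git_service uses to build an authenticated remote URL (see the get_remote_url change further down in this commit). A minimal sketch with example values only; GIT_USER_NAME and GIT_USER_PASS are assumed to be configured alongside the settings above:

# Sketch: how the settings above combine into a remote URL (all values are examples).
GIT_REMOTE_SERVER = 'github.com'
GIT_REMOTE_PATH = 'sartography/crconnect-workflow-specs'
GIT_USER_NAME = 'some_user'       # assumption: set elsewhere in this config
GIT_USER_PASS = 'a_github_token'  # a GitHub-generated token, not the site password
remote_url = f"https://{GIT_USER_NAME}:{GIT_USER_PASS}@{GIT_REMOTE_SERVER}/{GIT_REMOTE_PATH}.git"
# -> 'https://some_user:a_github_token@github.com/sartography/crconnect-workflow-specs.git'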

View File

@@ -67,6 +67,7 @@ def process_waiting_tasks():
def init_scheduler():
if app.config['PROCESS_WAITING_TASKS']:
scheduler.add_job(process_waiting_tasks, 'interval', minutes=1)
scheduler.add_job(WorkflowService().process_erroring_workflows, 'interval', minutes=1440)  # 1440 minutes == 24 hours, i.e. run daily
scheduler.start()
@@ -121,8 +122,8 @@ def validate_all(study_id, category=None, spec_id=None):
"""Step through all the local workflows and validate them, returning any errors. This may take forever.
Please provide a real study id to use for validation, an optional category can be specified to only validate
that category, and you can further specify a specific spec, if needed."""
from crc.models.workflow import WorkflowSpecModel
from crc.services.workflow_service import WorkflowService
from crc.services.workflow_spec_service import WorkflowSpecService
from crc.api.common import ApiError
from crc.models.study import StudyModel
from crc.models.user import UserModel
@@ -131,7 +132,7 @@ def validate_all(study_id, category=None, spec_id=None):
study = session.query(StudyModel).filter(StudyModel.id == study_id).first()
g.user = session.query(UserModel).filter(UserModel.uid == study.user_uid).first()
g.token = "anything_is_fine_just_need_something."
specs = session.query(WorkflowSpecModel).all()
specs = WorkflowSpecService.get_specs()
for spec in specs:
if spec_id and spec_id != spec.id:
continue
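
For context, the docstring above describes three ways to call validate_all; a minimal usage sketch (assuming a Flask shell or management command where validate_all is in scope):

validate_all(study_id=1)                       # validate every local spec against study 1
validate_all(study_id=1, category='core')      # only validate specs in one category
validate_all(study_id=1, spec_id='some_spec')  # validate a single spec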

View File

@@ -2381,10 +2381,9 @@ components:
merge_branch:
type: string
example: staging
# status:
# type: string
# example: staging
changes:
type: array
example: ['file_1.txt', 'file_2.txt']
untracked:
type: array
example: ['a_file.txt', 'b_file.txt']

View File

@@ -214,8 +214,12 @@ def restart_workflow(workflow_id, clear_data=False, delete_files=False):
"""Restart a workflow with the latest spec.
Clear data allows the user to restart the workflow without previous data."""
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
WorkflowProcessor.reset(workflow_model, clear_data=clear_data, delete_files=delete_files)
return get_workflow(workflow_model.id)
processor = WorkflowProcessor.reset(workflow_model, clear_data=clear_data, delete_files=delete_files)
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def get_task_events(action = None, workflow = None, study = None):
@@ -254,6 +258,9 @@ def set_current_task(workflow_id, task_id):
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
if not spiff_task:
# An invalid task_id was requested.
raise ApiError("invalid_task", "The Task you requested no longer exists as a part of this workflow.")
_verify_user_and_role(processor, spiff_task)
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:

View File

@@ -182,8 +182,8 @@ class LookupFileModel(db.Model):
task_spec_id = db.Column(db.String)
field_id = db.Column(db.String)
file_name = db.Column(db.String)
file_timestamp = db.Column(db.FLOAT)  # The file system's timestamp, used to check for changes to the file.
is_ldap = db.Column(db.Boolean) # Allows us to run an ldap query instead of a db lookup.
last_updated = db.Column(db.DateTime(timezone=True))
dependencies = db.relationship("LookupDataModel", lazy="select", backref="lookup_file_model",
cascade="all, delete, delete-orphan")

View File

@@ -63,7 +63,6 @@ class DocumentService(object):
expand = file.workflow_id == int(workflow_id)
else:
expand = False
print(expand)
categories = [x for x in [doc_code['category1'], doc_code['category2'], doc_code['category3'], file] if x]
DocumentService.ensure_exists(directory, categories, expanded=expand)
return directory
@@ -94,8 +93,6 @@ class DocumentService(object):
new_level.expanded = expanded
output.append(new_level)
DocumentService.ensure_exists(new_level.children, categories[1:], expanded)
else:
print("Found it")
else:
new_level = DocumentDirectory(file=current_item)
new_level.expanded = expanded

View File

@@ -2,6 +2,8 @@ import datetime
import os
from typing import List
import pytz
from crc import app
from crc.api.common import ApiError
from crc.models.file import FileType, CONTENT_TYPES, File
@@ -82,11 +84,17 @@ class FileSystemService(object):
'The file you provided does not have an accepted extension:' +
file_extension, status_code=404)
@staticmethod
def _timestamp(file_path: str):
return os.path.getmtime(file_path)
@staticmethod
def _last_modified(file_path: str):
# Returns the last modified date of the given file as a timezone-aware UTC datetime.
timestamp = os.path.getmtime(file_path)
return datetime.datetime.fromtimestamp(timestamp)
utc_dt = datetime.datetime.utcfromtimestamp(timestamp)
aware_utc_dt = utc_dt.replace(tzinfo=pytz.utc)
return aware_utc_dt
@staticmethod
def file_type(file_name):
@@ -129,6 +137,6 @@ class FileSystemService(object):
raise ApiError("invalid_type", "Invalid File Type: %s, for file %s" % (extension, item.name))
stats = item.stat()
file_size = stats.st_size
last_modified = datetime.datetime.fromtimestamp(stats.st_mtime)
last_modified = FileSystemService._last_modified(item.path)
return File.from_file_system(item.name, file_type, content_type, last_modified, file_size)
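
A minimal sketch contrasting the two accessors added above: _timestamp() returns the raw float mtime (used elsewhere in this commit as a lookup-cache key), while _last_modified() now returns a timezone-aware UTC datetime rather than a naive local one. The module path and file path are assumptions:

import datetime

import pytz

from crc.services.file_system_service import FileSystemService  # assumed module path

path = '/tmp/example.txt'  # hypothetical file
open(path, 'w').close()    # ensure it exists for the sketch
raw = FileSystemService._timestamp(path)     # float seconds since the epoch (os.path.getmtime)
dt = FileSystemService._last_modified(path)  # timezone-aware UTC datetime
assert dt == datetime.datetime.utcfromtimestamp(raw).replace(tzinfo=pytz.utc)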

View File

@@ -29,11 +29,10 @@ class GitService(object):
@staticmethod
def get_remote_url(remote_path):
# We use GitHub.
# Note that the 'password' is a token generated by GitHub, not the site password.
host = app.config['GIT_REMOTE_SERVER']
username = app.config["GIT_USER_NAME"]
password = app.config["GIT_USER_PASS"]
remote_url = f"https://{username}:{password}@github.com/{remote_path}.git"
remote_url = f"https://{username}:{password}@{host}/{remote_path}.git"
return remote_url
@staticmethod

View File

@@ -52,14 +52,16 @@ class LookupService(object):
@staticmethod
def get_lookup_model_for_reference(file_name, value_column, label_column):
timestamp = ReferenceFileService().timestamp(file_name)
lookup_model = db.session.query(LookupFileModel).\
filter(LookupFileModel.file_name == file_name). \
filter(LookupFileModel.workflow_spec_id == None).\
filter(LookupFileModel.file_timestamp == timestamp).\
first()  # Use "==" here; "is None" does NOT work in a SQLAlchemy filter, which makes this lookup constantly expensive.
if not lookup_model:
logging.warning("!!!! Making a very expensive call to update the lookup model.")
file_data = ReferenceFileService().get_data(file_name)
lookup_model = LookupService.build_lookup_table(file_name, file_data, value_column, label_column)
lookup_model = LookupService.build_lookup_table(file_name, file_data, timestamp, value_column, label_column)
return lookup_model
@staticmethod
@@ -76,10 +78,12 @@ class LookupService(object):
if lookup_model:
if lookup_model.is_ldap: # LDAP is always current
is_current = True
else:
elif lookup_model.file_name is not None and lookup_model.file_timestamp is not None:
# In some legacy cases, the lookup model might exist, but not have a file name, in which case we need
# to rebuild.
workflow_spec = WorkflowSpecService().get_spec(workflow.workflow_spec_id)
current_date = SpecFileService.last_modified(workflow_spec, lookup_model.file_name)
is_current = current_date == lookup_model.last_updated
timestamp = SpecFileService.timestamp(workflow_spec, lookup_model.file_name)
is_current = timestamp == lookup_model.file_timestamp
if not is_current:
# Very, very expensive, but we don't know we need this till we do.
@@ -144,8 +148,9 @@ class LookupService(object):
file = latest_files[0]
file_data = SpecFileService().get_data(workflow_spec, file_name)
timestamp = SpecFileService.timestamp(workflow_spec, file_name)
lookup_model = LookupService.build_lookup_table(file_name, file_data, value_column, label_column,
lookup_model = LookupService.build_lookup_table(file_name, file_data, timestamp, value_column, label_column,
workflow_model.workflow_spec_id, task_spec_id, field_id)
# Use the results of an LDAP request to populate enum field options
@@ -164,7 +169,7 @@ class LookupService(object):
return lookup_model
@staticmethod
def build_lookup_table(file_name, file_data, value_column, label_column,
def build_lookup_table(file_name, file_data, timestamp, value_column, label_column,
workflow_spec_id=None, task_spec_id=None, field_id=None):
""" In some cases the lookup table can be very large. This method will add all values to the database
in a way that can be searched and returned via an api call - rather than sending the full set of
@@ -200,6 +205,7 @@ class LookupService(object):
field_id=field_id,
task_spec_id=task_spec_id,
file_name=file_name,
file_timestamp=timestamp,
is_ldap=False)
db.session.add(lookup_model)
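
The net effect of these changes is a float-mtime cache key. A minimal sketch of the contract, using only names that appear in this diff (the wrapper function itself is hypothetical):

from crc.services.lookup_service import LookupService
from crc.services.spec_file_service import SpecFileService

def rebuild_if_stale(workflow_spec, file_name, value_column, label_column, lookup_model=None):
    """Sketch: rebuild a lookup table only when the source file's mtime has changed."""
    timestamp = SpecFileService.timestamp(workflow_spec, file_name)  # float mtime
    if lookup_model and lookup_model.file_timestamp == timestamp:
        return lookup_model  # cache is current; skip the very expensive rebuild
    file_data = SpecFileService().get_data(workflow_spec, file_name)
    return LookupService.build_lookup_table(file_name, file_data, timestamp,
                                            value_column, label_column)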

View File

@@ -73,3 +73,11 @@ class ReferenceFileService(FileSystemService):
def delete(file_name):
file_path = ReferenceFileService.file_path(file_name)
os.remove(file_path)
@staticmethod
def last_modified(file_name):
return FileSystemService._last_modified(ReferenceFileService.file_path(file_name))
@staticmethod
def timestamp(file_name):
return FileSystemService._timestamp(ReferenceFileService.file_path(file_name))

View File

@@ -76,6 +76,11 @@ class SpecFileService(FileSystemService):
path = SpecFileService.file_path(spec, file_name)
return FileSystemService._last_modified(path)
@staticmethod
def timestamp(spec: WorkflowSpecInfo, file_name: str):
path = SpecFileService.file_path(spec, file_name)
return FileSystemService._timestamp(path)
@staticmethod
def delete_file(spec, file_name):
# Fixme: Remember to remove the lookup files when the spec file is removed.

View File

@@ -10,6 +10,7 @@ from ldap3.core.exceptions import LDAPSocketOpenError
from crc import db, session, app
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
from crc.models.email import EmailModel
from crc.models.file import FileModel, File, FileSchema, FileDataModel
from crc.models.ldap import LdapSchema
@@ -228,7 +229,6 @@ class StudyService(object):
session.query(StudyAssociated).filter_by(study_id=study_id).delete()
session.query(EmailModel).filter_by(study_id=study_id).delete()
session.query(StudyEvent).filter_by(study_id=study_id).delete()
for workflow in session.query(WorkflowModel).filter_by(study_id=study_id):
StudyService.delete_workflow(workflow.id)
study = session.query(StudyModel).filter_by(id=study_id).first()
@@ -244,6 +244,7 @@ class StudyService(object):
session.query(TaskEventModel).filter_by(workflow_id=workflow.id).delete()
files = session.query(FileModel).filter_by(workflow_id=workflow_id).all()
for file in files:
session.query(DataStoreModel).filter(DataStoreModel.file_id == file.id).delete()
session.query(FileDataModel).filter(FileDataModel.file_model_id == file.id).delete()
session.delete(file)

View File

@@ -195,66 +195,6 @@ class UserFileService(object):
app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))
raise ApiError('Delete Failed', "Unable to delete file. ")
@staticmethod
def get_repo_branches():
gh_token = app.config['GITHUB_TOKEN']
github_repo = app.config['GITHUB_REPO']
_github = Github(gh_token)
repo = _github.get_user().get_repo(github_repo)
branches = [branch.name for branch in repo.get_branches()]
return branches
@staticmethod
def update_from_github(file_ids, source_target=GithubObject.NotSet):
gh_token = app.config['GITHUB_TOKEN']
github_repo = app.config['GITHUB_REPO']
_github = Github(gh_token)
repo = _github.get_user().get_repo(github_repo)
for file_id in file_ids:
file_data_model = FileDataModel.query.filter_by(
file_model_id=file_id
).order_by(
desc(FileDataModel.version)
).first()
try:
repo_file = repo.get_contents(file_data_model.file_model.name, ref=source_target)
except UnknownObjectException:
return {'error': 'Attempted to update from repository but file was not present'}
else:
file_data_model.data = repo_file.decoded_content
session.add(file_data_model)
session.commit()
@staticmethod
def publish_to_github(file_ids):
target_branch = app.config['TARGET_BRANCH'] if app.config['TARGET_BRANCH'] else GithubObject.NotSet
gh_token = app.config['GITHUB_TOKEN']
github_repo = app.config['GITHUB_REPO']
_github = Github(gh_token)
repo = _github.get_user().get_repo(github_repo)
for file_id in file_ids:
file_data_model = FileDataModel.query.filter_by(file_model_id=file_id).first()
try:
repo_file = repo.get_contents(file_data_model.file_model.name, ref=target_branch)
except UnknownObjectException:
repo.create_file(
path=file_data_model.file_model.name,
message=f'Creating {file_data_model.file_model.name}',
content=file_data_model.data,
branch=target_branch
)
return {'created': True}
else:
updated = repo.update_file(
path=repo_file.path,
message=f'Updating {file_data_model.file_model.name}',
content=file_data_model.data + b'brah-model',
sha=repo_file.sha,
branch=target_branch
)
return {'updated': True}
@staticmethod
def dmn_from_spreadsheet(ss_data):

View File

@@ -145,7 +145,6 @@ class WorkflowProcessor(object):
@staticmethod
def reset(workflow_model, clear_data=False, delete_files=False):
print('WorkflowProcessor: reset: ')
# Try to execute a cancel notify
try:

View File

@@ -41,6 +41,9 @@ from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_spec_service import WorkflowSpecService
from flask import request
from sentry_sdk import capture_message, push_scope
class WorkflowService(object):
TASK_ACTION_COMPLETE = "COMPLETE"
@@ -124,6 +127,53 @@ class WorkflowService(object):
workflow_model.study_id,
str(e)))
@staticmethod
def get_erroring_workflows():
workflows = session.query(WorkflowModel).filter(WorkflowModel.status==WorkflowStatus.erroring).all()
return workflows
@staticmethod
def get_workflow_url(workflow):
base_url = app.config['FRONTEND']
workflow_url = f'https://{base_url}/workflow/{workflow.id}'
return workflow_url
def process_erroring_workflows(self):
workflows = self.get_erroring_workflows()
if len(workflows) > 0:
workflow_urls = []
if len(workflows) == 1:
workflow = workflows[0]
workflow_url_link = self.get_workflow_url(workflow)
workflow_urls.append(workflow_url_link)
message = 'There is one workflow in an error state.'
message += f'\n You can restart the workflow at {workflow_url_link}.'
else:
message = f'There are {len(workflows)} workflows in an error state.'
message += '\nYou can restart the workflows at these URLs:'
for workflow in workflows:
workflow_url_link = self.get_workflow_url(workflow)
workflow_urls.append(workflow_url_link)
message += f'\n{workflow_url_link}'
with push_scope() as scope:
scope.user = {"urls": workflow_urls}
scope.set_extra("workflow_urls", workflow_urls)
# This sends a message through Sentry.
capture_message(message)
# We return the message so we can check it in a test.
return message
@staticmethod
def raise_if_disabled(spec_id, study_id):
"""Raise an exception of the workflow is not enabled and can not be executed."""
if study_id is not None:
study_model = session.query(StudyModel).filter(StudyModel.id == study_id).first()
spec_model = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == spec_id).first()
status = StudyService._get_study_status(study_model)
if spec_model.id in status and status[spec_model.id]['status'] == 'disabled':
raise ApiError(code='disabled_workflow', message=f"This workflow is disabled. {status[spec_model.id]['message']}")
@staticmethod
@timeit
def test_spec(spec_id, validate_study_id=None, test_until=None, required_only=False):
@@ -527,6 +577,8 @@ class WorkflowService(object):
return FileSchema().dump(file)
elif field.type == 'files':
return random.randrange(1, 100)
elif field.type == 'date':
return datetime.utcnow()
else:
return WorkflowService._random_string()
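
The new 'date' branch fixes validation seeding: a random string cannot survive the strptime call that date-handling scripts perform (see the date_validation spec and test added below). A minimal illustration, with the format string taken from that spec:

import datetime

fmt = '%Y-%m-%dT%H:%M:%S.%fZ'
seeded = datetime.datetime.utcnow()  # what validation now seeds for 'date' fields
as_string = seeded.strftime(fmt)     # serializes cleanly for downstream scripts
datetime.datetime.strptime(as_string, fmt)  # round-trips without error
# datetime.datetime.strptime('lqvbea', fmt) would raise ValueError, the old failure mode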

View File

@@ -0,0 +1,30 @@
"""empty message
Revision ID: 3c56c894ff5c
Revises: 29bad12c9945
Create Date: 2022-02-17 11:52:52.335700
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3c56c894ff5c'
down_revision = '29bad12c9945'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('lookup_file', sa.Column('file_timestamp', sa.FLOAT(), nullable=True))
op.drop_column('lookup_file', 'last_updated')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('lookup_file', sa.Column('last_updated', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('lookup_file', 'file_timestamp')
# ### end Alembic commands ###
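
This migration swaps a DateTime column for the raw float returned by os.path.getmtime(), so cache-freshness checks become exact float equality. A minimal sketch (file path hypothetical):

import os

mtime = os.path.getmtime('/tmp/sponsors.xlsx')  # float with sub-second precision
# Stored as lookup_file.file_timestamp (FLOAT); the cached table is current
# while lookup_model.file_timestamp == os.path.getmtime(path) still holds.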

View File

@@ -258,54 +258,13 @@ class ToFilesystemService(object):
def upgrade():
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowSpecCategoryModel, \
WorkflowSpecCategoryModelSchema
""""""
bind = op.get_bind()
session = sa.orm.Session(bind=bind)
op.drop_table('workflow_spec_dependency_file')
op.add_column('lookup_file', sa.Column('file_model_id', sa.Integer(), nullable=True))
op.add_column('lookup_file', sa.Column('last_updated', sa.DateTime(), nullable=True))
op.create_foreign_key(None, 'lookup_file', 'file', ['file_model_id'], ['id'])
processed_files = []
location = SpecFileService.get_sync_file_root()
if os.path.exists(location):
rmtree(location)
# Process workflow spec files
files = session.query(FileModel).filter(FileModel.workflow_spec_id is not None).all()
for file in files:
if file.archived is not True:
ToFilesystemService().write_file_to_system(session, file, location)
processed_files.append(file.id)
# Process reference files
# get_reference_files only returns files where archived is False
reference_files = ReferenceFileService.get_reference_files()
for reference_file in reference_files:
ToFilesystemService().write_file_to_system(session, reference_file, location)
processed_files.append(reference_file.id)
session.flush()
lookups = session.query(LookupFileModel).all()
for lookup in lookups:
session.delete(lookup)
session.commit()
for file_id in processed_files:
processed_data_models = session.query(FileDataModel).filter(FileDataModel.file_model_id==file_id).all()
for processed_data_model in processed_data_models:
session.delete(processed_data_model)
session.commit()
print(f'upgrade: in processed files: file_id: {file_id}')
print('upgrade: done: ')
def downgrade():
# TODO: This is a work in progress, and depends on what we do in upgrade()
op.add_column('lookup_file', sa.Column('file_data_model_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'lookup_file', 'file', ['file_data_model_id'], ['id'])
op.drop_constraint('lookup_file_file_model_id_key', 'lookup_file', type_='foreignkey')
@@ -318,8 +277,3 @@ def downgrade():
sa.ForeignKeyConstraint(['workflow_id'], ['workflow.id'], ),
sa.PrimaryKeyConstraint('file_data_id', 'workflow_id')
)
location = SpecFileService.get_sync_file_root()
FromFilesystemService().update_file_metadata_from_filesystem(location)
print('downgrade: ')

View File

@@ -19,38 +19,12 @@ depends_on = None
def upgrade():
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
print("Doing the upgrade")
op.execute('ALTER TABLE workflow DROP CONSTRAINT workflow_workflow_spec_id_fkey')
op.execute('ALTER TABLE file DROP CONSTRAINT file_workflow_spec_id_fkey')
op.execute('ALTER TABLE workflow_library DROP CONSTRAINT workflow_library_workflow_spec_id_fkey')
op.execute('ALTER TABLE workflow_library DROP CONSTRAINT workflow_library_library_spec_id_fkey')
op.execute('ALTER TABLE task_event DROP CONSTRAINT task_event_workflow_spec_id_fkey')
# Use Alchemy's connection and transaction to noodle over the data.
connection = op.get_bind()
# Select all existing names that need migrating.
results = connection.execute(sa.select([
WorkflowSpecModel.id,
WorkflowSpecModel.display_name,
])).fetchall()
# Iterate over all selected data tuples.
for id, display_name in results:
new_id = display_name.lower().\
replace(",", "").\
replace("'", "").\
replace(" ", "_").\
replace("-", "_").\
replace(".", "_").\
replace("/","_").\
replace("\\", "_")
old_id = id
op.execute("Update workflow_spec set id='%s' where id='%s'" % (new_id, old_id))
op.execute("Update workflow set workflow_spec_id='%s' where workflow_spec_id='%s'" % (new_id, old_id))
op.execute("Update file set workflow_spec_id='%s' where workflow_spec_id='%s'" % (new_id, old_id))
op.execute("Update workflow_library set workflow_spec_id='%s' where workflow_spec_id='%s'" % (new_id, old_id))
op.execute("Update workflow_library set library_spec_id='%s' where library_spec_id='%s'" % (new_id, old_id))
op.execute("Update task_event set workflow_spec_id='%s' where workflow_spec_id='%s'" % (new_id, old_id))
op.create_foreign_key(
'workflow_workflow_spec_id_fkey',
'workflow', 'workflow_spec',

View File

@@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_19xdwix" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.2.0">
<bpmn:process id="Process_1wfi0e5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_09e6w2a</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_09e6w2a" sourceRef="StartEvent_1" targetRef="Activity_1lg09if" />
<bpmn:sequenceFlow id="Flow_0cbbsi7" sourceRef="Activity_1lg09if" targetRef="Activity_05yevzg" />
<bpmn:sequenceFlow id="Flow_0dvxkh6" sourceRef="Activity_05yevzg" targetRef="Activity_0phz7ks" />
<bpmn:endEvent id="Event_1ieukoa">
<bpmn:incoming>Flow_19hbirj</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_19hbirj" sourceRef="Activity_0phz7ks" targetRef="Event_1ieukoa" />
<bpmn:userTask id="Activity_1lg09if" name="Get Date" camunda:formKey="DateForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="a_date" label="A Date" type="date">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_09e6w2a</bpmn:incoming>
<bpmn:outgoing>Flow_0cbbsi7</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_05yevzg" name="Modify Date">
<bpmn:incoming>Flow_0cbbsi7</bpmn:incoming>
<bpmn:outgoing>Flow_0dvxkh6</bpmn:outgoing>
<bpmn:script>delta1 = timedelta(hours=2)
format = '%Y-%m-%dT%H:%M:%S.%fZ'
the_date = datetime.datetime.strptime(a_date, format)
modified_date = the_date + delta1
del(delta1)</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Activity_0phz7ks" name="Display Dates">
<bpmn:documentation># Dates
## A Date
{{ a_date }}
</bpmn:documentation>
<bpmn:incoming>Flow_0dvxkh6</bpmn:incoming>
<bpmn:outgoing>Flow_19hbirj</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1wfi0e5">
<bpmndi:BPMNEdge id="Flow_19hbirj_di" bpmnElement="Flow_19hbirj">
<di:waypoint x="690" y="117" />
<di:waypoint x="752" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0dvxkh6_di" bpmnElement="Flow_0dvxkh6">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0cbbsi7_di" bpmnElement="Flow_0cbbsi7">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_09e6w2a_di" bpmnElement="Flow_09e6w2a">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1ieukoa_di" bpmnElement="Event_1ieukoa">
<dc:Bounds x="752" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_05be3cs_di" bpmnElement="Activity_1lg09if">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1s4snzz_di" bpmnElement="Activity_05yevzg">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_15nchcr_di" bpmnElement="Activity_0phz7ks">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
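
For reference, the "Modify Date" script task above amounts to this plain Python (assuming a_date arrives as a string in the format shown, and that the BPMN script engine exposes datetime and timedelta):

import datetime
from datetime import timedelta

a_date = '2022-02-17T11:52:52.335700Z'  # example form value
delta1 = timedelta(hours=2)
fmt = '%Y-%m-%dT%H:%M:%S.%fZ'
the_date = datetime.datetime.strptime(a_date, fmt)
modified_date = the_date + delta1  # two hours later
del delta1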

View File

@@ -135,6 +135,7 @@ class TestStudyDetailsDocumentsScript(BaseTest):
@patch('crc.services.protocol_builder.requests.get')
def test_file_data_set_invalid_irb_code_fails(self, mock_get):
self.create_reference_document()
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
self.add_studies()

View File

@@ -1,5 +1,6 @@
from tests.base_test import BaseTest
from crc import app
from crc.services.git_service import GitService
from unittest.mock import patch, Mock, call
@@ -57,6 +58,10 @@ class TestGitService(BaseTest):
self.assertIn(call.index.commit('This is my comment'), method_calls)
self.assertIn(call.remotes.origin.push(), method_calls)
# def test_pull_from_remote(self):
# result = GitService.pull_from_remote()
# print(result)
def test_get_remote_url(self):
app.config['GIT_REMOTE_SERVER'] = 'test_server.com'
app.config['GIT_USER_NAME'] = 'test_username'
app.config['GIT_USER_PASS'] = 'test_pass'
result = GitService.get_remote_url('my_test_path')
self.assertEqual('https://test_username:test_pass@test_server.com/my_test_path.git', result)

View File

@@ -6,9 +6,9 @@ from crc.api.common import ApiError
from crc import session, app
from crc.models.file import FileDataModel, FileModel, LookupFileModel, LookupDataModel, CONTENT_TYPES
from crc.services.lookup_service import LookupService
from crc.services.reference_file_service import ReferenceFileService
from crc.services.spec_file_service import SpecFileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.document_service import DocumentService
class TestLookupService(BaseTest):
@@ -24,8 +24,13 @@ class TestLookupService(BaseTest):
def test_lookup_table_is_not_created_more_than_once(self):
spec = self.load_test_spec('enum_options_with_search')
workflow = self.create_workflow('enum_options_with_search')
self.assertEqual(0, session.query(LookupFileModel).count())
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "sam", limit=10)
self.assertEqual(1, session.query(LookupFileModel).count())
lookup_table_orig = session.query(LookupFileModel).first()
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "something", limit=10)
lookup_table = session.query(LookupFileModel).first()
self.assertEqual(lookup_table_orig, lookup_table)
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "blah", limit=10)
lookup_records = session.query(LookupFileModel).all()
self.assertIsNotNone(lookup_records)
@@ -182,14 +187,24 @@ class TestLookupService(BaseTest):
# Using an old xls file should raise an error
file_data_xls = SpecFileService().get_data(spec, 'sponsors.xls')
timestamp = SpecFileService().timestamp(spec, 'sponsors.xls')
with self.assertRaises(ApiError) as ae:
LookupService.build_lookup_table('sponsors.xls', file_data_xls, 'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
LookupService.build_lookup_table('sponsors.xls', file_data_xls, timestamp, 'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
self.assertIn('Error opening excel file', ae.exception.args[0])
# Using an xlsx file should work
file_data_xlsx = SpecFileService().get_data(spec, 'sponsors.xlsx')
lookup_model = LookupService.build_lookup_table('sponsors.xlsx', file_data_xlsx,
timestamp = SpecFileService().timestamp(spec, 'sponsors.xlsx')
lookup_model = LookupService.build_lookup_table('sponsors.xlsx', file_data_xlsx, timestamp,
'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
self.assertEqual(28, len(lookup_model.dependencies))
self.assertIn('CUSTOMER_NAME', lookup_model.dependencies[0].data.keys())
self.assertIn('CUSTOMER_NUMBER', lookup_model.dependencies[0].data.keys())
def test_lookup_for_reference_caches_properly(self):
self.create_reference_document()
lookup_model_1 = LookupService.get_lookup_model_for_reference(DocumentService.DOCUMENT_LIST,
'code', 'description')
lookup_model_2 = LookupService.get_lookup_model_for_reference(DocumentService.DOCUMENT_LIST,
'code', 'description')
self.assertEqual(lookup_model_1, lookup_model_2)

View File

@@ -0,0 +1,22 @@
from tests.base_test import BaseTest
from crc import session
from crc.models.workflow import WorkflowStatus
from crc.services.workflow_service import WorkflowService
class TestErrorWorkflows(BaseTest):
def test_error_workflows(self):
"""We only test whether we have good information in the message.
We do not test whether the message was sent by Sentry."""
workflow_1 = self.create_workflow('random_fact')
workflow_1.status = WorkflowStatus.erroring
workflow_2 = self.create_workflow('random_fact')
workflow_2.status = WorkflowStatus.erroring
session.commit()
message = WorkflowService().process_erroring_workflows()
self.assertIn('There are 2 workflows in an error state.', message)
self.assertIn(f'workflow/{workflow_1.id}', message)
self.assertIn(f'workflow/{workflow_2.id}', message)

View File

@@ -0,0 +1,12 @@
from tests.base_test import BaseTest
class TestDateValidation(BaseTest):
def test_date_validation(self):
"""We were not instantiating date fields correctly during validation.
This is a simple test to make sure we seed an actual date in date fields instead of a random string."""
spec_model = self.load_test_spec('date_validation')
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
self.assertEqual([], rv.json)