Removed the method get_spec_data_files completely - using get_spec_files and get_spec_file_data to get this information instead.

Only load the spec data files when creating a new workflow; otherwise just deserialize the stored JSON.
Removed the code for calculating the version of the spec, as we don't use it.
Dan 2022-01-25 16:10:54 -05:00
parent 9690ebf883
commit 8529465322
6 changed files with 47 additions and 412 deletions
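The heart of the change, sketched with the names used in the diffs below (an illustrative fragment, not the literal committed code): spec files are only read and parsed when a workflow has no serialized state yet.

# WorkflowProcessor.__init__, abridged: a workflow that was never started
# has no bpmn_workflow_json, so its spec is built from the spec files;
# an already-running workflow is restored from the stored JSON instead.
spec = None
if workflow_model.bpmn_workflow_json is None:
    spec_files = SpecFileService().get_spec_files(
        workflow_spec_id=workflow_model.workflow_spec_id, include_libraries=True)
    spec = WorkflowProcessor.get_spec(spec_files, workflow_model.workflow_spec_id)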

View File

@@ -57,18 +57,17 @@ Takes two arguments:
raise ApiError(code="invalid_argument",
message="The given task does not match the given study.")
file_data_model = None
file_data = None
if workflow is not None:
# Get the workflow specification file with the given name.
file_data_models = SpecFileService().get_spec_data_files(
workflow_spec_id=workflow.workflow_spec_id,
workflow_id=workflow.id,
name=file_name)
if len(file_data_models) > 0:
file_data_model = file_data_models[0]
file_models = SpecFileService().get_spec_files(
workflow_spec_id=workflow.workflow_spec_id, file_name=file_name)
if len(file_models) > 0:
file_model = file_models[0]
else:
raise ApiError(code="invalid_argument",
message="Unable to locate a file with the given name.")
file_data = SpecFileService().get_spec_file_data(file_model.id).data
# Get images from file/files fields
if len(args) == 3:
@@ -77,7 +76,7 @@ Takes two arguments:
image_file_data = None
try:
return JinjaService().make_template(BytesIO(file_data_model['data']), task.data, image_file_data)
return JinjaService().make_template(BytesIO(file_data), task.data, image_file_data)
except ApiError as ae:
# In some cases we want to provide a very specific error, that does not get obscured when going
# through the python expression engine. We can do that by throwing a WorkflowTaskExecException,
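To spell out the new pattern in this hunk: the old get_spec_data_files call, which returned {'meta': ..., 'data': ...} bundles, becomes two calls, one for the FileModel metadata and one for the raw bytes. A hedged sketch of the caller, assuming the crc services named in this diff are importable:

from io import BytesIO

# Look up the file's metadata row, then fetch its bytes separately.
file_models = SpecFileService().get_spec_files(
    workflow_spec_id=workflow.workflow_spec_id, file_name=file_name)
if len(file_models) == 0:
    raise ApiError(code="invalid_argument",
                   message="Unable to locate a file with the given name.")
file_data = SpecFileService().get_spec_file_data(file_models[0].id).data
result = JinjaService().make_template(BytesIO(file_data), task.data, image_file_data)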

View File

@@ -130,19 +130,16 @@ class LookupService(object):
file_name = field.get_property(Task.FIELD_PROP_SPREADSHEET_NAME)
value_column = field.get_property(Task.FIELD_PROP_VALUE_COLUMN)
label_column = field.get_property(Task.FIELD_PROP_LABEL_COLUMN)
latest_files = SpecFileService().get_spec_data_files(workflow_spec_id=workflow_model.workflow_spec_id,
workflow_id=workflow_model.id,
name=file_name)
latest_files = SpecFileService().get_spec_files(workflow_spec_id=workflow_model.workflow_spec_id,
file_name=file_name)
if len(latest_files) < 1:
raise ApiError("invalid_enum", "Unable to locate the lookup data file '%s'" % file_name)
else:
data_dict = latest_files[0]
file = latest_files[0]
file_id = data_dict['meta']['id']
file_name = data_dict['meta']['name']
file_data = data_dict['data']
file_data = SpecFileService().get_spec_file_data(file.id).data
lookup_model = LookupService.build_lookup_table(file_id, file_name, file_data, value_column, label_column,
lookup_model = LookupService.build_lookup_table(file.id, file_name, file_data, value_column, label_column,
workflow_model.workflow_spec_id, task_spec_id, field_id)
# Use the results of an LDAP request to populate enum field options

View File

@@ -217,52 +217,6 @@ class SpecFileService(object):
file_path = self.write_spec_file_data_to_system(workflow_spec_model, file_model, file_data)
self.write_spec_file_info_to_system(file_path, file_model)
def get_spec_data_files(self, workflow_spec_id, workflow_id=None, name=None, include_libraries=False):
"""Returns all the files related to a workflow specification.
if `name` is included we only return the file with that name"""
spec_data_files = []
workflow_spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id==workflow_spec_id).first()
workflow_spec_name = workflow_spec.display_name
category_name = self.get_spec_file_category_name(workflow_spec)
sync_file_root = self.get_sync_file_root()
spec_path = os.path.join(sync_file_root,
category_name,
workflow_spec_name)
directory_items = os.scandir(spec_path)
for item in directory_items:
if item.is_file() and not item.name.endswith('json'):
if name is not None and item.name != name:
continue
with open(item.path, 'rb') as f_open:
json_path = f'{item.path}.json'
with open(json_path, 'r') as j_open:
json_data = j_open.read()
json_obj = json.loads(json_data)
file_data = f_open.read()
file_dict = {'meta': json_obj,
'data': file_data}
spec_data_files.append(file_dict)
print('get_spec_data_files')
return spec_data_files
# if workflow_id:
# query = session.query(FileDataModel) \
# .join(WorkflowSpecDependencyFile) \
# .filter(WorkflowSpecDependencyFile.workflow_id == workflow_id) \
# .order_by(FileDataModel.id)
# if name:
# query = query.join(FileModel).filter(FileModel.name == name)
# return query.all()
# else:
# """Returns all the latest files related to a workflow specification"""
# file_models = FileService.get_files(workflow_spec_id=workflow_spec_id,include_libraries=include_libraries)
# latest_data_files = []
# for file_model in file_models:
# if name and file_model.name == name:
# latest_data_files.append(FileService.get_file_data(file_model.id))
# elif not name:
# latest_data_files.append(FileService.get_file_data(file_model.id))
# return latest_data_files
@staticmethod
def get_spec_file_category_name(spec_model):
category_name = None
@@ -356,22 +310,24 @@ class SpecFileService(object):
return process_elements[0].attrib['id']
@staticmethod
def get_spec_files(workflow_spec_id, include_libraries=False):
if workflow_spec_id:
if include_libraries:
libraries = session.query(WorkflowLibraryModel).filter(
WorkflowLibraryModel.workflow_spec_id==workflow_spec_id).all()
library_workflow_specs = [x.library_spec_id for x in libraries]
library_workflow_specs.append(workflow_spec_id)
query = session.query(FileModel).filter(FileModel.workflow_spec_id.in_(library_workflow_specs))
else:
query = session.query(FileModel).filter(FileModel.workflow_spec_id == workflow_spec_id)
def get_spec_files(workflow_spec_id, file_name=None, include_libraries=False):
if include_libraries:
libraries = session.query(WorkflowLibraryModel).filter(
WorkflowLibraryModel.workflow_spec_id==workflow_spec_id).all()
library_workflow_specs = [x.library_spec_id for x in libraries]
library_workflow_specs.append(workflow_spec_id)
query = session.query(FileModel).filter(FileModel.workflow_spec_id.in_(library_workflow_specs))
else:
query = session.query(FileModel).filter(FileModel.workflow_spec_id == workflow_spec_id)
query = query.filter(FileModel.archived == False)
query = query.order_by(FileModel.id)
if file_name:
query = query.filter(FileModel.name == file_name)
results = query.all()
return results
query = query.filter(FileModel.archived == False)
query = query.order_by(FileModel.id)
results = query.all()
return results
@staticmethod
def get_workflow_file_data(workflow, file_name):
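For reference, get_spec_files is a staticmethod that now accepts an optional file_name filter, always excludes archived files, and orders results by FileModel.id. An illustrative call; the 'random_fact' spec id is borrowed from the tests further down:

# All current files for a spec, including any library specs it uses:
all_files = SpecFileService.get_spec_files('random_fact', include_libraries=True)
# A single named file, if it exists:
matches = SpecFileService.get_spec_files('random_fact', file_name='random_fact2.bpmn')
if matches:
    data = SpecFileService().get_spec_file_data(matches[0].id).data  # raw bytes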

View File

@@ -1,3 +1,5 @@
from typing import List
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.serializer.exceptions import MissingSpecError
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
@@ -102,15 +104,11 @@ class WorkflowProcessor(object):
self.workflow_model = workflow_model
if workflow_model.bpmn_workflow_json is None: # The workflow was never started.
self.spec_data_files = SpecFileService().get_spec_data_files(
workflow_spec_id=workflow_model.workflow_spec_id,include_libraries=True)
spec = self.get_spec(self.spec_data_files, workflow_model.workflow_spec_id)
else:
self.spec_data_files = SpecFileService().get_spec_data_files(
workflow_spec_id=workflow_model.workflow_spec_id,
workflow_id=workflow_model.id)
spec = None
spec = None
if workflow_model.bpmn_workflow_json is None:
self.spec_files = SpecFileService().get_spec_files(
workflow_spec_id=workflow_model.workflow_spec_id, include_libraries=True)
spec = self.get_spec(self.spec_files, workflow_model.workflow_spec_id)
self.workflow_spec_id = workflow_model.workflow_spec_id
@@ -180,10 +178,6 @@ class WorkflowProcessor(object):
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
# try:
# bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
return bpmn_workflow
def save(self):
@@ -198,53 +192,15 @@ class WorkflowProcessor(object):
session.add(self.workflow_model)
session.commit()
def get_version_string(self):
# this could potentially become expensive to load all the data in the data models.
# in which case we might consider using a deferred loader for the actual data, but
# trying not to pre-optimize.
file_data_models = SpecFileService().get_spec_data_files(self.workflow_model.workflow_spec_id,
self.workflow_model.id)
return WorkflowProcessor.__get_version_string_for_data_models(file_data_models)
@staticmethod
def get_latest_version_string_for_spec(spec_id):
file_data_models = SpecFileService().get_spec_data_files(spec_id)
return WorkflowProcessor.__get_version_string_for_data_models(file_data_models)
@staticmethod
def __get_version_string_for_data_models(file_data_models):
"""Version is in the format v[VERSION] (FILE_ID_LIST)
For example, a single bpmn file with only one version would be
v1 (12) Where 12 is the id of the file data model that is used to create the
specification. If multiple files exist, they are added on in
dot notation to both the version number and the file list. So
a Spec that includes a BPMN, DMN, and a Word file all on the first
version would be v1.1.1 (12.45.21)"""
major_version = 0 # The version of the primary file.
minor_version = [] # The versions of the minor files if any.
file_ids = []
for file_data in file_data_models:
file_ids.append(file_data.id)
if file_data.file_model.primary:
major_version = file_data.version
else:
minor_version.append(file_data.version)
minor_version.insert(0, major_version) # Add major version to beginning.
version = ".".join(str(x) for x in minor_version)
files = ".".join(str(x) for x in file_ids)
full_version = "v%s (%s)" % (version, files)
return full_version
@staticmethod
@timeit
def run_master_spec(spec_model, study):
"""Executes a BPMN specification for the given study, without recording any information to the database
Useful for running the master specification, which should not persist. """
lasttime = firsttime()
spec_data_files = SpecFileService().get_spec_data_files(spec_model.id)
spec_files = SpecFileService().get_spec_files(spec_model.id, include_libraries=True)
lasttime = sincetime('load Files', lasttime)
spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id)
spec = WorkflowProcessor.get_spec(spec_files, spec_model.id)
lasttime = sincetime('get spec', lasttime)
try:
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
@@ -268,7 +224,7 @@ class WorkflowProcessor(object):
return parser
@staticmethod
def get_spec(files, workflow_spec_id):
def get_spec(files: List[FileModel], workflow_spec_id):
"""Returns a SpiffWorkflow specification for the given workflow spec,
using the files provided. The Workflow_spec_id is only used to generate
better error messages."""
@@ -276,14 +232,15 @@ class WorkflowProcessor(object):
process_id = None
for file in files:
if file['meta']['name'][-4:] == FileType.bpmn.value:
bpmn: etree.Element = etree.fromstring(file['data'])
if file['meta']['primary'] and file['meta']['workflow_spec_id'] == workflow_spec_id:
data = SpecFileService().get_spec_file_data(file.id).data
if file.type == FileType.bpmn:
bpmn: etree.Element = etree.fromstring(data)
if file.primary and file.workflow_spec_id == workflow_spec_id:
process_id = SpecFileService.get_process_id(bpmn)
parser.add_bpmn_xml(bpmn, filename=file['meta']['name'])
elif file['meta']['name'][-3:] == FileType.dmn.value:
dmn: etree.Element = etree.fromstring(file['data'])
parser.add_dmn_xml(dmn, filename=file['meta']['name'])
parser.add_bpmn_xml(bpmn, filename=file.name)
elif file.type == FileType.dmn:
dmn: etree.Element = etree.fromstring(data)
parser.add_dmn_xml(dmn, filename=file.name)
if process_id is None:
raise (ApiError(code="no_primary_bpmn_error",
message="There is no primary BPMN model defined for workflow %s" % workflow_spec_id))
@@ -311,19 +268,6 @@ class WorkflowProcessor(object):
else:
return WorkflowStatus.waiting
# def hard_reset(self):
# """Recreate this workflow. This will be useful when a workflow specification changes.
# """
# self.spec_data_files = FileService.get_spec_data_files(workflow_spec_id=self.workflow_spec_id)
# new_spec = WorkflowProcessor.get_spec(self.spec_data_files, self.workflow_spec_id)
# new_bpmn_workflow = BpmnWorkflow(new_spec, script_engine=self._script_engine)
# new_bpmn_workflow.data = self.bpmn_workflow.data
# try:
# new_bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("hard_reset_engine_steps_error", str(we), we.sender)
# self.bpmn_workflow = new_bpmn_workflow
def get_status(self):
return self.status_of(self.bpmn_workflow)
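Taken together: get_spec now receives FileModel rows and fetches each file's bytes itself through get_spec_file_data, instead of being handed pre-bundled {'meta': ..., 'data': ...} dicts. A sketch of the run_master_spec-style call path shown above:

# Build and run a spec straight from its current files (illustrative fragment):
spec_files = SpecFileService().get_spec_files(spec_model.id, include_libraries=True)
spec = WorkflowProcessor.get_spec(spec_files, spec_model.id)
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)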

View File

@@ -256,10 +256,8 @@ class TestFileService(BaseTest):
def test_get_spec_files(self):
self.load_example_data()
spec = session.query(WorkflowSpecModel.id).first()
spec_files = SpecFileService().get_spec_data_files(spec.id)
spec_files = SpecFileService().get_spec_files(spec.id)
workflow = session.query(WorkflowModel).first()
processor = WorkflowProcessor(workflow)
self.assertIsInstance(processor, WorkflowProcessor)
print('test_get_spec_files')

View File

@@ -1,259 +0,0 @@
# from unittest import mock
# from unittest.mock import patch
#
# from tests.base_test import BaseTest
#
# from crc import db
# from crc.api.workflow_sync import get_all_spec_state, \
# get_changed_workflows, \
# get_workflow_spec_files, \
# get_changed_files, \
# get_workflow_specification, \
# sync_changed_files
# from crc.models.workflow import WorkflowSpecModel
# from datetime import datetime
# from crc.services.file_service import FileService
# from crc.services.spec_file_service import SpecFileService
# from crc.services.workflow_sync import WorkflowSyncService
#
# def get_random_fact_pos(othersys):
# """
# Make sure we get the 'random_fact' workflow spec
# no matter what order it is in
# """
# rf2pos = 0
# for pos in range(len(othersys)):
# if othersys[pos]['workflow_spec_id'] == 'random_fact':
# rf2pos = pos
# return rf2pos
#
#
# def get_random_fact_2_pos(othersys):
# """
# Makes sure we get the random_fact2.bpmn file no matter what order it is in
# """
# rf2pos = 0
# for pos in range(len(othersys)):
# if othersys[pos]['filename'] == 'random_fact2.bpmn':
# rf2pos = pos
# return rf2pos
#
#
# class TestWorkflowSync(BaseTest):
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_all_remote_workflows')
# def test_get_no_changes(self, mock_get):
# self.load_example_data()
# othersys = get_all_spec_state()
# mock_get.return_value = othersys
# response = get_changed_workflows('localhost:0000') # not actually used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(response,[])
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_all_remote_workflows')
# def test_remote_workflow_change(self, mock_get):
# self.load_example_data()
# othersys = get_all_spec_state()
# rf2pos = get_random_fact_pos(othersys)
# othersys[rf2pos]['date_created'] = str(datetime.utcnow())
# othersys[rf2pos]['md5_hash'] = '12345'
# mock_get.return_value = othersys
# response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0]['workflow_spec_id'], 'random_fact')
# self.assertEqual(response[0]['location'], 'remote')
# self.assertEqual(response[0]['new'], False)
#
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_all_remote_workflows')
# def test_remote_workflow_has_new(self, mock_get):
# self.load_example_data()
# othersys = get_all_spec_state()
# othersys.append({'workflow_spec_id':'my_new_workflow',
# 'date_created':str(datetime.utcnow()),
# 'md5_hash': '12345'})
# mock_get.return_value = othersys
# response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0]['workflow_spec_id'],'my_new_workflow')
# self.assertEqual(response[0]['location'], 'remote')
# self.assertEqual(response[0]['new'], True)
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_all_remote_workflows')
# def test_local_workflow_has_new(self, mock_get):
# self.load_example_data()
#
# othersys = get_all_spec_state()
# mock_get.return_value = othersys
# wf_spec = WorkflowSpecModel()
# wf_spec.id = 'abcdefg'
# wf_spec.display_name = 'New Workflow - Yum!!'
# wf_spec.name = 'my_new_workflow'
# wf_spec.description = 'yep - its a new workflow'
# wf_spec.category_id = 0
# wf_spec.display_order = 0
# db.session.add(wf_spec)
# db.session.commit()
# SpecFileService.add_workflow_spec_file(wf_spec,'dummyfile.txt','text',b'this is a test')
# # after setting up the test - I realized that this doesn't return anything for
# # a workflow that is new locally - it just returns nothing
# response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(response,[])
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# def test_file_differences_clean_slate(self, mock_get):
# """ This test is basically for coverage"""
# self.load_example_data()
# othersys = get_workflow_spec_files('random_fact')
# mock_get.return_value = othersys
# self.delete_example_data()
# response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(len(response),2)
# self.assertEqual(response[0]['location'], 'remote')
# self.assertEqual(response[0]['new'], True)
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# def test_file_differences(self, mock_get):
# self.load_example_data()
# othersys = get_workflow_spec_files('random_fact')
# rf2pos = get_random_fact_2_pos(othersys)
# othersys[rf2pos]['date_created'] = str(datetime.utcnow())
# othersys[rf2pos]['md5_hash'] = '12345'
# mock_get.return_value = othersys
# response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0]['filename'], 'random_fact2.bpmn')
# self.assertEqual(response[0]['location'], 'remote')
# self.assertEqual(response[0]['new'], False)
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_file_by_hash')
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec')
# def test_workflow_differences(self, workflow_mock, spec_files_mock, file_data_mock):
# self.load_example_data()
# # make a remote workflow that is slightly different from local
# remote_workflow = get_workflow_specification('random_fact')
# self.assertEqual(remote_workflow['display_name'],'Random Fact')
# remote_workflow['description'] = 'This Workflow came from Remote'
# remote_workflow['display_name'] = 'Remote Workflow'
# remote_workflow['library'] = True
# workflow_mock.return_value = remote_workflow
# # change the remote file date and hash
# othersys = get_workflow_spec_files('random_fact')
# rf2pos = get_random_fact_2_pos(othersys)
# othersys[rf2pos]['date_created'] = str(datetime.utcnow())
# othersys[rf2pos]['md5_hash'] = '12345'
# spec_files_mock.return_value = othersys
# # actually go get a different file
# file_data_mock.return_value = self.workflow_sync_response('random_fact2.bpmn')
# response = sync_changed_files('localhost:0000','random_fact') # endpoint not used due to mock
# # now make sure that everything gets pulled over
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0], 'random_fact2.bpmn')
# files = SpecFileService().get_spec_data_files('random_fact')
# md5sums = [str(f.md5_hash) for f in files]
# self.assertEqual('21bb6f9e-0af7-0ab2-0fc7-ec0f94787e58' in md5sums, True)
# new_local_workflow = get_workflow_specification('random_fact')
# self.assertEqual(new_local_workflow['display_name'],'Remote Workflow')
# self.assertTrue(new_local_workflow['library'])
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_file_by_hash')
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# def test_workflow_sync_with_libraries(self, get_remote_workflow_spec_files_mock, get_remote_file_by_hash_mock):
# self.load_example_data()
# # make a remote workflow that is slightly different from local, and add a library to it.
# remote_workflow = get_workflow_specification('random_fact')
# remote_library = self.load_test_spec('two_forms')
# remote_workflow['description'] = 'This Workflow came from Remote'
# remote_workflow['libraries'] = [{'id': remote_library.id, 'name': 'two_forms', 'display_name': "Two Forms"}]
#
# random_workflow_remote_files = get_workflow_spec_files('random_fact')
# rf2pos = get_random_fact_2_pos(random_workflow_remote_files)
# random_workflow_remote_files[rf2pos]['date_created'] = str(datetime.utcnow())
# random_workflow_remote_files[rf2pos]['md5_hash'] = '12345'
# get_remote_workflow_spec_files_mock.return_value = random_workflow_remote_files
# get_remote_file_by_hash_mock.return_value = self.workflow_sync_response('random_fact2.bpmn')
#
# # more mock stuff, but we need to return different things depending on what is asked, so we use the side
# # effect pattern rather than setting a single return_value through a patch.
# def mock_workflow_spec(*args):
# if args[1] == 'random_fact':
# return remote_workflow
# else:
# return get_workflow_specification(args[1])
#
# with mock.patch.object(WorkflowSyncService, 'get_remote_workflow_spec', side_effect=mock_workflow_spec):
# response = sync_changed_files('localhost:0000','random_fact') # endpoint not used due to mock
#
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0], 'random_fact2.bpmn')
# files = SpecFileService().get_spec_data_files('random_fact')
# md5sums = [str(f.md5_hash) for f in files]
# self.assertEqual('21bb6f9e-0af7-0ab2-0fc7-ec0f94787e58' in md5sums, True)
# new_local_workflow = get_workflow_specification('random_fact')
# self.assertEqual(new_local_workflow['display_name'],'Random Fact')
# self.assertEqual(1, len(new_local_workflow['libraries']))
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_file_by_hash')
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# def test_ref_file_differences(self, spec_files_mock, file_data_mock):
# """
# Make sure we copy over a new reference file if it exists
# """
# self.load_example_data()
# # make a remote workflow that is slightly different from local
# othersys = get_workflow_spec_files('REFERENCE_FILES')
# newfile = {'file_model_id':9999,
# 'workflow_spec_id': None,
# 'filename':'test.txt',
# 'type':'txt',
# 'primary':False,
# 'content_type':'text/text',
# 'primary_process_id':None,
# 'date_created':str(datetime.utcnow()),
# 'md5_hash':'12345'
# }
# othersys.append(newfile)
# spec_files_mock.return_value = othersys
# # actually go get a different file
# file_data_mock.return_value = self.workflow_sync_response('test.txt')
# response = sync_changed_files('localhost:0000','REFERENCE_FILES') # endpoint not used due to mock
# # now make sure that everything gets pulled over
# self.assertIsNotNone(response)
# self.assertEqual(len(response),1)
# self.assertEqual(response[0], 'test.txt')
# ref_file = SpecFileService.get_reference_file_data('test.txt')
# self.assertEqual('24a2ab0d-1138-a80a-0b98-ed38894f5a04',str(ref_file.md5_hash))
#
#
#
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec_files')
# @patch('crc.services.workflow_sync.WorkflowSyncService.get_remote_workflow_spec')
# def test_file_deleted(self, workflow_mock, spec_files_mock):
# self.load_example_data()
# remote_workflow = get_workflow_specification('random_fact')
# workflow_mock.return_value = remote_workflow
# othersys = get_workflow_spec_files('random_fact')
# rf2pos = get_random_fact_2_pos(othersys)
# del(othersys[rf2pos])
# spec_files_mock.return_value = othersys
# response = sync_changed_files('localhost:0000','random_fact') # endpoint not used due to mock
# self.assertIsNotNone(response)
# # when we delete a local file, we do not return that it was deleted - just
# # a list of updated files. We may want to change this in the future.
# self.assertEqual(len(response),0)
# files = SpecFileService().get_spec_data_files('random_fact')
# self.assertEqual(len(files),1)
#