Merge branch 'main' into update-workflow-json

This commit is contained in:
mike cullerton 2022-11-18 12:01:47 -05:00
commit adab2ccb92
28 changed files with 263 additions and 218 deletions

View File

@@ -120,6 +120,8 @@ class BpmnParser(object):
         self.process_parsers_by_name = {}
         self.collaborations = {}
         self.process_dependencies = set()
+        self.messages = {}
+        self.correlations = {}
 
     def _get_parser_class(self, tag):
         if tag in self.OVERRIDE_PARSER_CLASSES:
@@ -179,6 +181,8 @@ class BpmnParser(object):
         self._add_processes(bpmn, filename)
         self._add_collaborations(bpmn)
+        self._add_messages(bpmn)
+        self._add_correlations(bpmn)
 
     def _add_processes(self, bpmn, filename=None):
         for process in bpmn.xpath('.//bpmn:process', namespaces=self.namespaces):
@@ -192,6 +196,43 @@ class BpmnParser(object):
             name = collaboration.get('id')
             self.collaborations[name] = [ participant.get('processRef') for participant in collaboration_xpath('.//bpmn:participant') ]
 
+    def _add_messages(self, bpmn):
+        for message in bpmn.xpath('.//bpmn:message', namespaces=self.namespaces):
+            if message.attrib.get("id") is None:
+                raise ValidationException(
+                    "Message identifier is missing from bpmn xml"
+                )
+            self.messages[message.attrib.get("id")] = message.attrib.get("name")
+
+    def _add_correlations(self, bpmn):
+        for correlation in bpmn.xpath('.//bpmn:correlationProperty', namespaces=self.namespaces):
+            correlation_identifier = correlation.attrib.get("id")
+            if correlation_identifier is None:
+                raise ValidationException(
+                    "Correlation identifier is missing from bpmn xml"
+                )
+            correlation_property_retrieval_expressions = correlation.xpath(
+                "//bpmn:correlationPropertyRetrievalExpression", namespaces = self.namespaces)
+            if not correlation_property_retrieval_expressions:
+                raise ValidationException(
+                    f"Correlation is missing correlation property retrieval expressions: {correlation_identifier}"
+                )
+            retrieval_expressions = []
+            for cpre in correlation_property_retrieval_expressions:
+                message_model_identifier = cpre.attrib.get("messageRef")
+                if message_model_identifier is None:
+                    raise ValidationException(
+                        f"Message identifier is missing from correlation property: {correlation_identifier}"
+                    )
+                children = cpre.getchildren()
+                expression = children[0].text if len(children) > 0 else None
+                retrieval_expressions.append({"messageRef": message_model_identifier,
+                                              "expression": expression})
+            self.correlations[correlation_identifier] = {
+                "name": correlation.attrib.get("name"),
+                "retrieval_expressions": retrieval_expressions
+            }
+
     def _find_dependencies(self, process):
         """Locate all calls to external BPMN, and store their ids in our list of dependencies"""
         for call_activity in process.xpath('.//bpmn:callActivity', namespaces=self.namespaces):
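
Note: after this change the parser exposes the parsed messages (id mapped to name) and correlations directly. A minimal usage sketch, with file and identifier names borrowed from the collaboration test later in this commit, not from the diff above:

    from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser

    parser = BpmnParser()
    parser.add_bpmn_files_by_glob('collaboration.bpmn')
    print(parser.messages)      # e.g. {'love_letter': ..., 'love_letter_response': ...}
    print(parser.correlations)  # e.g. {'lover_name': {'name': "Lover's Name", 'retrieval_expressions': [...]}}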

View File

@@ -44,7 +44,7 @@ class ProcessParser(NodeParser):
         self.parsed_nodes = {}
         self.lane = lane
         self.spec = None
-        self.process_executable = True
+        self.process_executable = self.is_executable()
 
     def get_name(self):
         """
@@ -52,6 +52,35 @@ class ProcessParser(NodeParser):
         """
         return self.node.get('name', default=self.get_id())
 
+    def has_lanes(self) -> bool:
+        """Returns true if this process has one or more named lanes """
+        elements = self.xpath("//bpmn:lane")
+        for el in elements:
+            if el.get("name"):
+                return True
+        return False
+
+    def is_executable(self) -> bool:
+        return self.node.get('isExecutable', 'true') == 'true'
+
+    def start_messages(self):
+        """ This returns a list of messages that would cause this
+        process to start. """
+        messages = []
+        message_event_definitions = self.xpath(
+            "//bpmn:startEvent/bpmn:messageEventDefinition")
+        for message_event_definition in message_event_definitions:
+            message_model_identifier = message_event_definition.attrib.get(
+                "messageRef"
+            )
+            if message_model_identifier is None:
+                raise ValidationException(
+                    f"Could not find messageRef from message event definition: {message_event_definition}"
+                )
+            messages.append(message_model_identifier)
+        return messages
+
     def parse_node(self, node):
         """
         Parses the specified child task node, and returns the task spec. This
@@ -72,7 +101,6 @@ class ProcessParser(NodeParser):
     def _parse(self):
         # here we only look in the top level, We will have another
         # bpmn:startEvent if we have a subworkflow task
-        self.process_executable = self.node.get('isExecutable', 'true') == 'true'
         start_node_list = self.xpath('./bpmn:startEvent')
         if not start_node_list and self.process_executable:
             raise ValidationException("No start event found", node=self.node, filename=self.filename)
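
Note: the new ProcessParser helpers make process metadata queryable before any spec is built. A small sketch under those assumptions (method names are from this diff; the file and process names come from the tests later in this commit):

    parser = BpmnParser()
    parser.add_bpmn_files_by_glob('message_test.bpmn')
    pp = parser.get_process_parser('ThrowCatch')
    pp.is_executable()    # the isExecutable attribute, defaulting to true
    pp.has_lanes()        # True only if at least one lane has a name
    pp.start_messages()   # e.g. ['Message_1rkbi27']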

View File

@@ -81,12 +81,6 @@ class MultiInstanceTask(TaskSpec):
         TaskSpec.__init__(self, wf_spec, name, **kwargs)
 
-    # DO NOT OVERRIDE THE SPEC TYPE.
-    # @property
-    # def spec_type(self):
-    #     return 'MultiInstance Task'
-
     def _find_my_task(self, task):
         for thetask in task.workflow.task_tree:
             if thetask.thread_id != task.thread_id:
@@ -113,17 +107,6 @@ class MultiInstanceTask(TaskSpec):
             new_task.triggered = True
             output._predict(new_task)
 
-    def _check_inputs(self, my_task):
-        if self.collection is None:
-            return
-        # look for variable in context, if we don't find it, default to 1
-        variable = valueof(my_task, self.times, 1)
-        if self.times.name == self.collection.name and type(variable) == type([]):
-            raise WorkflowTaskExecException(my_task,
-                'If we are updating a collection,'
-                ' then the collection must be a '
-                'dictionary.')
-
     def _get_loop_completion(self,my_task):
         if not self.completioncondition == None:
             terminate = my_task.workflow.script_engine.evaluate(my_task,self.completioncondition)
@@ -154,17 +137,6 @@ class MultiInstanceTask(TaskSpec):
             return len(variable.keys())
         return 1  # we shouldn't ever get here, but just in case return a sane value.
 
-    def _get_current_var(self, my_task, pos):
-        variable = valueof(my_task, self.times, 1)
-        if is_number(variable):
-            return pos
-        if isinstance(variable,list) and len(variable) >= pos:
-            return variable[pos - 1]
-        elif isinstance(variable,dict) and len(list(variable.keys())) >= pos:
-            return variable[list(variable.keys())[pos - 1]]
-        else:
-            return pos
-
     def _get_predicted_outputs(self, my_task):
         split_n = self._get_count(my_task)
@@ -418,52 +390,60 @@ class MultiInstanceTask(TaskSpec):
         if my_task.task_spec.prevtaskclass in classes.keys() and not terminate:
             super()._on_complete_hook(my_task)
 
-    def _merge_element_variable(self,my_task,collect,runtimes,colvarname):
-        # if we are updating the same collection as was our loopcardinality
-        # then all the keys should be there and we can use the sorted keylist
-        # if not, we use an integer - we should be guaranteed that the
-        # collection is a dictionary
+    def _check_inputs(self, my_task):
+        if self.collection is None:
+            return
+        # look for variable in context, if we don't find it, default to 1
+        variable = valueof(my_task, self.times, 1)
+        if self.times.name == self.collection.name and type(variable) == type([]):
+            raise WorkflowTaskExecException(my_task,
+                'If we are updating a collection, then the collection must be a dictionary.')
+
+    def _get_current_var(self, my_task, pos):
+        variable = valueof(my_task, self.times, 1)
+        if is_number(variable):
+            return pos
+        if isinstance(variable,list) and len(variable) >= pos:
+            return variable[pos - 1]
+        elif isinstance(variable,dict) and len(list(variable.keys())) >= pos:
+            return variable[list(variable.keys())[pos - 1]]
+        else:
+            return pos
+
+    def _merge_element_variable(self, my_task, collect, runtimes):
         if self.collection is not None and self.times.name == self.collection.name:
+            # Update an existing collection (we used the collection as the cardinality)
             keys = list(collect.keys())
             if len(keys) < runtimes:
                 msg = f"There is a mismatch between runtimes and the number " \
                       f"items in the collection, please check for empty " \
                       f"collection {self.collection.name}."
                 raise WorkflowTaskExecException(my_task, msg)
-
             runtimesvar = keys[runtimes - 1]
         else:
+            # Use an integer (for arrays)
             runtimesvar = runtimes
 
         if self.elementVar in my_task.data and isinstance(my_task.data[self.elementVar], dict):
-            collect[str(runtimesvar)] = DeepMerge.merge(collect.get(runtimesvar, {}),
-                                                        copy.copy(my_task.data[self.elementVar]))
-
-        my_task.data = DeepMerge.merge(my_task.data,
-                                       gendict(colvarname.split('/'), collect))
+            collect[str(runtimesvar)] = DeepMerge.merge(
+                collect.get(runtimesvar, {}),
+                copy.copy(my_task.data[self.elementVar])
+            )
 
-    def _update_sibling_data(self,my_task,runtimes,runcount,colvarname,collect):
+    def _update_sibling_data(self, my_task, runtimes, runcount, colvarname, collect):
         if (runtimes < runcount) and not my_task.terminate_current_loop and self.loopTask:
             my_task._set_state(TaskState.READY)
             my_task._set_internal_data(runtimes=runtimes + 1)
             my_task.data[self.elementVar] = self._get_current_var(my_task, runtimes + 1)
-            element_var_data = None
         else:
-            # The element var data should not be passed on to children
-            # but will add this back onto this task later.
-            element_var_data = my_task.data.pop(self.elementVar, None)
-
-            # if this is a parallel mi - then update all siblings with the
-            # current data
-            if not self.isSequential:
-                for task in my_task.parent.children:
-                    task.data = DeepMerge.merge(
-                        task.data,
-                        gendict(colvarname.split('/'),
-                                collect)
-                    )
-        return element_var_data
+            my_task.data.pop(self.elementVar, None)
+
+        for task in my_task.parent.children:
+            task.data = DeepMerge.merge(
+                task.data,
+                gendict(colvarname.split('/'), collect)
+            )
 
     def _on_complete_hook(self, my_task):
         # do special stuff for non-user tasks
@@ -486,9 +466,9 @@ class MultiInstanceTask(TaskSpec):
         collect = valueof(my_task, self.collection, {})
 
-        self._merge_element_variable(my_task,collect,runtimes,colvarname)
+        self._merge_element_variable(my_task, collect, runtimes)
 
-        element_var_data = self._update_sibling_data(my_task,runtimes,runcount,colvarname,collect)
+        self._update_sibling_data(my_task, runtimes, runcount, colvarname, collect)
 
         # please see MultiInstance code for previous version
         outputs = []
@@ -497,14 +477,6 @@ class MultiInstanceTask(TaskSpec):
         if not isinstance(my_task.task_spec,SubWorkflowTask):
             my_task._sync_children(outputs, TaskState.FUTURE)
-            for child in my_task.children:
-                child.task_spec._update(child)
-
-        # If removed, add the element_var_data back onto this task, after
-        # updating the children.
-        if(element_var_data):
-            my_task.data[self.elementVar] = element_var_data
 
     def serialize(self, serializer):
         return serializer.serialize_multi_instance(self)
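
Note: _update_sibling_data now unconditionally pushes the merged collection into every sibling via gendict, which wraps a value in nested dicts along a '/'-separated path. A minimal sketch of that helper's behavior (SpiffWorkflow ships its own implementation; this is only an illustration):

    def gendict(path, value):
        # gendict(['a', 'b'], v) -> {'a': {'b': v}}
        out = value
        for key in reversed(path):
            out = {key: out}
        return out

    assert gendict('family/members'.split('/'), {'1': {'A': 'x'}}) == \
           {'family': {'members': {'1': {'A': 'x'}}}}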

View File

@@ -66,8 +66,6 @@ class SubWorkflowTask(BpmnSpecMixin):
 
     def _on_complete_hook(self, my_task):
         BpmnSpecMixin._on_complete_hook(self, my_task)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def _on_cancel(self, my_task):
         subworkflow = my_task.workflow.get_subprocess(my_task)

View File

@@ -146,8 +146,6 @@ class UnstructuredJoin(Join, BpmnSpecMixin):
 
     def _update_hook(self, my_task):
-        if my_task._is_predicted():
-            self._predict(my_task)
         if not my_task.parent._is_finished():
             return

View File

@@ -95,7 +95,8 @@ class DMNEngine:
         # If we get here, we need to check whether the match expression includes
         # an operator or if can use '=='
         needs_eq = self.needs_eq(script_engine, match_expr)
-        expr = input_expr + ' == ' + match_expr if needs_eq else input_expr + match_expr
+        # Disambiguate cases like a == 0 == True when we add '=='
+        expr = f'({input_expr}) == ({match_expr})' if needs_eq else input_expr + match_expr
         return script_engine.evaluate(task, expr)
 
     @staticmethod
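
Note: the parentheses matter because Python chains comparison operators. A self-contained demonstration of the case named in the new comment (variable names hypothetical):

    input_expr, match_expr = 'a == 0', 'True'
    a = 0
    # Old form builds 'a == 0 == True', which chains to (a == 0) and (0 == True) -> False
    assert eval(input_expr + ' == ' + match_expr) is False
    # New form builds '(a == 0) == (True)', comparing the two results -> True
    assert eval(f'({input_expr}) == ({match_expr})') is True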

View File

@@ -88,8 +88,6 @@ class ExclusiveChoice(MultiChoice):
                 f'No conditions satisfied for {my_task.task_spec.name}')
 
         my_task._sync_children([output], TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_exclusive_choice(self)

View File

@@ -134,8 +134,6 @@ class MultiChoice(TaskSpec):
             outputs.append(self._wf_spec.get_task_spec_from_name(output))
 
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_multi_choice(self)

View File

@@ -102,8 +102,6 @@ class MultiInstance(TaskSpec):
 
     def _on_complete_hook(self, my_task):
         outputs = self._get_predicted_outputs(my_task)
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_multi_instance(self)

View File

@@ -76,12 +76,14 @@ class SubWorkflow(TaskSpec):
                 self, 'File does not exist: %s' % self.file)
 
     def _predict_hook(self, my_task):
+        # Modifying the task spec is a TERRIBLE idea, but if we don't do it, sync_children won't work
         outputs = [task.task_spec for task in my_task.children]
         for output in self.outputs:
             if output not in outputs:
                 outputs.insert(0, output)
         if my_task._is_definite():
-            my_task._sync_children(outputs, TaskState.FUTURE)
+            # This prevents errors with sync children
+            my_task._sync_children(outputs, TaskState.LIKELY)
         else:
             my_task._sync_children(outputs, my_task.state)
 
@@ -107,10 +109,7 @@ class SubWorkflow(TaskSpec):
     def _integrate_subworkflow_tree(self, my_task, subworkflow):
         # Integrate the tree of the subworkflow into the tree of this workflow.
-        my_task._sync_children(self.outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
-            child._inherit_data()
+        my_task._sync_children(self.outputs, TaskState.LIKELY)
         for child in subworkflow.task_tree.children:
             my_task.children.insert(0, child)
             child.parent = my_task
@@ -121,10 +120,18 @@ class SubWorkflow(TaskSpec):
         for child in subworkflow.task_tree.children:
             for assignment in self.in_assign:
                 assignment.assign(my_task, child)
-
-        self._predict(my_task)
-        for child in subworkflow.task_tree.children:
             child.task_spec._update(child)
+        # Instead of completing immediately, we'll wait for the subworkflow to complete
+        my_task._set_state(TaskState.WAITING)
+
+    def _update_hook(self, my_task):
+        subworkflow = my_task._get_internal_data('subworkflow')
+        if subworkflow is None:
+            # On the first update, we have to create the subworkflow
+            super()._update_hook(my_task)
+        elif subworkflow.is_completed():
+            # Then wait until it finishes to complete
+            my_task.complete()
 
     def _on_subworkflow_completed(self, subworkflow, my_task):
         # Assign variables, if so requested.
@@ -138,11 +145,6 @@ class SubWorkflow(TaskSpec):
             # Alright, abusing that hook is just evil but it works.
             child.task_spec._update_hook(child)
 
-    def _on_complete_hook(self, my_task):
-        for child in my_task.children:
-            if isinstance(child.task_spec, StartTask):
-                child.task_spec._update(child)
-
     def serialize(self, serializer):
         return serializer.serialize_sub_workflow(self)
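
Note: the behavioral change here, in one sentence: instead of completing as soon as it spawns the inner workflow, the subworkflow task now parks itself in WAITING, and each update either launches the inner workflow (first call) or completes the task once the inner workflow is done. A toy model of that state machine (hypothetical names, not SpiffWorkflow API):

    class SubworkflowTaskSketch:
        def __init__(self, launch):
            self.launch = launch   # callable that creates the inner workflow
            self.inner = None
            self.state = 'READY'

        def update(self):
            if self.inner is None:
                self.inner = self.launch()   # first update: start the inner workflow
                self.state = 'WAITING'
            elif self.inner.is_completed():
                self.state = 'COMPLETED'     # a later update finds it finished

    class Inner:
        done = False
        def is_completed(self):
            return self.done

    task = SubworkflowTaskSketch(Inner)
    task.update(); assert task.state == 'WAITING'
    task.inner.done = True
    task.update(); assert task.state == 'COMPLETED'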

View File

@@ -133,8 +133,6 @@ class ThreadSplit(TaskSpec):
         for i in range(split_n):
             outputs.append(self.thread_starter)
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_thread_split(self)

View File

@@ -242,24 +242,19 @@ class TaskSpec(object):
         :type  looked_ahead: integer
         :param looked_ahead: The depth of the predicted path so far.
         """
-        if my_task._is_finished():
-            return
         if seen is None:
             seen = []
-        elif self in seen:
-            return
-        self._predict_hook(my_task)
+        if not my_task._is_finished():
+            self._predict_hook(my_task)
         if not my_task._is_definite():
-            if looked_ahead + 1 >= self.lookahead:
-                return
             seen.append(self)
+        look_ahead = my_task._is_definite() or looked_ahead + 1 < self.lookahead
         for child in my_task.children:
-            child.task_spec._predict(child, seen[:], looked_ahead + 1)
+            if not child._is_finished() and child not in seen and look_ahead:
+                child.task_spec._predict(child, seen[:], looked_ahead + 1)
 
     def _predict_hook(self, my_task):
-        # If the task's status is not predicted, we default to FUTURE
-        # for all it's outputs.
+        # If the task's status is not predicted, we default to FUTURE for all it's outputs.
         # Otherwise, copy my own state to the children.
         if my_task._is_definite():
             best_state = TaskState.FUTURE
@@ -278,6 +273,12 @@ class TaskSpec(object):
         completes it makes sure to call this method so we can react.
         """
         my_task._inherit_data()
+        # We were doing this in _update_hook, but to me that seems inconsistent with the spirit
+        # of the hook functions. Moving it here allows removal of some repeated calls (overridden
+        # hook methods still need to do these things)
+        if my_task._is_predicted():
+            self._predict(my_task)
+        self.entered_event.emit(my_task.workflow, my_task)
         self._update_hook(my_task)
 
     def _update_hook(self, my_task):
@@ -290,11 +291,8 @@ class TaskSpec(object):
         Returning non-False will cause the task to go into READY.
         Returning any other value will cause no action.
         """
-        if my_task._is_predicted():
-            self._predict(my_task)
-        if not my_task.parent._is_finished():
-            return
-        self.entered_event.emit(my_task.workflow, my_task)
+        # If this actually did what the documentation said (returned a value indicating
+        # that the task was ready), then a lot of things might be easier.
         my_task._ready()
 
     def _on_ready(self, my_task):
@@ -387,21 +385,14 @@ class TaskSpec(object):
         """
         assert my_task is not None
-
-        if my_task.workflow.debug:
-            print("Executing %s: %s (%s)" % (
-                my_task.task_spec.__class__.__name__,
-                my_task.get_name(), my_task.get_description()))
-
         # We have to set the last task here, because the on_complete_hook
         # of a loopback task may overwrite what the last_task will be.
         my_task.workflow.last_task = my_task
         self._on_complete_hook(my_task)
+        for child in my_task.children:
+            child.task_spec._update(child)
         my_task.workflow._task_completed_notify(my_task)
-
-        if my_task.workflow.debug:
-            if hasattr(my_task.workflow, "outer_workflow"):
-                my_task.workflow.outer_workflow.task_tree.dump()
-
         self.completed_event.emit(my_task.workflow, my_task)
         return True
 
@@ -414,9 +405,7 @@ class TaskSpec(object):
         :rtype:  bool
         :returns: True on success, False otherwise.
         """
-        # If we have more than one output, implicitly split.
-        for child in my_task.children:
-            child.task_spec._update(child)
+        pass
 
     @abstractmethod
     def serialize(self, serializer, **kwargs):
@@ -478,8 +467,6 @@ class TaskSpec(object):
         :rtype:  TaskSpec
         :returns: The task specification instance.
         """
-        print(s_state)
-        print(wf_spec)
         out = cls(wf_spec,s_state.get('name'))
         out.id = s_state.get('id')
         out.name = s_state.get('name')
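
Note: the _on_complete change is the other half of the deletions elsewhere in this commit. The 'update my children' loop that ExclusiveChoice, MultiChoice, MultiInstance, ThreadSplit, and the subworkflow specs each repeated now runs once in the base class, after the subclass hook. A self-contained toy of that template-method shape (not SpiffWorkflow classes):

    class Task:
        def __init__(self, children=None):
            self.children = children or []
            self.updated = False

        def update(self):
            self.updated = True

    class BaseSpec:
        def on_complete(self, task):
            self.on_complete_hook(task)   # subclass-specific work first
            for child in task.children:   # common work, centralized here
                child.update()

        def on_complete_hook(self, task):
            pass                          # subclasses no longer repeat the loop

    t = Task(children=[Task(), Task()])
    BaseSpec().on_complete(t)
    assert all(c.updated for c in t.children)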

View File

@@ -39,6 +39,6 @@ class SpiffBpmnTask(BpmnSpecMixin):
             self.execute_script(my_task, self.prescript)
 
     def _on_complete_hook(self, my_task):
-        super()._on_complete_hook(my_task)
         if self.postscript is not None:
             self.execute_script(my_task, self.postscript)
+        super()._on_complete_hook(my_task)

View File

@@ -522,46 +522,42 @@ class Task(object, metaclass=DeprecatedMetaTask):
         """
         if task_specs is None:
             raise ValueError('"task_specs" argument is None')
-        add = task_specs[:]
+        new_children = task_specs[:]
 
         # If a child task_spec is also an ancestor, we are looping back,
         # replace those specs with a loopReset task.
         root_task = self._get_root()
-        for index, task_spec in enumerate(add):
+        for index, task_spec in enumerate(new_children):
             ancestor_task = self._find_ancestor(task_spec)
             if ancestor_task and ancestor_task != root_task:
                 destination = ancestor_task
                 new_spec = self.workflow.get_reset_task_spec(destination)
                 new_spec.outputs = []
                 new_spec.inputs = task_spec.inputs
-                add[index] = new_spec
+                new_children[index] = new_spec
 
         # Create a list of all children that are no longer needed.
-        remove = []
+        unneeded_children = []
         for child in self.children:
             # Triggered tasks are never removed.
             if child.triggered:
                 continue
-
-            # Check whether the task needs to be removed.
-            if child.task_spec in add:
-                add.remove(child.task_spec)
+            # If the task already exists, remove it from to-be-added
+            if child.task_spec in new_children:
+                new_children.remove(child.task_spec)
+                # We should set the state here but that breaks everything
                 continue
-
-            # Non-predicted tasks must not be removed, so they HAVE to be in
-            # the given task spec list.
+            # Definite tasks must not be removed, so they HAVE to be in the given task spec list.
             if child._is_definite():
-                raise WorkflowException(self.task_spec,
-                                        'removal of non-predicted child %s' %
-                                        repr(child))
-            remove.append(child)
+                raise WorkflowException(self.task_spec, f'removal of non-predicted child {child}')
+            unneeded_children.append(child)
 
         # Remove and add the children accordingly.
-        for child in remove:
+        for child in unneeded_children:
             self.children.remove(child)
-        for task_spec in add:
+        for task_spec in new_children:
             self._add_child(task_spec, state)
 
@@ -574,7 +570,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
             if child._is_definite():
                 continue
             child._set_state(TaskState.LIKELY)
-        return
 
     def _is_descendant_of(self, parent):
         """

View File

@@ -19,11 +19,15 @@ class BpmnWorkflowTestCase(unittest.TestCase):
 
     serializer = BpmnWorkflowSerializer(wf_spec_converter)
 
-    def load_workflow_spec(self, filename, process_name, validate=True):
+    def get_parser(self, filename, validate=True):
         f = os.path.join(os.path.dirname(__file__), 'data', filename)
         validator = BpmnValidator() if validate else None
         parser = TestBpmnParser(validator=validator)
         parser.add_bpmn_files_by_glob(f)
+        return parser
+
+    def load_workflow_spec(self, filename, process_name, validate=True):
+        parser = self.get_parser(filename, validate)
         top_level_spec = parser.get_spec(process_name)
         subprocesses = parser.get_subprocess_specs(process_name)
         return top_level_spec, subprocesses
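
Note: splitting get_parser out of load_workflow_spec lets a test inspect parser state before (or instead of) building a spec, which the new parser-level tests below rely on. A short sketch of the resulting flow (file and process names borrowed from the lane test in this commit):

    parser = self.get_parser('lanes.bpmn')              # parse only
    parser.get_process_parser('lanes').has_lanes()      # inspect the parsed process
    spec = parser.get_spec('lanes')                     # then build the spec if needed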

View File

@@ -6,10 +6,22 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 class CollaborationTest(BpmnWorkflowTestCase):
 
+    def testParserProvidesInfoOnMessagesAndCorrelations(self):
+        parser = self.get_parser('collaboration.bpmn')
+        self.assertEqual(list(parser.messages.keys()), ['love_letter', 'love_letter_response'])
+        self.assertEqual(parser.correlations,
+            {'lover_name': {'name': "Lover's Name",
+                'retrieval_expressions': [
+                    {'expression': 'lover_name',
+                     'messageRef': 'love_letter'},
+                    {'expression': 'from_name',
+                     'messageRef': 'love_letter_response'}]}}
+        )
+
     def testCollaboration(self):
         spec, subprocesses = self.load_collaboration('collaboration.bpmn', 'my_collaboration')
 
         # Only executable processes should be started
         self.assertIn('process_buddy', subprocesses)
         self.assertNotIn('random_person_process', subprocesses)
@@ -122,4 +134,4 @@ class CollaborationTest(BpmnWorkflowTestCase):
         start = self.workflow.get_tasks_from_spec_name('Start')[0]
         start.data['lover_name'] = 'Peggy'
         self.workflow.do_engine_steps()
         self.save_restore()

View File

@@ -19,6 +19,12 @@ class SwimLaneTest(BpmnWorkflowTestCase):
         spec, subprocesses = self.load_workflow_spec('lanes.bpmn','lanes')
         self.workflow = BpmnWorkflow(spec, subprocesses)
 
+    def testBpmnParserKnowsLanesExist(self):
+        parser = self.get_parser('lanes.bpmn')
+        self.assertTrue(parser.get_process_parser('lanes').has_lanes())
+        parser = self.get_parser('random_fact.bpmn')
+        self.assertFalse(parser.get_process_parser('random_fact').has_lanes())
+
     def testRunThroughHappy(self):
 
         self.workflow.do_engine_steps()

View File

@@ -23,13 +23,17 @@ class BaseTestCase(BpmnWorkflowTestCase):
 
     serializer = BpmnWorkflowSerializer(wf_spec_converter)
 
-    def load_workflow_spec(self, filename, process_name, dmn_filename=None):
-        bpmn = os.path.join(os.path.dirname(__file__), 'data', filename)
+    def get_parser(self, filename, dmn_filename=None):
+        f = os.path.join(os.path.dirname(__file__), 'data', filename)
         parser = CamundaParser()
-        parser.add_bpmn_files_by_glob(bpmn)
+        parser.add_bpmn_files_by_glob(f)
         if dmn_filename is not None:
             dmn = os.path.join(os.path.dirname(__file__), 'data', 'dmn', dmn_filename)
             parser.add_dmn_files_by_glob(dmn)
+        return parser
+
+    def load_workflow_spec(self, filename, process_name, dmn_filename=None):
+        parser = self.get_parser(filename, dmn_filename)
         top_level_spec = parser.get_spec(process_name)
         subprocesses = parser.get_subprocess_specs(process_name)
         return top_level_spec, subprocesses

View File

@@ -70,11 +70,9 @@ class MultiInstanceParallelArrayTest(BaseTestCase):
                     {"CurrentFamilyMember": {"Birthdate": "10/05/1985" + str(x)}})
                 self.workflow.do_engine_steps()
                 self.workflow.complete_task_from_id(task.id)
-                # The data should still be available on the current task.
-                self.assertEqual({'FirstName': "The Funk #%i" % x,
-                                  'Birthdate': '10/05/1985' + str(x)},
-                                 self.workflow.get_task(task.id)
-                                 .data['CurrentFamilyMember'])
+                # We used to check that the current data variable was available in the task,
+                # but there's no reason to preserve it after the task completes.  We removed it
+                # in some cases and left it in others, which just adds to the confusion.
                 self.workflow.do_engine_steps()
             if save_restore:
                 self.reload_save_restore()

View File

@@ -44,8 +44,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
 
         self.workflow.do_engine_steps()
         if save_restore: self.save_restore()
 
-        self.assertEqual({'current': {'A': 'y'},
-                          'do_step': 'Yes',
+        self.assertEqual({'do_step': 'Yes',
                           'output': {'1': {'A': 'x'}, '2': {'A': 'y'}, '3': {'A': 'z'}}},
                          self.workflow.last_task.data)
 
@@ -66,8 +65,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
 
         self.assertTrue(self.workflow.is_completed())
 
-        self.assertEqual({'current': {'A': 'x'},
-                          'do_step': 'Yes',
+        self.assertEqual({'do_step': 'Yes',
                           'C': 'c',
                           'output': {'1': {'A': 'a1'},
                                      '2': {'A': 'y'},
@@ -75,11 +73,6 @@ class ResetTokenTestMIParallel(BaseTestCase):
                          self.workflow.last_task.data)
 
 def suite():
     return unittest.TestLoader().loadTestsFromTestCase(ResetTokenTestMIParallel)

View File

@@ -14,6 +14,16 @@ class StartMessageTest(BaseTestCase):
         self.spec, self.subprocesses = self.load_workflow_spec('message_test.bpmn', 'ThrowCatch')
         self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
 
+    def testParserCanReturnStartMessages(self):
+        parser = self.get_parser('message_test.bpmn')
+        self.assertEqual(
+            parser.process_parsers['ThrowCatch'].start_messages(), ['Message_1rkbi27'])
+        parser = self.get_parser('random_fact.bpmn')
+        self.assertEqual(
+            parser.process_parsers['random_fact'].start_messages(), [])
+
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)

View File

@@ -1,11 +1,11 @@
 Start
 first
 excl_choice_1
-sub_workflow_1
 Start
 first
 excl_choice_1
 last
 End
+sub_workflow_1
 last
 End

View File

@@ -1,10 +1,10 @@
 Start
 first
-sub_workflow_1
 Start
 first
 last
 End
+sub_workflow_1
 second
 join
 last

View File

@@ -38,6 +38,8 @@ class TaskSpecTest(unittest.TestCase):
     def do_next_unique_task(self, name):
         # This method asserts that there is only one ready task! The specified
         # one - and then completes it
+        for task in self.workflow.get_tasks(TaskState.WAITING):
+            task.task_spec._update(task)
         ready_tasks = self.workflow.get_tasks(TaskState.READY)
         self.assertEqual(1, len(ready_tasks))
         task = ready_tasks[0]
@@ -58,12 +60,13 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('data', 'block_to_subworkflow.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_unique_task('sub_workflow_1')
-        # Inner:
+        # Inner. The subworkflow task will complete automatically after the subwokflow completes
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
         self.do_next_unique_task('last')
         self.do_next_unique_task('End')
+
         # Back to outer:
         self.do_next_unique_task('last')
         self.do_next_unique_task('End')
@@ -72,7 +75,7 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('data', 'subworkflow_to_block.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_unique_task('sub_workflow_1')
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
@@ -86,8 +89,9 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_named_step('second', ['sub_workflow_1'])
-        self.do_next_unique_task('sub_workflow_1')
+        # The subworkflow task now sets its child tasks to READY and waits
+        self.do_next_named_step('second', ['Start'])
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
@@ -102,8 +106,8 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_named_step('second', ['sub_workflow_1'])
-        self.do_next_unique_task('sub_workflow_1')
+        self.do_next_named_step('second', ['Start'])
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')

View File

@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
-from builtins import str
-from builtins import range
 import time
 from SpiffWorkflow.task import Task, TaskState
 from SpiffWorkflow.workflow import Workflow
@@ -33,7 +31,6 @@ def on_reached_cb(workflow, task, taken_path):
     props = []
     for key, value in list(task.task_spec.data.items()):
         props.append('='.join((key, str(value))))
-    # print "REACHED:", task.get_name(), atts, props
 
     # Store the list of data in the workflow.
     atts = ';'.join(atts)

View File

@@ -2,4 +2,4 @@ pip==22.3.1
 nox==2022.8.7
 nox-poetry==1.0.2
 poetry==1.2.2
-virtualenv==20.16.6
+virtualenv==20.16.7

flask-bpmn/poetry.lock generated (87 changed lines)
View File

@@ -491,17 +491,20 @@ flake8 = "*"
 
 [[package]]
 name = "flake8-rst-docstrings"
-version = "0.2.7"
-description = "Python docstring reStructuredText (RST) validator"
+version = "0.3.0"
+description = "Python docstring reStructuredText (RST) validator for flake8"
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 
 [package.dependencies]
-flake8 = ">=3.0.0"
+flake8 = ">=3"
 pygments = "*"
 restructuredtext-lint = "*"
 
+[package.extras]
+develop = ["build", "twine"]
+
 [[package]]
 name = "flask"
 version = "2.2.2"
@@ -595,7 +598,7 @@ tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-s
 
 [[package]]
 name = "flask-migrate"
-version = "3.1.0"
+version = "4.0.0"
 description = "SQLAlchemy database migrations for Flask applications using Alembic."
 category = "main"
 optional = false
@@ -916,7 +919,7 @@ mypy-extensions = "*"
 
 [[package]]
 name = "mypy"
-version = "0.990"
+version = "0.991"
 description = "Optional static typing for Python"
 category = "dev"
 optional = false
@@ -1501,7 +1504,7 @@ test = ["pytest"]
 
 [[package]]
 name = "SpiffWorkflow"
 version = "1.2.1"
-description = "A workflow framework and BPMN/DMN Processor"
+description = ""
 category = "main"
 optional = false
 python-versions = "*"
@@ -1517,7 +1520,7 @@ lxml = "*"
 
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"
+resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46"
 
 [[package]]
 name = "sqlalchemy"
@@ -1767,7 +1770,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
 
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7"
-content-hash = "59fd1a96fd43ad04e57500a4a2f9c2a8b09279872f0277736d52a827634977a6"
+content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d"
 
 [metadata.files]
 alabaster = [
@@ -2045,8 +2048,8 @@ flake8-polyfill = [
     {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
 ]
 flake8-rst-docstrings = [
-    {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"},
-    {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"},
+    {file = "flake8-rst-docstrings-0.3.0.tar.gz", hash = "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4"},
+    {file = "flake8_rst_docstrings-0.3.0-py3-none-any.whl", hash = "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c"},
 ]
 flask = [
     {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},
@@ -2071,8 +2074,8 @@ flask-marshmallow = [
     {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"},
 ]
 flask-migrate = [
-    {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"},
-    {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"},
+    {file = "Flask-Migrate-4.0.0.tar.gz", hash = "sha256:2a301c3040af6844f29d9149abe428a0f08ebc8fa149e72113bbb36fa341920a"},
+    {file = "Flask_Migrate-4.0.0-py3-none-any.whl", hash = "sha256:e75a46b657e3062296b9f6e92f31e19662f76cfee8abd6ae94640cbcb79fe016"},
 ]
 flask-restful = [
     {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"},
@@ -2391,36 +2394,36 @@ monkeytype = [
     {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
 ]
 mypy = [
-    {file = "mypy-0.990-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aaf1be63e0207d7d17be942dcf9a6b641745581fe6c64df9a38deb562a7dbafa"},
-    {file = "mypy-0.990-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d555aa7f44cecb7ea3c0ac69d58b1a5afb92caa017285a8e9c4efbf0518b61b4"},
-    {file = "mypy-0.990-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f694d6d09a460b117dccb6857dda269188e3437c880d7b60fa0014fa872d1e9"},
-    {file = "mypy-0.990-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:269f0dfb6463b8780333310ff4b5134425157ef0d2b1d614015adaf6d6a7eabd"},
-    {file = "mypy-0.990-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8798c8ed83aa809f053abff08664bdca056038f5a02af3660de00b7290b64c47"},
-    {file = "mypy-0.990-cp310-cp310-win_amd64.whl", hash = "sha256:47a9955214615108c3480a500cfda8513a0b1cd3c09a1ed42764ca0dd7b931dd"},
-    {file = "mypy-0.990-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4a8a6c10f4c63fbf6ad6c03eba22c9331b3946a4cec97f008e9ffb4d3b31e8e2"},
-    {file = "mypy-0.990-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd2dd3730ba894ec2a2082cc703fbf3e95a08479f7be84912e3131fc68809d46"},
-    {file = "mypy-0.990-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7da0005e47975287a92b43276e460ac1831af3d23032c34e67d003388a0ce8d0"},
-    {file = "mypy-0.990-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262c543ef24deb10470a3c1c254bb986714e2b6b1a67d66daf836a548a9f316c"},
-    {file = "mypy-0.990-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ff201a0c6d3ea029d73b1648943387d75aa052491365b101f6edd5570d018ea"},
-    {file = "mypy-0.990-cp311-cp311-win_amd64.whl", hash = "sha256:1767830da2d1afa4e62b684647af0ff79b401f004d7fa08bc5b0ce2d45bcd5ec"},
-    {file = "mypy-0.990-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6826d9c4d85bbf6d68cb279b561de6a4d8d778ca8e9ab2d00ee768ab501a9852"},
-    {file = "mypy-0.990-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46897755f944176fbc504178422a5a2875bbf3f7436727374724842c0987b5af"},
-    {file = "mypy-0.990-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0680389c34284287fe00e82fc8bccdea9aff318f7e7d55b90d967a13a9606013"},
-    {file = "mypy-0.990-cp37-cp37m-win_amd64.whl", hash = "sha256:b08541a06eed35b543ae1a6b301590eb61826a1eb099417676ddc5a42aa151c5"},
-    {file = "mypy-0.990-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:be88d665e76b452c26fb2bdc3d54555c01226fba062b004ede780b190a50f9db"},
-    {file = "mypy-0.990-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b8f4a8213b1fd4b751e26b59ae0e0c12896568d7e805861035c7a15ed6dc9eb"},
-    {file = "mypy-0.990-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b6f85c2ad378e3224e017904a051b26660087b3b76490d533b7344f1546d3ff"},
-    {file = "mypy-0.990-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ee5f99817ee70254e7eb5cf97c1b11dda29c6893d846c8b07bce449184e9466"},
-    {file = "mypy-0.990-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49082382f571c3186ce9ea0bd627cb1345d4da8d44a8377870f4442401f0a706"},
-    {file = "mypy-0.990-cp38-cp38-win_amd64.whl", hash = "sha256:aba38e3dd66bdbafbbfe9c6e79637841928ea4c79b32e334099463c17b0d90ef"},
-    {file = "mypy-0.990-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9d851c09b981a65d9d283a8ccb5b1d0b698e580493416a10942ef1a04b19fd37"},
-    {file = "mypy-0.990-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d847dd23540e2912d9667602271e5ebf25e5788e7da46da5ffd98e7872616e8e"},
-    {file = "mypy-0.990-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc6019808580565040cd2a561b593d7c3c646badd7e580e07d875eb1bf35c695"},
-    {file = "mypy-0.990-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3150d409609a775c8cb65dbe305c4edd7fe576c22ea79d77d1454acd9aeda8"},
-    {file = "mypy-0.990-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3227f14fe943524f5794679156488f18bf8d34bfecd4623cf76bc55958d229c5"},
-    {file = "mypy-0.990-cp39-cp39-win_amd64.whl", hash = "sha256:c76c769c46a1e6062a84837badcb2a7b0cdb153d68601a61f60739c37d41cc74"},
-    {file = "mypy-0.990-py3-none-any.whl", hash = "sha256:8f1940325a8ed460ba03d19ab83742260fa9534804c317224e5d4e5aa588e2d6"},
-    {file = "mypy-0.990.tar.gz", hash = "sha256:72382cb609142dba3f04140d016c94b4092bc7b4d98ca718740dc989e5271b8d"},
+    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
+    {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
+    {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
+    {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
+    {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
+    {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
+    {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
+    {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
+    {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
+    {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
+    {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
+    {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
+    {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
+    {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
+    {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
+    {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
+    {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
+    {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
+    {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
+    {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
+    {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
+    {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
+    {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
+    {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
+    {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
+    {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
+    {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
+    {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
+    {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
+    {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
 ]
 mypy-extensions = [
     {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},

View File

@@ -37,7 +37,7 @@ greenlet = "^2.0.1"
 pytest = "^6.2.5"
 coverage = {extras = ["toml"], version = "^6.5"}
 safety = "^2.3.1"
-mypy = "^0.990"
+mypy = "^0.991"
 typeguard = "^2.13.2"
 xdoctest = {extras = ["colors"], version = "^1.1.0"}
 sphinx = "^4.3.0"
@@ -55,7 +55,7 @@ bandit = "1.7.2"
 flake8-bugbear = "^22.10.27"
 flake8-docstrings = "^1.6.0"
-flake8-rst-docstrings = "^0.2.7"
+flake8-rst-docstrings = "^0.3.0"
 pep8-naming = "^0.13.2"
 darglint = "^1.8.1"
 reorder-python-imports = "^3.9.0"