diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index 97cf7ca4..f0c9eaf3 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -16,10 +16,7 @@ jobs: fail-fast: false matrix: include: - # FIXME: https://github.com/mysql/mysql-connector-python/pull/86 - # put back when poetry update protobuf mysql-connector-python updates protobuf - # right now mysql is forcing protobuf to version 3 - # - { python: "3.11", os: "ubuntu-latest", session: "safety" } + - { python: "3.11", os: "ubuntu-latest", session: "safety" } - { python: "3.11", os: "ubuntu-latest", session: "mypy" } - { python: "3.10", os: "ubuntu-latest", session: "mypy" } - { python: "3.9", os: "ubuntu-latest", session: "mypy" } @@ -176,6 +173,19 @@ jobs: name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}} path: "./log/*.log" + # burnettk created an account at https://app.snyk.io/org/kevin-jfx + # and added his SNYK_TOKEN secret under the spiff-arena repo. 
+ snyk: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - name: Run Snyk to check for vulnerabilities + uses: snyk/actions/python@master + with: + args: spiffworkflow-backend + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + run_pre_commit_checks: runs-on: ubuntu-latest defaults: @@ -184,9 +194,6 @@ jobs: steps: - name: Check out the repository uses: actions/checkout@v3.3.0 - with: - # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud - fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v4.2.0 with: @@ -205,9 +212,6 @@ jobs: steps: - name: Check out the repository uses: actions/checkout@v3.3.0 - with: - # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud - fetch-depth: 0 - name: Checkout Samples uses: actions/checkout@v3 with: @@ -281,7 +285,7 @@ jobs: # so just skip everything but main if: github.ref_name == 'main' with: - projectBaseDir: spiffworkflow-frontend + projectBaseDir: spiffworkflow-backend env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} diff --git a/.gitignore b/.gitignore index deaccb3a..22f7178f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ pyrightconfig.json .idea/ t +*~ +.dccache +*~ \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index 01819634..268239fe 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -32,6 +32,11 @@ pipeline { description: 'ID of Jenkins credential for Docker registry.', defaultValue: params.DOCKER_CRED_ID ?: 'MISSING' ) + string( + name: 'DISCORD_WEBHOOK_CRED', + description: 'Name of credential with Discord webhook', + defaultValue: params.DISCORD_WEBHOOK_CRED ?: "", + ) booleanParam( name: 'PUBLISH', description: 'Publish built Docker images.', @@ -61,6 +66,16 @@ pipeline { image.push(env.DOCKER_TAG) } } } + post { + success { script { + if (params.DISCORD_WEBHOOK_CRED) { + discordNotify( + header: 'SpiffWorkflow Docker image published!', + cred: 
params.DISCORD_WEBHOOK_CRED, + ) + } + } } + } } } // stages post { @@ -68,3 +83,43 @@ pipeline { cleanup { cleanWs() } } // post } // pipeline + +def discordNotify(Map args=[:]) { + def opts = [ + header: args.header ?: 'Deployment successful!', + title: args.title ?: "${env.JOB_NAME}#${env.BUILD_NUMBER}", + cred: args.cred ?: null, + ] + def repo = [ + url: GIT_URL.minus('.git'), + branch: GIT_BRANCH.minus('origin/'), + commit: GIT_COMMIT.take(8), + prev: ( + env.GIT_PREVIOUS_SUCCESSFUL_COMMIT ?: env.GIT_PREVIOUS_COMMIT ?: 'master' + ).take(8), + ] + wrap([$class: 'BuildUser']) { + BUILD_USER_ID = env.BUILD_USER_ID + } + withCredentials([ + string( + credentialsId: opts.cred, + variable: 'DISCORD_WEBHOOK', + ), + ]) { + discordSend( + link: env.BUILD_URL, + result: currentBuild.currentResult, + webhookURL: env.DISCORD_WEBHOOK, + title: opts.title, + description: """ + ${opts.header} + Image: [`${params.DOCKER_NAME}:${params.DOCKER_TAG}`](https://hub.docker.com/r/${params.DOCKER_NAME}/tags?name=${params.DOCKER_TAG}) + Branch: [`${repo.branch}`](${repo.url}/commits/${repo.branch}) + Commit: [`${repo.commit}`](${repo.url}/commit/${repo.commit}) + Diff: [`${repo.prev}...${repo.commit}`](${repo.url}/compare/${repo.prev}...${repo.commit}) + By: [`${BUILD_USER_ID}`](${repo.url}/commits?author=${BUILD_USER_ID}) + """, + ) + } +} diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/schema/__init__.py b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/schema/__init__.py deleted file mode 100644 index e70a73fa..00000000 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/schema/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""__init.py__""" diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py index 87b292a0..473095e6 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py @@ -72,7 +72,7 @@ def 
convert_timer_expressions(dct): def add_default_condition_to_cond_task_specs(dct): for spec in [ts for ts in dct['spec']['task_specs'].values() if ts['typename'] == 'ExclusiveGateway']: - if (None, spec['default_task_spec']) not in spec['cond_task_specs']: + if spec['default_task_spec'] is not None and (None, spec['default_task_spec']) not in spec['cond_task_specs']: spec['cond_task_specs'].append({'condition': None, 'task_spec': spec['default_task_spec']}) def create_data_objects_and_io_specs(dct): @@ -111,3 +111,14 @@ def check_multiinstance(dct): specs = [ spec for spec in dct['spec']['task_specs'].values() if 'prevtaskclass' in spec ] if len(specs) > 0: raise VersionMigrationError("This workflow cannot be migrated because it contains MultiInstance Tasks") + +def remove_loop_reset(dct): + task_specs = [spec for spec in dct['spec']['task_specs'].values() if spec['typename'] == 'LoopResetTask'] + for spec in task_specs: + if spec['typename'] == 'LoopResetTask': + tasks = [t for t in dct['tasks'].values() if t['task_spec'] == spec['name']] + for task in tasks: + dct['tasks'].pop(task['id']) + parent = dct['tasks'].get(task['parent']) + parent['children'] = [c for c in parent['children'] if c != task['id']] + dct['spec']['task_specs'].pop(spec['name']) diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_migration.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_migration.py index 6c38ba9d..47e1fe4b 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_migration.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/migration/version_migration.py @@ -6,6 +6,7 @@ from .version_1_2 import ( add_default_condition_to_cond_task_specs, create_data_objects_and_io_specs, check_multiinstance, + remove_loop_reset, ) def from_version_1_1(old): @@ -23,12 +24,18 @@ def from_version_1_1(old): Data inputs and outputs on process specs were moved inside a BPMNIOSpecification, and are now TaskDataReferences; 
BpmnDataSpecifications that referred to Data Objects are now DataObjects. + + Multiinstance tasks were completely refactored, in a way that is simply too difficult to + migrate. + + Loop reset tasks were removed. """ new = deepcopy(old) convert_timer_expressions(new) add_default_condition_to_cond_task_specs(new) create_data_objects_and_io_specs(new) check_multiinstance(new) + remove_loop_reset(new) new['VERSION'] = "1.2" return new diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/process_spec.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/process_spec.py index 0d6eeaa2..6255b127 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/process_spec.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/process_spec.py @@ -1,5 +1,4 @@ from ..specs.BpmnProcessSpec import BpmnProcessSpec -from ..specs.MultiInstanceTask import MultiInstanceTask from ..specs.events.IntermediateEvent import _BoundaryEventParent from .helpers.spec import WorkflowSpecConverter diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec.py index c248dc2f..894debc3 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec.py @@ -2,7 +2,6 @@ from .helpers.spec import TaskSpecConverter from ...specs.StartTask import StartTask from ...specs.Simple import Simple -from ...specs.LoopResetTask import LoopResetTask from ..specs.BpmnProcessSpec import _EndJoin from ..specs.BpmnSpecMixin import _BpmnCondition @@ -27,8 +26,6 @@ from ..specs.events.IntermediateEvent import ( ReceiveTask, ) -from ..workflow import BpmnWorkflow - class DefaultTaskSpecConverter(TaskSpecConverter): @@ -50,23 +47,6 @@ class StartTaskConverter(DefaultTaskSpecConverter): super().__init__(StartTask, registry) -class LoopResetTaskConverter(DefaultTaskSpecConverter): - - def __init__(self, registry): - super().__init__(LoopResetTask, registry) - - def to_dict(self, spec): - dct = 
super().to_dict(spec) - dct['destination_id'] = str(spec.destination_id) - dct['destination_spec_name'] = spec.destination_spec_name - return dct - - def from_dict(self, dct): - spec = self.task_spec_from_dict(dct) - spec.destination_id = self.registry.convert(spec.destination_id) - return spec - - class EndJoinConverter(DefaultTaskSpecConverter): def __init__(self, registry): super().__init__(_EndJoin, registry) @@ -317,7 +297,6 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [ SimpleTaskConverter, StartTaskConverter, EndJoinConverter, - LoopResetTaskConverter, NoneTaskConverter, UserTaskConverter, ManualTaskConverter, diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py index 64a0868d..5167847e 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py @@ -246,7 +246,7 @@ class BpmnWorkflowSerializer: if isinstance(task_spec, SubWorkflowTask) and task_id in top_dct.get('subprocesses', {}): subprocess_spec = top.subprocess_specs[task_spec.spec] - subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process) + subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, deserializing=True) subprocess_dct = top_dct['subprocesses'].get(task_id, {}) subprocess.data = self.data_converter.restore(subprocess_dct.pop('data')) subprocess.success = subprocess_dct.pop('success') @@ -254,8 +254,12 @@ class BpmnWorkflowSerializer: subprocess.completed_event.connect(task_spec._on_subworkflow_completed, task) top_level_workflow.subprocesses[task.id] = subprocess - for child in [ process_dct['tasks'][c] for c in task_dict['children'] ]: - self.task_tree_from_dict(process_dct, child['id'], task, process, top, top_dct) + for child_task_id in task_dict['children']: + if child_task_id in process_dct['tasks']: + child = process_dct['tasks'][child_task_id] + self.task_tree_from_dict(process_dct, 
child_task_id, task, process, top, top_dct) + else: + raise ValueError(f"Task {task_id} ({task_spec.name}) has child {child_task_id}, but no such task exists") return task diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py index f5237d1a..90faa5ac 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py @@ -47,9 +47,10 @@ class _EndJoin(UnstructuredJoin): return force or len(waiting_tasks) == 0, waiting_tasks - def _on_complete_hook(self, my_task): - super(_EndJoin, self)._on_complete_hook(my_task) + def _run_hook(self, my_task): + result = super(_EndJoin, self)._run_hook(my_task) my_task.workflow.data.update(my_task.data) + return result class BpmnProcessSpec(WorkflowSpec): diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/InclusiveGateway.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/InclusiveGateway.py index 4bb5eca4..157f0a58 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/InclusiveGateway.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/InclusiveGateway.py @@ -110,11 +110,12 @@ class InclusiveGateway(MultiChoice, UnstructuredJoin): return complete, waiting_tasks - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): outputs = self._get_matching_outputs(my_task) if len(outputs) == 0: raise WorkflowTaskException(f'No conditions satisfied on gateway', task=my_task) my_task._sync_children(outputs, TaskState.FUTURE) + return True @property def spec_type(self): diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/ScriptTask.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/ScriptTask.py index 310ea76f..1332e95a 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/ScriptTask.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/ScriptTask.py @@ -29,14 +29,14 @@ class ScriptEngineTask(Simple, BpmnSpecMixin): """Please override for specific Implementations, see ScriptTask below for an example""" pass - def 
_on_complete_hook(self, task): + def _run_hook(self, task): try: self._execute(task) - super(ScriptEngineTask, self)._on_complete_hook(task) + super(ScriptEngineTask, self)._run_hook(task) except Exception as exc: task._set_state(TaskState.WAITING) raise exc - + return True class ScriptTask(ScriptEngineTask): diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py index 34675501..aa429624 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py @@ -25,9 +25,6 @@ class SubWorkflowTask(BpmnSpecMixin): def spec_type(self): return 'Subprocess' - def _on_ready_hook(self, my_task): - super()._on_ready_hook(my_task) - def _on_subworkflow_completed(self, subworkflow, my_task): self.update_data(my_task, subworkflow) my_task._set_state(TaskState.READY) diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py index 8801002b..6b36a404 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py @@ -54,14 +54,14 @@ class UnstructuredJoin(Join, BpmnSpecMixin): last_changed = None thread_tasks = [] for task in split_task._find_any(self): - # Ignore tasks from other threads. if task.thread_id != my_task.thread_id: + # Ignore tasks from other threads. (Do we need this condition?) continue - # Ignore my outgoing branches. - if self.split_task and task._is_descendant_of(my_task): - continue - # For an inclusive join, this can happen - it's a future join if not task.parent._is_finished(): + # For an inclusive join, this can happen - it's a future join + continue + if my_task._is_descendant_of(task): + # Skip ancestors (otherwise the branch this task is on will get dropped) continue # We have found a matching instance. 
thread_tasks.append(task) @@ -77,20 +77,13 @@ class UnstructuredJoin(Join, BpmnSpecMixin): for task in thread_tasks: collected_data.update(task.data) - # Mark the identified task instances as COMPLETED. The exception - # is the most recently changed task, for which we assume READY. - # By setting the state to READY only, we allow for calling - # :class:`Task.complete()`, which leads to the task tree being - # (re)built underneath the node. for task in thread_tasks: - if task == last_changed: - task.data.update(collected_data) - self.entered_event.emit(my_task.workflow, my_task) - task._ready() - else: - task._set_state(TaskState.COMPLETED) + if task != last_changed: + task._set_state(TaskState.CANCELLED) task._drop_children() - + else: + task.data.update(collected_data) + def task_should_set_children_future(self, my_task): return True diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py index a80349ad..5afcfe7e 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py @@ -18,7 +18,6 @@ # 02110-1301 USA from .event_types import ThrowingEvent, CatchingEvent -from .event_definitions import CycleTimerEventDefinition from ..BpmnSpecMixin import BpmnSpecMixin from ....specs.Simple import Simple from ....task import TaskState @@ -67,13 +66,15 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): def spec_type(self): return 'Boundary Event Parent' - def _on_ready_hook(self, my_task): + def _run_hook(self, my_task): # Clear any events that our children might have received and # wait for new events for child in my_task.children: if isinstance(child.task_spec, BoundaryEvent): child.task_spec.event_definition.reset(child) + child._set_state(TaskState.WAITING) + return True def _child_complete_hook(self, child_task): @@ -123,7 +124,7 @@ class BoundaryEvent(CatchingEvent): 
super(BoundaryEvent, self).catch(my_task, event_definition) # Would love to get rid of this statement and manage in the workflow # However, it is not really compatible with how boundary events work. - my_task.complete() + my_task.run() class EventBasedGateway(CatchingEvent): @@ -135,8 +136,8 @@ class EventBasedGateway(CatchingEvent): def _predict_hook(self, my_task): my_task._sync_children(self.outputs, state=TaskState.MAYBE) - def _on_complete_hook(self, my_task): + def _on_ready_hook(self, my_task): + seen_events = my_task.internal_data.get('seen_events', []) for child in my_task.children: - if not child.task_spec.event_definition.has_fired(child): + if child.task_spec.event_definition not in seen_events: child.cancel() - return super()._on_complete_hook(my_task) \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py index 776f8394..4f6c1322 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py @@ -20,13 +20,14 @@ import re from datetime import datetime, timedelta, timezone from calendar import monthrange -from time import timezone as tzoffset +from time import timezone as tzoffset, altzone as dstoffset, daylight as isdst from copy import deepcopy -from SpiffWorkflow.exceptions import SpiffWorkflowException, WorkflowException +from SpiffWorkflow.exceptions import WorkflowException from SpiffWorkflow.task import TaskState -LOCALTZ = timezone(timedelta(seconds=-1 * tzoffset)) +seconds_from_utc = dstoffset if isdst else tzoffset +LOCALTZ = timezone(timedelta(seconds=-1 * seconds_from_utc)) class EventDefinition(object): @@ -452,42 +453,38 @@ class CycleTimerEventDefinition(TimerEventDefinition): def event_type(self): return 'Cycle Timer' - def has_fired(self, my_task): + def cycle_complete(self, my_task): - if not 
my_task._get_internal_data('event_fired'): - # Only check for the next cycle when the event has not fired to prevent cycles from being skipped. - event_value = my_task._get_internal_data('event_value') - if event_value is None: - expression = my_task.workflow.script_engine.evaluate(my_task, self.expression) - cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(expression) - event_value = {'cycles': cycles, 'next': start.isoformat(), 'duration': duration.total_seconds()} + event_value = my_task._get_internal_data('event_value') + if event_value is None: + # Don't necessarily like this, but it's a lot more straightforward than trying to only create + # a child task on loop iterations after the first + my_task._drop_children() + expression = my_task.workflow.script_engine.evaluate(my_task, self.expression) + cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(expression) + event_value = {'cycles': cycles, 'next': start.isoformat(), 'duration': duration.total_seconds()} - if event_value['cycles'] > 0: - next_event = datetime.fromisoformat(event_value['next']) - if next_event < datetime.now(timezone.utc): - my_task._set_internal_data(event_fired=True) - event_value['next'] = (next_event + timedelta(seconds=event_value['duration'])).isoformat() + # When the next timer event passes, return True to allow the parent task to generate another child + # Use event fired to indicate that this timer has completed all cycles and the task can be completed + ready = False + if event_value['cycles'] != 0: + next_event = datetime.fromisoformat(event_value['next']) + if next_event < datetime.now(timezone.utc): + event_value['next'] = (next_event + timedelta(seconds=event_value['duration'])).isoformat() + event_value['cycles'] -= 1 + ready = True + else: + my_task.internal_data.pop('event_value', None) + my_task.internal_data['event_fired'] = True - my_task._set_internal_data(event_value=event_value) - - return 
my_task._get_internal_data('event_fired', False) + my_task._set_internal_data(event_value=event_value) + return ready def timer_value(self, my_task): event_value = my_task._get_internal_data('event_value') - if event_value is not None and event_value['cycles'] > 0: + if event_value is not None and event_value['cycles'] != 0: return event_value['next'] - def complete(self, my_task): - event_value = my_task._get_internal_data('event_value') - if event_value is not None and event_value['cycles'] == 0: - my_task.internal_data.pop('event_value') - return True - - def complete_cycle(self, my_task): - # Only increment when the task completes - if my_task._get_internal_data('event_value') is not None: - my_task.internal_data['event_value']['cycles'] -= 1 - class MultipleEventDefinition(EventDefinition): @@ -504,11 +501,10 @@ class MultipleEventDefinition(EventDefinition): seen_events = my_task.internal_data.get('seen_events', []) for event in self.event_definitions: - if isinstance(event, (TimerEventDefinition, CycleTimerEventDefinition)): + if isinstance(event, TimerEventDefinition): child = [c for c in my_task.children if c.task_spec.event_definition == event] child[0].task_spec._update_hook(child[0]) - child[0]._set_state(TaskState.MAYBE) - if event.has_fired(my_task): + if event.has_fired(child[0]): seen_events.append(event) if self.parallel: diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py index e1a01853..996f1d9a 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py @@ -57,21 +57,22 @@ class CatchingEvent(Simple, BpmnSpecMixin): if self.event_definition.has_fired(my_task): return True - else: + elif isinstance(self.event_definition, CycleTimerEventDefinition): + if self.event_definition.cycle_complete(my_task): + for output in self.outputs: + child = my_task._add_child(output, TaskState.READY) + 
child.task_spec._predict(child, mask=TaskState.READY|TaskState.PREDICTED_MASK) + if my_task.state != TaskState.WAITING: + my_task._set_state(TaskState.WAITING) + elif my_task.state != TaskState.WAITING: my_task._set_state(TaskState.WAITING) - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): if isinstance(self.event_definition, MessageEventDefinition): self.event_definition.update_task_data(my_task) - elif isinstance(self.event_definition, CycleTimerEventDefinition): - self.event_definition.complete_cycle(my_task) - if not self.event_definition.complete(my_task): - for output in self.outputs: - my_task._add_child(output) - my_task._set_state(TaskState.WAITING) self.event_definition.reset(my_task) - super(CatchingEvent, self)._on_complete_hook(my_task) + return super(CatchingEvent, self)._run_hook(my_task) # This fixes the problem of boundary events remaining cancelled if the task is reused. # It pains me to add these methods, but unless we can get rid of the loop reset task we're stuck @@ -95,6 +96,7 @@ class ThrowingEvent(Simple, BpmnSpecMixin): super(ThrowingEvent, self).__init__(wf_spec, name, **kwargs) self.event_definition = event_definition - def _on_complete_hook(self, my_task): - super(ThrowingEvent, self)._on_complete_hook(my_task) + def _run_hook(self, my_task): + super(ThrowingEvent, self)._run_hook(my_task) self.event_definition.throw(my_task) + return True diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py b/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py index bdb0a11b..fec316f9 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py @@ -29,7 +29,7 @@ from .specs.events.StartEvent import StartEvent from .specs.SubWorkflowTask import CallActivity from ..task import TaskState, Task from ..workflow import Workflow -from ..exceptions import WorkflowException, WorkflowTaskException +from ..exceptions import TaskNotFoundException, WorkflowException, WorkflowTaskException class 
BpmnMessage: @@ -251,7 +251,7 @@ class BpmnWorkflow(Workflow): for task in engine_steps: if will_complete_task is not None: will_complete_task(task) - task.complete() + task.run() if did_complete_task is not None: did_complete_task(task) if task.task_spec.name == exit_at: @@ -271,7 +271,10 @@ class BpmnWorkflow(Workflow): for my_task in self.get_tasks(TaskState.WAITING): if will_refresh_task is not None: will_refresh_task(my_task) - my_task.task_spec._update(my_task) + # This seems redundant, but the state could have been updated by another waiting task and no longer be waiting. + # Someday, I would like to get rid of this method, and also do_engine_steps + if my_task.state == TaskState.WAITING: + my_task.task_spec._update(my_task) if did_refresh_task is not None: did_refresh_task(my_task) @@ -279,10 +282,15 @@ class BpmnWorkflow(Workflow): return [t for t in self.get_tasks(workflow=workflow) if t.task_spec.name == name] def get_tasks(self, state=TaskState.ANY_MASK, workflow=None): + # Now that I've revisited and had to ask myself what the hell was I doing, I realize I should comment this tasks = [] top = self._get_outermost_workflow() - wf = workflow or top - for task in Workflow.get_tasks(wf): + # I think it makes more sense to start with the current workflow, which is probably going to be the top + # most of the time anyway + wf = workflow or self + # We can't filter the iterator on the state because we have to subprocesses, and the subprocess task will + # almost surely be in a different state than the tasks we want + for task in Workflow.get_tasks_iterator(wf): subprocess = top.subprocesses.get(task.id) if subprocess is not None: tasks.extend(subprocess.get_tasks(state, subprocess)) @@ -290,42 +298,28 @@ class BpmnWorkflow(Workflow): tasks.append(task) return tasks - def _find_task(self, task_id): - if task_id is None: - raise WorkflowException('task_id is None', task_spec=self.spec) - for task in self.get_tasks(): + def get_task_from_id(self, task_id, 
workflow=None): + for task in self.get_tasks(workflow=workflow): if task.id == task_id: return task - raise WorkflowException(f'A task with the given task_id ({task_id}) was not found', task_spec=self.spec) + raise TaskNotFoundException(f'A task with the given task_id ({task_id}) was not found', task_spec=self.spec) - def complete_task_from_id(self, task_id): - # I don't even know why we use this stupid function instead of calling task.complete, - # since all it does is search the task tree and call the method - task = self._find_task(task_id) - return task.complete() - - def reset_task_from_id(self, task_id): - task = self._find_task(task_id) - if task.workflow.last_task and task.workflow.last_task.data: - data = task.workflow.last_task.data - return task.reset_token(data) - - def get_ready_user_tasks(self,lane=None): + def get_ready_user_tasks(self, lane=None, workflow=None): """Returns a list of User Tasks that are READY for user action""" if lane is not None: - return [t for t in self.get_tasks(TaskState.READY) + return [t for t in self.get_tasks(TaskState.READY, workflow) if (not self._is_engine_task(t.task_spec)) and (t.task_spec.lane == lane)] else: - return [t for t in self.get_tasks(TaskState.READY) + return [t for t in self.get_tasks(TaskState.READY, workflow) if not self._is_engine_task(t.task_spec)] - def get_waiting_tasks(self): + def get_waiting_tasks(self, workflow=None): """Returns a list of all WAITING tasks""" - return self.get_tasks(TaskState.WAITING) + return self.get_tasks(TaskState.WAITING, workflow) - def get_catching_tasks(self): - return [ task for task in self.get_tasks() if isinstance(task.task_spec, CatchingEvent) ] + def get_catching_tasks(self, workflow=None): + return [task for task in self.get_tasks(workflow=workflow) if isinstance(task.task_spec, CatchingEvent)] def _is_engine_task(self, task_spec): return (not hasattr(task_spec, 'is_engine_task') or task_spec.is_engine_task()) diff --git 
a/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec.py b/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec.py index 99519351..e5a03e99 100644 --- a/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec.py +++ b/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec.py @@ -5,10 +5,7 @@ from ..specs.model import DecisionTable, Rule, HitPolicy from ..specs.model import Input, InputEntry, Output, OutputEntry from ..engine.DMNEngine import DMNEngine -class BusinessRuleTaskConverter(TaskSpecConverter): - - def __init__(self, registry): - super().__init__(BusinessRuleTask, registry) +class BaseBusinessRuleTaskConverter(TaskSpecConverter): def to_dict(self, spec): dct = self.get_default_attributes(spec) @@ -98,3 +95,8 @@ class BusinessRuleTaskConverter(TaskSpecConverter): rule.outputEntries = [self.output_entry_from_dict(entry, outputs) for entry in dct['output_entries']] return rule + + +class BusinessRuleTaskConverter(BaseBusinessRuleTaskConverter): + def __init__(self, registry): + super().__init__(BusinessRuleTask, registry) \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/dmn/specs/BusinessRuleTask.py b/SpiffWorkflow/SpiffWorkflow/dmn/specs/BusinessRuleTask.py index 020c0391..44af8218 100644 --- a/SpiffWorkflow/SpiffWorkflow/dmn/specs/BusinessRuleTask.py +++ b/SpiffWorkflow/SpiffWorkflow/dmn/specs/BusinessRuleTask.py @@ -1,5 +1,4 @@ -from SpiffWorkflow.exceptions import WorkflowTaskException, WorkflowException, \ - SpiffWorkflowException +from SpiffWorkflow.exceptions import WorkflowTaskException, SpiffWorkflowException from ...specs.Simple import Simple @@ -17,7 +16,6 @@ class BusinessRuleTask(Simple, BpmnSpecMixin): def __init__(self, wf_spec, name, dmnEngine, **kwargs): super().__init__(wf_spec, name, **kwargs) - self.dmnEngine = dmnEngine self.resDict = None @@ -25,11 +23,10 @@ class BusinessRuleTask(Simple, BpmnSpecMixin): def spec_class(self): return 'Business Rule Task' - def _on_complete_hook(self, my_task): + def _run_hook(self, 
my_task): try: - my_task.data = DeepMerge.merge(my_task.data, - self.dmnEngine.result(my_task)) - super(BusinessRuleTask, self)._on_complete_hook(my_task) + my_task.data = DeepMerge.merge(my_task.data, self.dmnEngine.result(my_task)) + super(BusinessRuleTask, self)._run_hook(my_task) except SpiffWorkflowException as we: we.add_note(f"Business Rule Task '{my_task.task_spec.description}'.") raise we @@ -37,4 +34,4 @@ class BusinessRuleTask(Simple, BpmnSpecMixin): error = WorkflowTaskException(str(e), task=my_task) error.add_note(f"Business Rule Task '{my_task.task_spec.description}'.") raise error - + return True diff --git a/SpiffWorkflow/SpiffWorkflow/exceptions.py b/SpiffWorkflow/SpiffWorkflow/exceptions.py index 85e4fb36..3b335062 100644 --- a/SpiffWorkflow/SpiffWorkflow/exceptions.py +++ b/SpiffWorkflow/SpiffWorkflow/exceptions.py @@ -129,3 +129,7 @@ class WorkflowTaskException(WorkflowException): class StorageException(SpiffWorkflowException): pass + + +class TaskNotFoundException(WorkflowException): + pass diff --git a/SpiffWorkflow/SpiffWorkflow/serializer/base.py b/SpiffWorkflow/SpiffWorkflow/serializer/base.py index bbf4cc25..17c7bdc2 100644 --- a/SpiffWorkflow/SpiffWorkflow/serializer/base.py +++ b/SpiffWorkflow/SpiffWorkflow/serializer/base.py @@ -16,9 +16,7 @@ from builtins import object # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA -import re from .. import operators -from .. import specs from ..specs.AcquireMutex import AcquireMutex from ..specs.Cancel import Cancel from ..specs.CancelTask import CancelTask @@ -41,7 +39,6 @@ from ..specs.ThreadSplit import ThreadSplit from ..specs.Transform import Transform from ..specs.Trigger import Trigger from ..specs.WorkflowSpec import WorkflowSpec -from ..specs.LoopResetTask import LoopResetTask # Create a list of tag names out of the spec names. 
def spec_map(): @@ -68,7 +65,6 @@ def spec_map(): 'transform': Transform, 'trigger': Trigger, 'workflow-spec': WorkflowSpec, - 'loop-reset-task': LoopResetTask, 'task': Simple, } diff --git a/SpiffWorkflow/SpiffWorkflow/serializer/dict.py b/SpiffWorkflow/SpiffWorkflow/serializer/dict.py index ce27c9c2..df1939c9 100644 --- a/SpiffWorkflow/SpiffWorkflow/serializer/dict.py +++ b/SpiffWorkflow/SpiffWorkflow/serializer/dict.py @@ -22,8 +22,7 @@ from base64 import b64encode, b64decode from ..workflow import Workflow from ..util.impl import get_class from ..task import Task -from ..operators import (Attrib, PathAttrib, Equal, NotEqual, - Operator, GreaterThan, LessThan, Match) +from ..operators import (Attrib, PathAttrib, Equal, NotEqual, Operator, GreaterThan, LessThan, Match) from ..specs.base import TaskSpec from ..specs.AcquireMutex import AcquireMutex from ..specs.Cancel import Cancel @@ -44,10 +43,8 @@ from ..specs.SubWorkflow import SubWorkflow from ..specs.ThreadStart import ThreadStart from ..specs.ThreadMerge import ThreadMerge from ..specs.ThreadSplit import ThreadSplit -from ..specs.Transform import Transform from ..specs.Trigger import Trigger from ..specs.WorkflowSpec import WorkflowSpec -from ..specs.LoopResetTask import LoopResetTask from .base import Serializer from .exceptions import TaskNotSupportedError, MissingSpecError import warnings @@ -169,7 +166,6 @@ class DictionarySerializer(Serializer): s_state['defines'] = self.serialize_dict(spec.defines) s_state['pre_assign'] = self.serialize_list(spec.pre_assign) s_state['post_assign'] = self.serialize_list(spec.post_assign) - s_state['locks'] = spec.locks[:] # Note: Events are not serialized; this is documented in # the TaskSpec API docs. 
@@ -190,7 +186,6 @@ class DictionarySerializer(Serializer): spec.pre_assign = self.deserialize_list(s_state.get('pre_assign', [])) spec.post_assign = self.deserialize_list( s_state.get('post_assign', [])) - spec.locks = s_state.get('locks', [])[:] # We can't restore inputs and outputs yet because they may not be # deserialized yet. So keep the names, and resolve them in the end. spec.inputs = s_state.get('inputs', [])[:] @@ -302,18 +297,6 @@ class DictionarySerializer(Serializer): self.deserialize_task_spec(wf_spec, s_state, spec=spec) return spec - def serialize_loop_reset_task(self, spec): - s_state = self.serialize_task_spec(spec) - s_state['destination_id'] = spec.destination_id - s_state['destination_spec_name'] = spec.destination_spec_name - return s_state - - def deserialize_loop_reset_task(self, wf_spec, s_state): - spec = LoopResetTask(wf_spec, s_state['name'], s_state['destination_id'], - s_state['destination_spec_name']) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - def serialize_join(self, spec): s_state = self.serialize_task_spec(spec) s_state['split_task'] = spec.split_task @@ -555,8 +538,7 @@ class DictionarySerializer(Serializer): del spec.task_specs['Start'] start_task_spec_state = s_state['task_specs']['Start'] - start_task_spec = StartTask.deserialize( - self, spec, start_task_spec_state) + start_task_spec = StartTask.deserialize(self, spec, start_task_spec_state) spec.start = start_task_spec spec.task_specs['Start'] = start_task_spec for name, task_spec_state in list(s_state['task_specs'].items()): @@ -602,20 +584,34 @@ class DictionarySerializer(Serializer): s_state['wf_spec']""" if wf_spec is None: + # The json serializer serializes the spec as a string and then serializes it again, hence this check + # I'm not confident that this is going to actually work, but this serializer is so fundamentally flawed + # that I'm not going to put the effort in to be sure this works. 
+ if isinstance(s_state['wf_spec'], str): + spec_dct = json.loads(s_state['wf_spec']) + else: + spec_dct = s_state['wf_spec'] + reset_specs = [spec['name'] for spec in spec_dct['task_specs'].values() if spec['class'].endswith('LoopResetTask')] + for name in reset_specs: + s_state['wf_spec']['task_specs'].pop(name) wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs) + else: + reset_specs = [] + workflow = wf_class(wf_spec) workflow.data = self.deserialize_dict(s_state['data']) workflow.success = s_state['success'] workflow.spec = wf_spec - workflow.task_tree = self.deserialize_task( - workflow, s_state['task_tree']) + workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs) # Re-connect parents - tasklist = list(workflow.get_tasks()) + tasklist = workflow.get_tasks() for task in tasklist: - task.parent = workflow.get_task(task.parent,tasklist) + if task.parent is not None: + task.parent = workflow.get_task_from_id(task.parent, tasklist) - workflow.last_task = workflow.get_task(s_state['last_task'],tasklist) + if workflow.last_task is not None: + workflow.last_task = workflow.get_task_from_id(s_state['last_task'],tasklist) workflow.update_task_mapping() return workflow @@ -636,81 +632,48 @@ class DictionarySerializer(Serializer): " internal_data to store the subworkflow).") s_state = dict() - - # id s_state['id'] = task.id - - # workflow s_state['workflow_name'] = task.workflow.name - - # parent s_state['parent'] = task.parent.id if task.parent is not None else None - - # children if not skip_children: s_state['children'] = [ self.serialize_task(child) for child in task.children] - - # state s_state['state'] = task.state s_state['triggered'] = task.triggered - - # task_spec s_state['task_spec'] = task.task_spec.name - - # last_state_change s_state['last_state_change'] = task.last_state_change - - # data s_state['data'] = self.serialize_dict(task.data) - - # internal_data s_state['internal_data'] = task.internal_data 
return s_state - - def deserialize_task(self, workflow, s_state): + def deserialize_task(self, workflow, s_state, ignored_specs=None): assert isinstance(workflow, Workflow) - splits = s_state['task_spec'].split('_') - oldtaskname = s_state['task_spec'] - task_spec = workflow.get_task_spec_from_name(oldtaskname) + old_spec_name = s_state['task_spec'] + if old_spec_name in ignored_specs: + return None + task_spec = workflow.get_task_spec_from_name(old_spec_name) if task_spec is None: - raise MissingSpecError("Unknown task spec: " + oldtaskname) + raise MissingSpecError("Unknown task spec: " + old_spec_name) task = Task(workflow, task_spec) - if getattr(task_spec,'isSequential',False) and \ - s_state['internal_data'].get('splits') is not None: + if getattr(task_spec,'isSequential',False) and s_state['internal_data'].get('splits') is not None: task.task_spec.expanded = s_state['internal_data']['splits'] - - # id task.id = s_state['id'] - - # parent # as the task_tree might not be complete yet # keep the ids so they can be processed at the end task.parent = s_state['parent'] - - # children - task.children = self._deserialize_task_children(task, s_state) - - # state + task.children = self._deserialize_task_children(task, s_state, ignored_specs) task._state = s_state['state'] task.triggered = s_state['triggered'] - - # last_state_change task.last_state_change = s_state['last_state_change'] - - # data task.data = self.deserialize_dict(s_state['data']) - - # internal_data task.internal_data = s_state['internal_data'] return task - def _deserialize_task_children(self, task, s_state): + def _deserialize_task_children(self, task, s_state, ignored_specs): """This may need to be overridden if you need to support deserialization of sub-workflows""" - return [self.deserialize_task(task.workflow, c) - for c in s_state['children']] + children = [self.deserialize_task(task.workflow, c, ignored_specs) for c in s_state['children']] + return [c for c in children if c is not None] \ No 
newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/serializer/json.py b/SpiffWorkflow/SpiffWorkflow/serializer/json.py index 07a8d348..a7c28efe 100644 --- a/SpiffWorkflow/SpiffWorkflow/serializer/json.py +++ b/SpiffWorkflow/SpiffWorkflow/serializer/json.py @@ -22,24 +22,20 @@ from .dict import DictionarySerializer class JSONSerializer(DictionarySerializer): def serialize_workflow_spec(self, wf_spec, **kwargs): - thedict = super(JSONSerializer, self).serialize_workflow_spec( - wf_spec, **kwargs) + thedict = super(JSONSerializer, self).serialize_workflow_spec(wf_spec, **kwargs) return self._dumps(thedict) def deserialize_workflow_spec(self, s_state, **kwargs): thedict = self._loads(s_state) - return super(JSONSerializer, self).deserialize_workflow_spec( - thedict, **kwargs) + return super(JSONSerializer, self).deserialize_workflow_spec(thedict, **kwargs) def serialize_workflow(self, workflow, **kwargs): - thedict = super(JSONSerializer, self).serialize_workflow( - workflow, **kwargs) + thedict = super(JSONSerializer, self).serialize_workflow(workflow, **kwargs) return self._dumps(thedict) def deserialize_workflow(self, s_state, **kwargs): thedict = self._loads(s_state) - return super(JSONSerializer, self).deserialize_workflow( - thedict, **kwargs) + return super(JSONSerializer, self).deserialize_workflow(thedict, **kwargs) def _object_hook(self, dct): if '__uuid__' in dct: diff --git a/SpiffWorkflow/SpiffWorkflow/serializer/prettyxml.py b/SpiffWorkflow/SpiffWorkflow/serializer/prettyxml.py index d861b8ec..7ac8fdfa 100644 --- a/SpiffWorkflow/SpiffWorkflow/serializer/prettyxml.py +++ b/SpiffWorkflow/SpiffWorkflow/serializer/prettyxml.py @@ -176,8 +176,7 @@ class XmlSerializer(Serializer): threshold_field = start_node.attrib.get('threshold-field', '').lower() file_name = start_node.attrib.get('file', '').lower() file_field = start_node.attrib.get('file-field', '').lower() - kwargs = {'lock': [], - 'data': {}, + kwargs = {'data': {}, 'defines': {}, 
'pre_assign': [], 'post_assign': []} @@ -253,10 +252,6 @@ class XmlSerializer(Serializer): elif not isinstance(context, list): context = [context] context.append(node.text) - elif node.tag == 'lock': - if not node.text: - self.raise_parser_exception('Empty %s tag' % node.tag) - kwargs['lock'].append(node.text) elif node.tag == 'pick': if not node.text: self.raise_parser_exception('Empty %s tag' % node.tag) diff --git a/SpiffWorkflow/SpiffWorkflow/serializer/xml.py b/SpiffWorkflow/SpiffWorkflow/serializer/xml.py index c0d0572d..c4d32443 100644 --- a/SpiffWorkflow/SpiffWorkflow/serializer/xml.py +++ b/SpiffWorkflow/SpiffWorkflow/serializer/xml.py @@ -15,15 +15,12 @@ from builtins import str # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA -import re import warnings from lxml import etree from lxml.etree import SubElement from ..workflow import Workflow -from .. import specs, operators from ..task import Task, TaskStateNames -from ..operators import (Attrib, Assign, PathAttrib, Equal, NotEqual, - GreaterThan, LessThan, Match) +from ..operators import (Attrib, Assign, PathAttrib, Equal, NotEqual, GreaterThan, LessThan, Match) from ..specs.AcquireMutex import AcquireMutex from ..specs.Cancel import Cancel from ..specs.CancelTask import CancelTask @@ -43,10 +40,8 @@ from ..specs.SubWorkflow import SubWorkflow from ..specs.ThreadStart import ThreadStart from ..specs.ThreadMerge import ThreadMerge from ..specs.ThreadSplit import ThreadSplit -from ..specs.Transform import Transform from ..specs.Trigger import Trigger from ..specs.WorkflowSpec import WorkflowSpec -from ..specs.LoopResetTask import LoopResetTask from .base import Serializer, spec_map, op_map from .exceptions import TaskNotSupportedError @@ -726,31 +721,17 @@ class XmlSerializer(Serializer): workflow.task_tree = self.deserialize_task(workflow, task_tree_elem[0]) # Re-connect parents - for task in 
workflow.get_tasks(): - task.parent = workflow.get_task(task.parent) + for task in workflow.get_tasks_iterator(): + if task.parent is not None: + task.parent = workflow.get_task_from_id(task.parent) # last_task last_task = elem.findtext('last-task') if last_task is not None: - workflow.last_task = workflow.get_task(last_task) + workflow.last_task = workflow.get_task_from_id(last_task) return workflow - def serialize_loop_reset_task(self, spec): - elem = etree.Element('loop-reset-task') - SubElement(elem, 'destination_id').text = str(spec.destination_id) - SubElement(elem, 'destination_spec_name').text = str(spec.destination_spec_name) - return self.serialize_task_spec(spec, elem) - - def deserialize_loop_reset_task(self, wf_spec, elem, cls=LoopResetTask, **kwargs): - destination_id = elem.findtext('destination_id') - destination_spec_name = elem.findtext('destination_spec_name') - - task = self.deserialize_task_spec(wf_spec, elem, cls, - destination_id=destination_id, - destination_spec_name=destination_spec_name) - return task - def serialize_task(self, task, skip_children=False): assert isinstance(task, Task) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/Cancel.py b/SpiffWorkflow/SpiffWorkflow/specs/Cancel.py index bac74c22..06643bf7 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/Cancel.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/Cancel.py @@ -55,9 +55,9 @@ class Cancel(TaskSpec): if len(self.outputs) > 0: raise WorkflowException('Cancel with an output.', task_spec=self) - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): my_task.workflow.cancel(self.cancel_successfully) - TaskSpec._on_complete_hook(self, my_task) + return True def serialize(self, serializer): return serializer.serialize_cancel(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/CancelTask.py b/SpiffWorkflow/SpiffWorkflow/specs/CancelTask.py index ca30c542..0c885711 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/CancelTask.py +++ 
b/SpiffWorkflow/SpiffWorkflow/specs/CancelTask.py @@ -16,7 +16,6 @@ # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA -from .base import TaskSpec from .Trigger import Trigger @@ -30,12 +29,12 @@ class CancelTask(Trigger): parallel split. """ - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): for task_name in self.context: cancel_tasks = my_task.workflow.get_task_spec_from_name(task_name) for cancel_task in my_task._get_root()._find_any(cancel_tasks): cancel_task.cancel() - TaskSpec._on_complete_hook(self, my_task) + return True def serialize(self, serializer): return serializer.serialize_cancel_task(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/Celery.py b/SpiffWorkflow/SpiffWorkflow/specs/Celery.py index 5728c52b..96e2ecd6 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/Celery.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/Celery.py @@ -117,7 +117,7 @@ class Celery(TaskSpec): self.call = call or [] self.args = call_args or {} self.merge_results = merge_results - skip = 'data', 'defines', 'pre_assign', 'post_assign', 'lock' + skip = 'data', 'defines', 'pre_assign', 'post_assign' self.kwargs = dict(i for i in list(kwargs.items()) if i[0] not in skip) self.result_key = result_key diff --git a/SpiffWorkflow/SpiffWorkflow/specs/Choose.py b/SpiffWorkflow/SpiffWorkflow/specs/Choose.py index cca281a0..ee30bc05 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/Choose.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/Choose.py @@ -55,7 +55,7 @@ class Choose(Trigger): self.context = context self.choice = choice is not None and choice or [] - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): context = my_task.workflow.get_task_spec_from_name(self.context) triggered = [] for task in my_task.workflow.task_tree: @@ -66,7 +66,7 @@ class Choose(Trigger): triggered.append(task) for task in triggered: context._predict(task) - 
TaskSpec._on_complete_hook(self, my_task) + return True def serialize(self, serializer): return serializer.serialize_choose(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/ExclusiveChoice.py b/SpiffWorkflow/SpiffWorkflow/specs/ExclusiveChoice.py index 24607e30..5eaf52a9 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/ExclusiveChoice.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/ExclusiveChoice.py @@ -61,16 +61,7 @@ class ExclusiveChoice(MultiChoice): if self.default_task_spec is None: raise WorkflowException('A default output is required.', task_spec=self) - def _predict_hook(self, my_task): - # If the task's status is not predicted, we default to MAYBE - # for all it's outputs except the default choice, which is - # LIKELY. - # Otherwise, copy my own state to the children. - my_task._sync_children(self.outputs) - spec = self._wf_spec.get_task_spec_from_name(self.default_task_spec) - my_task._set_likely_task(spec) - - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): output = self._wf_spec.get_task_spec_from_name(self.default_task_spec) for condition, spec_name in self.cond_task_specs: @@ -82,6 +73,10 @@ class ExclusiveChoice(MultiChoice): raise WorkflowException(f'No conditions satisfied for {my_task.task_spec.name}', task_spec=self) my_task._sync_children([output], TaskState.FUTURE) + for child in my_task.children: + child.task_spec._predict(child, mask=TaskState.FUTURE|TaskState.PREDICTED_MASK) + + return True def serialize(self, serializer): return serializer.serialize_exclusive_choice(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/Join.py b/SpiffWorkflow/SpiffWorkflow/specs/Join.py index 2a429161..63c36562 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/Join.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/Join.py @@ -120,8 +120,7 @@ class Join(TaskSpec): # If the task is predicted with less outputs than he has # children, that means the prediction may be incomplete (for # example, because a prediction is not yet possible at this time). 
- if not child._is_definite() \ - and len(child.task_spec.outputs) > len(child.children): + if child._is_predicted() and len(child.task_spec.outputs) > len(child.children): return True return False @@ -153,8 +152,8 @@ class Join(TaskSpec): # Look at the tree to find all places where this task is used. tasks = [] - for input in self.inputs: - tasks += my_task.workflow.task_mapping[my_task.thread_id][input] + for spec in self.inputs: + tasks.extend([ t for t in my_task.workflow.task_tree._find_any(spec) if t.thread_id == my_task.thread_id ]) # Look up which tasks have already completed. waiting_tasks = [] @@ -162,7 +161,7 @@ class Join(TaskSpec): for task in tasks: if task.parent is None or task._has_state(TaskState.COMPLETED): completed += 1 - else: + elif not task._is_finished(): waiting_tasks.append(task) # If the threshold was reached, get ready to fire. @@ -186,8 +185,6 @@ class Join(TaskSpec): waiting_tasks = [] completed = 0 for task in tasks: - # Refresh path prediction. - task.task_spec._predict(task) if not self._branch_may_merge_at(task): completed += 1 elif self._branch_is_complete(task): @@ -204,16 +201,16 @@ class Join(TaskSpec): Returns True if the threshold was reached, False otherwise. Also returns the list of tasks that yet need to be completed. """ - # If the threshold was already reached, there is nothing else to do. - if my_task._has_state(TaskState.COMPLETED): - return True, None + if my_task._is_finished(): + return False, None if my_task._has_state(TaskState.READY): return True, None # Check whether we may fire. if self.split_task is None: return self._check_threshold_unstructured(my_task, force) - return self._check_threshold_structured(my_task, force) + else: + return self._check_threshold_structured(my_task, force) def _update_hook(self, my_task): # Check whether enough incoming branches have completed. 
@@ -224,22 +221,16 @@ class Join(TaskSpec): if self.cancel_remaining: for task in waiting_tasks: task.cancel() - # Update the state of our child objects. self._do_join(my_task) - else: + return True + elif not my_task._is_finished(): my_task._set_state(TaskState.WAITING) - def _do_join(self, my_task): + def _find_tasks(self, my_task): split_task = self._get_split_task(my_task) - # Identify all corresponding task instances within the thread. - # Also remember which of those instances was most recently changed, - # because we are making this one the instance that will - # continue the thread of control. In other words, we will continue - # to build the task tree underneath the most recently changed task. - last_changed = None thread_tasks = [] for task in split_task._find_any(self): # Ignore tasks from other threads. @@ -248,27 +239,16 @@ class Join(TaskSpec): # Ignore my outgoing branches. if self.split_task and task._is_descendant_of(my_task): continue - # We have found a matching instance. thread_tasks.append(task) + return thread_tasks - # Check whether the state of the instance was recently - # changed. - changed = task.parent.last_state_change - if last_changed is None or changed > last_changed.parent.last_state_change: - last_changed = task + def _do_join(self, my_task): - # Mark the identified task instances as COMPLETED. The exception - # is the most recently changed task, for which we assume READY. - # By setting the state to READY only, we allow for calling - # :class:`Task.complete()`, which leads to the task tree being - # (re)built underneath the node. 
- for task in thread_tasks: - if task == last_changed: - self.entered_event.emit(my_task.workflow, my_task) - task._ready() - else: - task._set_state(TaskState.COMPLETED) + # Execution will continue from this task; mark others as cancelled + for task in self._find_tasks(my_task): + if task != my_task: + task._set_state(TaskState.CANCELLED) task._drop_children() def _on_trigger(self, my_task): @@ -276,10 +256,11 @@ class Join(TaskSpec): May be called to fire the Join before the incoming branches are completed. """ - for task in my_task.workflow.task_tree._find_any(self): - if task.thread_id != my_task.thread_id: - continue - self._do_join(task) + tasks = sorted(self._find_tasks(my_task), key=lambda t: t.last_state_change) + for task in tasks[:-1]: + task._set_state(TaskState.CANCELLED) + task._drop_children() + tasks[-1]._ready() def serialize(self, serializer): return serializer.serialize_join(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/LoopResetTask.py b/SpiffWorkflow/SpiffWorkflow/specs/LoopResetTask.py deleted file mode 100644 index 582ad987..00000000 --- a/SpiffWorkflow/SpiffWorkflow/specs/LoopResetTask.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (C) 2021 Sartography -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA - - -from .base import TaskSpec -from ..task import TaskState -from SpiffWorkflow.exceptions import WorkflowTaskException - - -class LoopResetTask(TaskSpec): - - """ - This task is used as a placeholder when we are going to loopback - to a previous point in the workflow. When this task is completed, - it will reset the workflow back to a previous point. - """ - - def __init__(self, wf_spec, name, destination_id, destination_spec_name, **kwargs): - """ - Constructor. - - :param script: the script that must be executed by the script engine. - """ - super(LoopResetTask, self).__init__(wf_spec, name, **kwargs) - self.destination_id = destination_id - self.destination_spec_name = destination_spec_name - - def _on_complete_hook(self, task): - try: - # Prefer the exact task id, but if not available, use the - # last instance of the task_spec. 
- destination = task.workflow.get_task(self.destination_id) - if not destination: - destination = task.workflow.get_tasks_from_spec_name( - self.destination_spec_name)[-1] - - destination.reset_token(task.data, reset_data=False) - except Exception as e: - # set state to WAITING (because it is definitely not COMPLETED) - # and raise WorkflowException pointing to this task because - # maybe upstream someone will be able to handle this situation - task._set_state(TaskState.WAITING) - if isinstance(e, WorkflowTaskException): - e.add_note('Error occurred during a loop back to a previous step.') - raise e - else: - raise WorkflowTaskException( - 'Error during loop back:' + str(e), task=task, exception=e) - super(LoopResetTask, self)._on_complete_hook(task) - - def serialize(self, serializer): - return serializer.serialize_loop_reset_task(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_loop_reset_task(wf_spec, s_state) - diff --git a/SpiffWorkflow/SpiffWorkflow/specs/MultiChoice.py b/SpiffWorkflow/SpiffWorkflow/specs/MultiChoice.py index bcb998b6..dc2aa194 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/MultiChoice.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/MultiChoice.py @@ -89,32 +89,18 @@ class MultiChoice(TaskSpec): # The caller needs to make sure that predict() is called. def _predict_hook(self, my_task): - if self.choice: - outputs = [self._wf_spec.get_task_spec_from_name(o) - for o in self.choice] - else: - outputs = self.outputs - - # Default to MAYBE for all conditional outputs, default to LIKELY - # for unconditional ones. We can not default to FUTURE, because - # a call to trigger() may override the unconditional paths. - my_task._sync_children(outputs) - if not my_task._is_definite(): - best_state = my_task.state - else: - best_state = TaskState.LIKELY - - # Collect a list of all unconditional outputs. 
- outputs = [] + conditional, unconditional = [], [] for condition, output in self.cond_task_specs: - if condition is None: - outputs.append(self._wf_spec.get_task_spec_from_name(output)) - - for child in my_task.children: - if child._is_definite(): + if self.choice is not None and output not in self.choice: continue - if child.task_spec in outputs: - child._set_state(best_state) + if condition is None: + unconditional.append(self._wf_spec.get_task_spec_from_name(output)) + else: + conditional.append(self._wf_spec.get_task_spec_from_name(output)) + state = TaskState.MAYBE if my_task.state == TaskState.MAYBE else TaskState.LIKELY + my_task._sync_children(unconditional, state) + for spec in conditional: + my_task._add_child(spec, TaskState.MAYBE) def _get_matching_outputs(self, my_task): outputs = [] @@ -125,12 +111,12 @@ class MultiChoice(TaskSpec): outputs.append(self._wf_spec.get_task_spec_from_name(output)) return outputs - def _on_complete_hook(self, my_task): - """ - Runs the task. Should not be called directly. - Returns True if completed, False otherwise. - """ + def _run_hook(self, my_task): + """Runs the task. 
Should not be called directly.""" my_task._sync_children(self._get_matching_outputs(my_task), TaskState.FUTURE) + for child in my_task.children: + child.task_spec._predict(child, mask=TaskState.FUTURE|TaskState.PREDICTED_MASK) + return True def serialize(self, serializer): return serializer.serialize_multi_choice(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/MultiInstance.py b/SpiffWorkflow/SpiffWorkflow/specs/MultiInstance.py index a1abf3f0..9efea52a 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/MultiInstance.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/MultiInstance.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -from builtins import range # Copyright (C) 2007 Samuel Abels # # This library is free software; you can redistribute it and/or @@ -75,33 +74,24 @@ class MultiInstance(TaskSpec): for output in self.outputs: new_task = my_task._add_child(output, state) new_task.triggered = True - output._predict(new_task) + output._predict(new_task, mask=TaskState.FUTURE|TaskState.READY|TaskState.PREDICTED_MASK) def _get_predicted_outputs(self, my_task): split_n = int(valueof(my_task, self.times, 1)) - - # Predict the outputs. - outputs = [] - for i in range(split_n): - outputs += self.outputs - return outputs + return self.outputs * split_n def _predict_hook(self, my_task): - split_n = int(valueof(my_task, self.times, 1)) - my_task._set_internal_data(splits=split_n) - - # Create the outgoing tasks. 
- outputs = [] - for i in range(split_n): - outputs += self.outputs + outputs = self._get_predicted_outputs(my_task) if my_task._is_definite(): my_task._sync_children(outputs, TaskState.FUTURE) else: my_task._sync_children(outputs, TaskState.LIKELY) - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): outputs = self._get_predicted_outputs(my_task) my_task._sync_children(outputs, TaskState.FUTURE) + self._predict(my_task, mask=TaskState.FUTURE|TaskState.PREDICTED_MASK) + return True def serialize(self, serializer): return serializer.serialize_multi_instance(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/ReleaseMutex.py b/SpiffWorkflow/SpiffWorkflow/specs/ReleaseMutex.py index 59e5dcc4..d2efb271 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/ReleaseMutex.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/ReleaseMutex.py @@ -47,10 +47,10 @@ class ReleaseMutex(TaskSpec): TaskSpec.__init__(self, wf_spec, name, **kwargs) self.mutex = mutex - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): mutex = my_task.workflow._get_mutex(self.mutex) mutex.unlock() - TaskSpec._on_complete_hook(self, my_task) + return True def serialize(self, serializer): return serializer.serialize_release_mutex(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/SubWorkflow.py b/SpiffWorkflow/SpiffWorkflow/specs/SubWorkflow.py index d0fc7449..177891fa 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/SubWorkflow.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/SubWorkflow.py @@ -98,41 +98,31 @@ class SubWorkflow(TaskSpec): xml = etree.parse(fp).getroot() wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file_name) outer_workflow = my_task.workflow.outer_workflow - return Workflow(wf_spec, parent=outer_workflow) - - def _on_ready_before_hook(self, my_task): - subworkflow = self._create_subworkflow(my_task) - subworkflow.completed_event.connect( - self._on_subworkflow_completed, my_task) - self._integrate_subworkflow_tree(my_task, subworkflow) - 
my_task._set_internal_data(subworkflow=subworkflow) - - def _integrate_subworkflow_tree(self, my_task, subworkflow): - # Integrate the tree of the subworkflow into the tree of this workflow. - my_task._sync_children(self.outputs, TaskState.LIKELY) + subworkflow = Workflow(wf_spec, parent=outer_workflow) + my_task._sync_children(self.outputs, TaskState.FUTURE) for child in subworkflow.task_tree.children: my_task.children.insert(0, child) child.parent = my_task + child.state = TaskState.READY + subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task) + my_task._set_internal_data(subworkflow=subworkflow) + my_task._set_state(TaskState.WAITING) - def _on_ready_hook(self, my_task): + def _run_hook(self, my_task): # Assign variables, if so requested. subworkflow = my_task._get_internal_data('subworkflow') for child in subworkflow.task_tree.children: for assignment in self.in_assign: assignment.assign(my_task, child) child.task_spec._update(child) - # Instead of completing immediately, we'll wait for the subworkflow to complete - my_task._set_state(TaskState.WAITING) + return True def _update_hook(self, my_task): - super()._update_hook(my_task) subworkflow = my_task._get_internal_data('subworkflow') if subworkflow is None: - # On the first update, we have to create the subworkflow - return True + self._create_subworkflow(my_task) elif subworkflow.is_completed(): - # Then wait until it finishes to complete my_task.complete() def _on_subworkflow_completed(self, subworkflow, my_task): diff --git a/SpiffWorkflow/SpiffWorkflow/specs/ThreadMerge.py b/SpiffWorkflow/SpiffWorkflow/specs/ThreadMerge.py index 514316ae..b522913b 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/ThreadMerge.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/ThreadMerge.py @@ -62,8 +62,7 @@ class ThreadMerge(Join): # task that did the conditional parallel split. 
split_task = my_task._find_ancestor_from_name(self.split_task) if split_task is None: - msg = 'Join with %s, which was not reached' % self.split_task - raise WorkflowException(msg, task_spec=self) + raise WorkflowException(f'Join with %s, which was not reached {self.split_task}', task_spec=self) tasks = split_task.task_spec._get_activated_threads(split_task) # The default threshold is the number of threads that were started. @@ -105,8 +104,7 @@ class ThreadMerge(Join): my_task._set_state(TaskState.WAITING) return - split_task_spec = my_task.workflow.get_task_spec_from_name( - self.split_task) + split_task_spec = my_task.workflow.get_task_spec_from_name(self.split_task) split_task = my_task._find_ancestor(split_task_spec) # Find the inbound task that was completed last. diff --git a/SpiffWorkflow/SpiffWorkflow/specs/ThreadSplit.py b/SpiffWorkflow/SpiffWorkflow/specs/ThreadSplit.py index d25e8388..7b6471ff 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/ThreadSplit.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/ThreadSplit.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -from builtins import range # Copyright (C) 2007 Samuel Abels # # This library is free software; you can redistribute it and/or @@ -108,31 +107,25 @@ class ThreadSplit(TaskSpec): new_task = my_task.add_child(output, TaskState.READY) new_task.triggered = True - def _predict_hook(self, my_task): + def _get_predicted_outputs(self, my_task): split_n = int(valueof(my_task, self.times)) + return [self.thread_starter] * split_n + def _predict_hook(self, my_task): # if we were created with thread_starter suppressed, connect it now. if self.thread_starter is None: self.thread_starter = self.outputs[0] - # Predict the outputs. 
- outputs = [] - for i in range(split_n): - outputs.append(self.thread_starter) + outputs = self._get_predicted_outputs(my_task) if my_task._is_definite(): my_task._sync_children(outputs, TaskState.FUTURE) else: my_task._sync_children(outputs, TaskState.LIKELY) - def _on_complete_hook(self, my_task): - # Split, and remember the number of splits in the context data. - split_n = int(valueof(my_task, self.times)) - - # Create the outgoing tasks. - outputs = [] - for i in range(split_n): - outputs.append(self.thread_starter) + def _run_hook(self, my_task): + outputs = self._get_predicted_outputs(my_task) my_task._sync_children(outputs, TaskState.FUTURE) + return True def serialize(self, serializer): return serializer.serialize_thread_split(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/ThreadStart.py b/SpiffWorkflow/SpiffWorkflow/specs/ThreadStart.py index bf51b1a9..65e27a81 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/ThreadStart.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/ThreadStart.py @@ -17,6 +17,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA from .base import TaskSpec +from SpiffWorkflow.task import TaskState class ThreadStart(TaskSpec): @@ -42,9 +43,10 @@ class ThreadStart(TaskSpec): TaskSpec.__init__(self, wf_spec, name, **kwargs) self.internal = True - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): my_task._assign_new_thread_id() - TaskSpec._on_complete_hook(self, my_task) + my_task._sync_children(self.outputs, TaskState.READY) + return True def serialize(self, serializer): return serializer.serialize_thread_start(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/Trigger.py b/SpiffWorkflow/SpiffWorkflow/specs/Trigger.py index 3042b7d2..c8d5fbfc 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/Trigger.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/Trigger.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -from builtins import range # Copyright (C) 2007 Samuel Abels # # This library is free software; you 
can redistribute it and/or @@ -65,15 +64,14 @@ class Trigger(TaskSpec): self.queued += 1 # All tasks that have already completed need to be put back to # READY. - for thetask in my_task.workflow.task_tree: - if thetask.thread_id != my_task.thread_id: + for task in my_task.workflow.task_tree: + if task.thread_id != my_task.thread_id: continue - if (thetask.task_spec == self and - thetask._has_state(TaskState.COMPLETED)): - thetask._set_state(TaskState.FUTURE) - thetask._ready() + if task.task_spec == self and task._has_state(TaskState.COMPLETED): + task._set_state(TaskState.FUTURE) + task._ready() - def _on_complete_hook(self, my_task): + def _run_hook(self, my_task): """ A hook into _on_complete() that does the task specific work. @@ -85,10 +83,10 @@ class Trigger(TaskSpec): times = int(valueof(my_task, self.times, 1)) + self.queued for i in range(times): for task_name in self.context: - task = my_task.workflow.get_task_spec_from_name(task_name) - task._on_trigger(my_task) + task_spec = my_task.workflow.get_task_spec_from_name(task_name) + task_spec._on_trigger(my_task) self.queued = 0 - TaskSpec._on_complete_hook(self, my_task) + return True def serialize(self, serializer): return serializer.serialize_trigger(self) diff --git a/SpiffWorkflow/SpiffWorkflow/specs/base.py b/SpiffWorkflow/SpiffWorkflow/specs/base.py index 1c3c229d..4fc736b0 100644 --- a/SpiffWorkflow/SpiffWorkflow/specs/base.py +++ b/SpiffWorkflow/SpiffWorkflow/specs/base.py @@ -74,10 +74,6 @@ class TaskSpec(object): :param wf_spec: A reference to the workflow specification that owns it. :type name: string :param name: A name for the task. - :type lock: list(str) - :param lock: A list of mutex names. The mutex is acquired - on entry of execute() and released on leave of - execute(). :type manual: bool :param manual: Whether this task requires a manual action to complete. 
:type data: dict((str, object)) @@ -107,7 +103,6 @@ class TaskSpec(object): self.defines = kwargs.get('defines', {}) self.pre_assign = kwargs.get('pre_assign',[]) self.post_assign = kwargs.get('post_assign', []) - self.locks = kwargs.get('lock', []) self.lookahead = 2 # Maximum number of MAYBE predictions. # Events. @@ -213,7 +208,7 @@ class TaskSpec(object): if len(self.inputs) < 1: raise WorkflowException(self, 'No input task connected.') - def _predict(self, my_task, seen=None, looked_ahead=0): + def _predict(self, my_task, seen=None, looked_ahead=0, mask=TaskState.PREDICTED_MASK): """ Updates the branch such that all possible future routes are added. @@ -229,26 +224,25 @@ class TaskSpec(object): if seen is None: seen = [] - self._predict_hook(my_task) - if not my_task._is_definite(): + if my_task._has_state(mask): + self._predict_hook(my_task) + + if my_task._is_predicted(): seen.append(self) + look_ahead = my_task._is_definite() or looked_ahead + 1 < self.lookahead for child in my_task.children: - if not child._is_finished() and child not in seen and look_ahead: - child.task_spec._predict(child, seen[:], looked_ahead + 1) + if child._has_state(mask) and child not in seen and look_ahead: + child.task_spec._predict(child, seen[:], looked_ahead + 1, mask) def _predict_hook(self, my_task): - # If the task's status is not predicted, we default to FUTURE for all it's outputs. + # If the task's status is definite, we default to FUTURE for all it's outputs. # Otherwise, copy my own state to the children. - if my_task._is_definite(): + if my_task._is_definite(): best_state = TaskState.FUTURE else: best_state = my_task.state - my_task._sync_children(self.outputs, best_state) - for child in my_task.children: - if not child._is_definite(): - child._set_state(best_state) def _update(self, my_task): """ @@ -281,42 +275,13 @@ class TaskSpec(object): assert my_task is not None self.test() - # Acquire locks, if any. 
- for lock in self.locks: - mutex = my_task.workflow._get_mutex(lock) - if not mutex.testandset(): - return - # Assign variables, if so requested. for assignment in self.pre_assign: assignment.assign(my_task, my_task) # Run task-specific code. - self._on_ready_before_hook(my_task) - self.reached_event.emit(my_task.workflow, my_task) self._on_ready_hook(my_task) - - # Run user code, if any. - if self.ready_event.emit(my_task.workflow, my_task): - # Assign variables, if so requested. - for assignment in self.post_assign: - assignment.assign(my_task, my_task) - - # Release locks, if any. - for lock in self.locks: - mutex = my_task.workflow._get_mutex(lock) - mutex.unlock() - - self.finished_event.emit(my_task.workflow, my_task) - - def _on_ready_before_hook(self, my_task): - """ - A hook into _on_ready() that does the task specific work. - - :type my_task: Task - :param my_task: The associated task in the task tree. - """ - pass + self.reached_event.emit(my_task.workflow, my_task) def _on_ready_hook(self, my_task): """ @@ -327,6 +292,35 @@ class TaskSpec(object): """ pass + def _run(self, my_task): + """ + Run the task. + + :type my_task: Task + :param my_task: The associated task in the task tree. + + :rtype: boolean or None + :returns: the value returned by the task spec's run method. + """ + result = self._run_hook(my_task) + # Run user code, if any. + if self.ready_event.emit(my_task.workflow, my_task): + # Assign variables, if so requested. + for assignment in self.post_assign: + assignment.assign(my_task, my_task) + + self.finished_event.emit(my_task.workflow, my_task) + return result + + def _run_hook(self, my_task): + """ + A hook into _run() that does the task specific work. + + :type my_task: Task + :param my_task: The associated task in the task tree. 
+ """ + return True + def _on_cancel(self, my_task): """ May be called by another task to cancel the operation before it was @@ -359,20 +353,12 @@ class TaskSpec(object): :rtype: boolean :returns: True on success, False otherwise. """ - assert my_task is not None - - # We have to set the last task here, because the on_complete_hook - # of a loopback task may overwrite what the last_task will be. - my_task.workflow.last_task = my_task self._on_complete_hook(my_task) for child in my_task.children: - # Don't like this, but this is the most expedient way of preventing cancelled tasks from reactivation - if child.state != TaskState.CANCELLED: + if not child._is_finished(): child.task_spec._update(child) my_task.workflow._task_completed_notify(my_task) - self.completed_event.emit(my_task.workflow, my_task) - return True def _on_complete_hook(self, my_task): """ @@ -419,7 +405,6 @@ class TaskSpec(object): 'defines':self.defines, 'pre_assign':self.pre_assign, 'post_assign':self.post_assign, - 'locks':self.locks, 'lookahead':self.lookahead, } @@ -457,7 +442,6 @@ class TaskSpec(object): out.defines = s_state.get('defines') out.pre_assign = s_state.get('pre_assign') out.post_assign = s_state.get('post_assign') - out.locks = s_state.get('locks') out.lookahead = s_state.get('lookahead') return out diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/parser/process.py b/SpiffWorkflow/SpiffWorkflow/spiff/parser/process.py index a8238b6b..1dd61a71 100644 --- a/SpiffWorkflow/SpiffWorkflow/spiff/parser/process.py +++ b/SpiffWorkflow/SpiffWorkflow/spiff/parser/process.py @@ -6,6 +6,7 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, BoundaryEvent, IntermediateCatchEvent + from SpiffWorkflow.spiff.specs.none_task import NoneTask from 
SpiffWorkflow.spiff.specs.manual_task import ManualTask from SpiffWorkflow.spiff.specs.user_task import UserTask @@ -13,10 +14,18 @@ from SpiffWorkflow.spiff.specs.script_task import ScriptTask from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity from SpiffWorkflow.spiff.specs.service_task import ServiceTask from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask -from SpiffWorkflow.spiff.parser.task_spec import SpiffTaskParser, SubWorkflowParser, CallActivityParser, ServiceTaskParser, ScriptTaskParser +from SpiffWorkflow.spiff.specs.business_rule_task import BusinessRuleTask +from SpiffWorkflow.spiff.parser.task_spec import ( + SpiffTaskParser, + SubWorkflowParser, + CallActivityParser, + ServiceTaskParser, + ScriptTaskParser, + BusinessRuleTaskParser +) from SpiffWorkflow.spiff.parser.event_parsers import (SpiffStartEventParser, SpiffEndEventParser, SpiffBoundaryEventParser, SpiffIntermediateCatchEventParser, SpiffIntermediateThrowEventParser, SpiffSendTaskParser, SpiffReceiveTaskParser) -from SpiffWorkflow.dmn.specs import BusinessRuleTask + from SpiffWorkflow.spiff.parser.task_spec import BusinessRuleTaskParser @@ -44,4 +53,3 @@ class SpiffBpmnParser(BpmnDmnParser): full_tag('receiveTask'): (SpiffReceiveTaskParser, ReceiveTask), full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask) } - diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/parser/task_spec.py b/SpiffWorkflow/SpiffWorkflow/spiff/parser/task_spec.py index c4ce0f31..bf337a92 100644 --- a/SpiffWorkflow/SpiffWorkflow/spiff/parser/task_spec.py +++ b/SpiffWorkflow/SpiffWorkflow/spiff/parser/task_spec.py @@ -1,11 +1,11 @@ from lxml import etree -from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser from SpiffWorkflow.bpmn.parser.task_parsers import SubprocessParser from SpiffWorkflow.bpmn.parser.util import xpath_eval from 
SpiffWorkflow.spiff.specs.multiinstance_task import StandardLoopTask, ParallelMultiInstanceTask, SequentialMultiInstanceTask +from SpiffWorkflow.spiff.specs.business_rule_task import BusinessRuleTask SPIFFWORKFLOW_MODEL_NS = 'http://spiffworkflow.org/bpmn/schema/1.0/core' SPIFFWORKFLOW_MODEL_PREFIX = 'spiffworkflow' @@ -169,13 +169,19 @@ class BusinessRuleTaskParser(SpiffTaskParser): def create_task(self): decision_ref = self.get_decision_ref(self.node) - return BusinessRuleTask(self.spec, - self.get_task_spec_name(), - dmnEngine=self.process_parser.parser.get_engine(decision_ref, self.node), - lane=self.lane, - position=self.position, - description=self.node.get('name', None) - ) + extensions = self.parse_extensions() + prescript = extensions.get('preScript') + postscript = extensions.get('postScript') + return BusinessRuleTask( + self.spec, + self.get_task_spec_name(), + dmnEngine=self.process_parser.parser.get_engine(decision_ref, self.node), + lane=self.lane, + position=self.position, + description=self.node.get('name', None), + prescript=prescript, + postscript=postscript, + ) @staticmethod def get_decision_ref(node): diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/config.py b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/config.py index c624a289..771cb5d2 100644 --- a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/config.py +++ b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/config.py @@ -5,7 +5,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( SimpleTaskConverter, StartTaskConverter, EndJoinConverter, - LoopResetTaskConverter, StartEventConverter, EndEventConverter, IntermediateCatchEventConverter, @@ -32,6 +31,7 @@ from .task_spec import ( CallActivityTaskConverter, ParallelMultiInstanceTaskConverter, SequentialMultiInstanceTaskConverter, + BusinessRuleTaskConverter, ) from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter @@ -42,7 +42,6 @@ 
SPIFF_SPEC_CONFIG['task_specs'] = [ SimpleTaskConverter, StartTaskConverter, EndJoinConverter, - LoopResetTaskConverter, StartEventConverter, EndEventConverter, IntermediateCatchEventConverter, @@ -66,6 +65,7 @@ SPIFF_SPEC_CONFIG['task_specs'] = [ StandardLoopTaskConverter, ParallelMultiInstanceTaskConverter, SequentialMultiInstanceTaskConverter, + BusinessRuleTaskConverter ] SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventDefinitionConverter) SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter) \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec.py b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec.py index b754f87a..b5a4e589 100644 --- a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec.py +++ b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec.py @@ -1,5 +1,6 @@ from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter from SpiffWorkflow.bpmn.serializer.task_spec import MultiInstanceTaskConverter +from SpiffWorkflow.dmn.serializer.task_spec import BaseBusinessRuleTaskConverter from SpiffWorkflow.spiff.specs.none_task import NoneTask from SpiffWorkflow.spiff.specs.manual_task import ManualTask @@ -9,6 +10,7 @@ from SpiffWorkflow.spiff.specs.service_task import ServiceTask from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask from SpiffWorkflow.spiff.specs.multiinstance_task import StandardLoopTask, ParallelMultiInstanceTask, SequentialMultiInstanceTask +from SpiffWorkflow.spiff.specs.business_rule_task import BusinessRuleTask class SpiffBpmnTaskConverter(TaskSpecConverter): @@ -39,6 +41,16 @@ class UserTaskConverter(SpiffBpmnTaskConverter): super().__init__(UserTask, registry) +class BusinessRuleTaskConverter(BaseBusinessRuleTaskConverter, SpiffBpmnTaskConverter): + def __init__(self, registry): + 
super().__init__(BusinessRuleTask, registry) + + def to_dict(self, spec): + dct = BaseBusinessRuleTaskConverter.to_dict(self, spec) + dct.update(SpiffBpmnTaskConverter.to_dict(self, spec)) + return dct + + class SendTaskConverter(SpiffBpmnTaskConverter): def __init__(self, registry, typename=None): diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/specs/business_rule_task.py b/SpiffWorkflow/SpiffWorkflow/spiff/specs/business_rule_task.py new file mode 100644 index 00000000..204712ee --- /dev/null +++ b/SpiffWorkflow/SpiffWorkflow/spiff/specs/business_rule_task.py @@ -0,0 +1,5 @@ +from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask +from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask as DefaultBusinessRuleTask + +class BusinessRuleTask(DefaultBusinessRuleTask, SpiffBpmnTask): + pass diff --git a/SpiffWorkflow/SpiffWorkflow/task.py b/SpiffWorkflow/SpiffWorkflow/task.py index cefc913b..325222c4 100644 --- a/SpiffWorkflow/SpiffWorkflow/task.py +++ b/SpiffWorkflow/SpiffWorkflow/task.py @@ -77,9 +77,9 @@ class TaskState: CANCELLED = 64 FINISHED_MASK = CANCELLED | COMPLETED - DEFINITE_MASK = FUTURE | WAITING | READY | FINISHED_MASK - PREDICTED_MASK = FUTURE | LIKELY | MAYBE - NOT_FINISHED_MASK = PREDICTED_MASK | WAITING | READY + DEFINITE_MASK = FUTURE | WAITING | READY + PREDICTED_MASK = LIKELY | MAYBE + NOT_FINISHED_MASK = PREDICTED_MASK | DEFINITE_MASK ANY_MASK = FINISHED_MASK | NOT_FINISHED_MASK @@ -292,43 +292,6 @@ class Task(object, metaclass=DeprecatedMetaTask): self.data = DeepMerge.merge(self.data, data) data_log.info('Data update', extra=self.log_info()) - def set_children_future(self): - """ - for a parallel gateway, we need to set up our - children so that the gateway figures out that it needs to join up - the inputs - otherwise our child process never gets marked as - 'READY' - """ - if not self.task_spec.task_should_set_children_future(self): - return - - self.task_spec.task_will_set_children_future(self) - - # now we set this one 
to execute - self._set_state(TaskState.MAYBE) - self._sync_children(self.task_spec.outputs) - for child in self.children: - child.set_children_future() - - def reset_token(self, data, reset_data=False): - """ - Resets the token to this task. This should allow a trip 'back in time' - as it were to items that have already been completed. - :type reset_data: bool - :param reset_data: Do we want to have the data be where we left of in - this task or not - """ - self.internal_data = {} - if not reset_data and self.workflow.last_task and self.workflow.last_task.data: - # This is a little sly, the data that will get inherited should - # be from the last completed task, but we don't want to alter - # the tree, so we just set the parent's data to the given data. - self.parent.data = copy.deepcopy(data) - self.workflow.last_task = self.parent - self.set_children_future() # this method actually fixes the problem - self._set_state(TaskState.FUTURE) - self.task_spec._update(self) - def __iter__(self): return Task.Iterator(self) @@ -366,9 +329,7 @@ class Task(object, metaclass=DeprecatedMetaTask): self.children.remove(task) def _has_state(self, state): - """ - Returns True if the Task has the given state flag set. 
- """ + """Returns True if the Task has the given state flag set.""" return (self.state & state) != 0 def _is_finished(self): @@ -380,6 +341,43 @@ class Task(object, metaclass=DeprecatedMetaTask): def _is_definite(self): return self._has_state(TaskState.DEFINITE_MASK) + def set_children_future(self): + """ + for a parallel gateway, we need to set up our + children so that the gateway figures out that it needs to join up + the inputs - otherwise our child process never gets marked as + 'READY' + """ + if not self.task_spec.task_should_set_children_future(self): + return + + self.task_spec.task_will_set_children_future(self) + + # now we set this one to execute + self._set_state(TaskState.MAYBE) + self._sync_children(self.task_spec.outputs) + for child in self.children: + child.set_children_future() + + def reset_token(self, data, reset_data=False): + """ + Resets the token to this task. This should allow a trip 'back in time' + as it were to items that have already been completed. + :type reset_data: bool + :param reset_data: Do we want to have the data be where we left of in + this task or not + """ + self.internal_data = {} + if not reset_data and self.workflow.last_task and self.workflow.last_task.data: + # This is a little sly, the data that will get inherited should + # be from the last completed task, but we don't want to alter + # the tree, so we just set the parent's data to the given data. + self.parent.data = copy.deepcopy(data) + self.workflow.last_task = self.parent + self.set_children_future() # this method actually fixes the problem + self._set_state(TaskState.FUTURE) + self.task_spec._update(self) + def _add_child(self, task_spec, state=TaskState.MAYBE): """ Adds a new child and assigns the given TaskSpec to it. @@ -391,17 +389,60 @@ class Task(object, metaclass=DeprecatedMetaTask): :rtype: Task :returns: The new child task. 
""" - if task_spec is None: - raise ValueError(self, '_add_child() requires a TaskSpec') if self._is_predicted() and state & TaskState.PREDICTED_MASK == 0: - msg = 'Attempt to add non-predicted child to predicted task' - raise WorkflowException(msg, task_spec=self.task_spec) + raise WorkflowException('Attempt to add non-predicted child to predicted task', task_spec=self.task_spec) task = Task(self.workflow, task_spec, self, state=state) task.thread_id = self.thread_id if state == TaskState.READY: task._ready() return task + def _sync_children(self, task_specs, state=TaskState.MAYBE): + """ + This method syncs up the task's children with the given list of task + specs. In other words:: + + - Add one child for each given TaskSpec, unless that child already + exists. + - Remove all children for which there is no spec in the given list, + unless it is a "triggered" task. + .. note:: + + It is an error if the task has a non-predicted child that is + not given in the TaskSpecs. + + :type task_specs: list(TaskSpec) + :param task_specs: The list of task specs that may become children. + :type state: integer + :param state: The bitmask of states for the new children. + """ + if task_specs is None: + raise ValueError('"task_specs" argument is None') + new_children = task_specs[:] + + # Create a list of all children that are no longer needed. + unneeded_children = [] + for child in self.children: + if child.triggered: + # Triggered tasks are never removed. + pass + elif child.task_spec in new_children: + # If the task already exists, remove it from to-be-added and update its state + new_children.remove(child.task_spec) + if not child._is_finished(): + child._set_state(state) + else: + if child._is_definite(): + # Definite tasks must not be removed, so they HAVE to be in the given task spec list. 
+ raise WorkflowException(f'removal of non-predicted child {child}', task_spec=self.task_spec) + unneeded_children.append(child) + + # Update children accordingly + for child in unneeded_children: + self.children.remove(child) + for task_spec in new_children: + self._add_child(task_spec, state) + def _assign_new_thread_id(self, recursive=True): """ Assigns a new thread id to the task. @@ -419,78 +460,6 @@ class Task(object, metaclass=DeprecatedMetaTask): child.thread_id = self.thread_id return self.thread_id - def _sync_children(self, task_specs, state=TaskState.MAYBE): - """ - This method syncs up the task's children with the given list of task - specs. In other words:: - - - Add one child for each given TaskSpec, unless that child already - exists. - - Remove all children for which there is no spec in the given list, - unless it is a "triggered" task. - - Handle looping back to previous tasks, so we don't end up with - an infinitely large tree. - .. note:: - - It is an error if the task has a non-predicted child that is - not given in the TaskSpecs. - - :type task_specs: list(TaskSpec) - :param task_specs: The list of task specs that may become children. - :type state: integer - :param state: The bitmask of states for the new children. - """ - if task_specs is None: - raise ValueError('"task_specs" argument is None') - new_children = task_specs[:] - - # If a child task_spec is also an ancestor, we are looping back, - # replace those specs with a loopReset task. - root_task = self._get_root() - for index, task_spec in enumerate(new_children): - ancestor_task = self._find_ancestor(task_spec) - if ancestor_task and ancestor_task != root_task: - destination = ancestor_task - new_spec = self.workflow.get_reset_task_spec(destination) - new_spec.outputs = [] - new_spec.inputs = task_spec.inputs - new_children[index] = new_spec - - # Create a list of all children that are no longer needed. 
- unneeded_children = [] - for child in self.children: - # Triggered tasks are never removed. - if child.triggered: - continue - - # If the task already exists, remove it from to-be-added - if child.task_spec in new_children: - new_children.remove(child.task_spec) - # We should set the state here but that breaks everything - continue - - # Definite tasks must not be removed, so they HAVE to be in the given task spec list. - if child._is_definite(): - raise WorkflowException(f'removal of non-predicted child {child}', task_spec=self.task_spec) - unneeded_children.append(child) - - # Remove and add the children accordingly. - for child in unneeded_children: - self.children.remove(child) - for task_spec in new_children: - self._add_child(task_spec, state) - - def _set_likely_task(self, task_specs): - if not isinstance(task_specs, list): - task_specs = [task_specs] - for task_spec in task_specs: - for child in self.children: - if child.task_spec != task_spec: - continue - if child._is_definite(): - continue - child._set_state(TaskState.LIKELY) - def _is_descendant_of(self, parent): """ Returns True if parent is in the list of ancestors, returns False @@ -574,15 +543,6 @@ class Task(object, metaclass=DeprecatedMetaTask): return self.parent return self.parent._find_ancestor_from_name(name) - def _ready(self): - """ - Marks the task as ready for execution. - """ - if self._has_state(TaskState.COMPLETED) or self._has_state(TaskState.CANCELLED): - return - self._set_state(TaskState.READY) - self.task_spec._on_ready(self) - def get_name(self): return str(self.task_spec.name) @@ -590,14 +550,10 @@ class Task(object, metaclass=DeprecatedMetaTask): return str(self.task_spec.description) def get_state_name(self): - """ - Returns a textual representation of this Task's state. 
- """ - state_name = [] + """Returns a textual representation of this Task's state.""" for state, name in list(TaskStateNames.items()): if self._has_state(state): - state_name.append(name) - return '|'.join(state_name) + return name def get_spec_data(self, name=None, default=None): """ @@ -648,37 +604,54 @@ class Task(object, metaclass=DeprecatedMetaTask): """ return self.data.get(name, default) - def cancel(self): - """ - Cancels the item if it was not yet completed, and removes - any children that are LIKELY. - """ - if self._is_finished(): - for child in self.children: - child.cancel() + def _ready(self): + """Marks the task as ready for execution.""" + if self._has_state(TaskState.COMPLETED) or self._has_state(TaskState.CANCELLED): return - self._set_state(TaskState.CANCELLED) - self._drop_children() - self.task_spec._on_cancel(self) + self._set_state(TaskState.READY) + self.task_spec._on_ready(self) - def complete(self): + def run(self): """ - Called by the associated task to let us know that its state - has changed (e.g. from FUTURE to COMPLETED.) + Execute the task. + + If the return value of task_spec._run is None, assume the task is not finished, + and move the task to WAITING. + + :rtype: boolean or None + :returns: the value returned by the task spec's run method """ - self._set_state(TaskState.COMPLETED) - # I am taking back my previous comment about running the task after it's completed being "CRAZY" - # Turns out that tasks are in fact supposed to be complete at this point and I've been wrong all along - # about when tasks should actually be executed start = time.time() - retval = self.task_spec._on_complete(self) + retval = self.task_spec._run(self) extra = self.log_info({ 'action': 'Complete', 'elapsed': time.time() - start }) metrics.debug('', extra=extra) + if retval is None: + self._set_state(TaskState.WAITING) + else: + # If we add an error state, the we can move the task to COMPLETE or ERROR + # according to the return value. 
+ self.complete() return retval + def cancel(self): + """Cancels the item if it was not yet completed, and removes any children that are LIKELY.""" + if self._is_finished(): + for child in self.children: + child.cancel() + else: + self._set_state(TaskState.CANCELLED) + self._drop_children() + self.task_spec._on_cancel(self) + + def complete(self): + """Marks this task complete.""" + self._set_state(TaskState.COMPLETED) + self.task_spec._on_complete(self) + self.workflow.last_task = self + def trigger(self, *args): """ If recursive is True, the state is applied to the tree recursively. diff --git a/SpiffWorkflow/SpiffWorkflow/util/compat.py b/SpiffWorkflow/SpiffWorkflow/util/compat.py index dfcf0ce5..8a41fc5d 100644 --- a/SpiffWorkflow/SpiffWorkflow/util/compat.py +++ b/SpiffWorkflow/SpiffWorkflow/util/compat.py @@ -1,30 +1,22 @@ -from builtins import object +from threading import Lock -try: - # python 2 - from mutex import mutex +class mutex(object): -except ImportError: - # python 3 - from threading import Lock + def __init__(self): + self.lock = Lock() - class mutex(object): + def lock(self): + raise NotImplementedError - def __init__(self): - self.lock = Lock() - - def lock(self): - raise NotImplementedError - - def test(self): - has = self.lock.acquire(blocking=False) - if has: - self.lock.release() - - return has - - def testandset(self): - return self.lock.acquire(blocking=False) - - def unlock(self): + def test(self): + has = self.lock.acquire(blocking=False) + if has: self.lock.release() + + return has + + def testandset(self): + return self.lock.acquire(blocking=False) + + def unlock(self): + self.lock.release() diff --git a/SpiffWorkflow/SpiffWorkflow/workflow.py b/SpiffWorkflow/SpiffWorkflow/workflow.py index 3a6183b5..caa5f44b 100644 --- a/SpiffWorkflow/SpiffWorkflow/workflow.py +++ b/SpiffWorkflow/SpiffWorkflow/workflow.py @@ -20,14 +20,14 @@ import logging from .specs.Simple import Simple -from .specs.LoopResetTask import LoopResetTask from .task 
import Task, TaskState from .util.compat import mutex from .util.event import Event -from .exceptions import WorkflowException +from .exceptions import TaskNotFoundException, WorkflowException logger = logging.getLogger('spiff') + class Workflow(object): """ @@ -54,29 +54,25 @@ class Workflow(object): self.outer_workflow = kwargs.get('parent', self) self.locks = {} self.last_task = None - if deserializing: - assert 'Root' in workflow_spec.task_specs - root = workflow_spec.task_specs['Root'] # Probably deserialized + if 'Root' in workflow_spec.task_specs: + root = workflow_spec.task_specs['Root'] else: - if 'Root' in workflow_spec.task_specs: - root = workflow_spec.task_specs['Root'] - else: - root = Simple(workflow_spec, 'Root') - logger.info('Initialize', extra=self.log_info()) + root = Simple(workflow_spec, 'Root') # Setting TaskState.COMPLETED prevents the root task from being executed. self.task_tree = Task(self, root, state=TaskState.COMPLETED) + start = self.task_tree._add_child(self.spec.start, state=TaskState.FUTURE) self.success = True self.debug = False # Events. 
self.completed_event = Event() - start = self.task_tree._add_child(self.spec.start, state=TaskState.FUTURE) - - self.spec.start._predict(start) - if 'parent' not in kwargs: - start.task_spec._update(start) + if not deserializing: + self._predict() + if 'parent' not in kwargs: + start.task_spec._update(start) + logger.info('Initialize', extra=self.log_info()) self.task_mapping = self._get_task_mapping() @@ -108,6 +104,10 @@ class Workflow(object): return True return False + def _predict(self, mask=TaskState.NOT_FINISHED_MASK): + for task in Workflow.get_tasks(self,TaskState.NOT_FINISHED_MASK): + task.task_spec._predict(task, mask=mask) + def _get_waiting_tasks(self): waiting = Task.Iterator(self.task_tree, TaskState.WAITING) return [w for w in waiting] @@ -195,24 +195,6 @@ class Workflow(object): """ return self.spec.get_task_spec_from_name(name) - def get_task(self, id,tasklist=None): - """ - Returns the task with the given id. - - :type id:integer - :param id: The id of a task. - :param tasklist: Optional cache of get_tasks for operations - where we are calling multiple times as when we - are deserializing the workflow - :rtype: Task - :returns: The task with the given id. - """ - if tasklist: - tasks = [task for task in tasklist if task.id == id] - else: - tasks = [task for task in self.get_tasks() if task.id == id] - return tasks[0] if len(tasks) == 1 else None - def get_tasks_from_spec_name(self, name): """ Returns all tasks whose spec has the given name. @@ -222,15 +204,7 @@ class Workflow(object): :rtype: list[Task] :returns: A list of tasks that relate to the spec with the given name. 
""" - return [task for task in self.get_tasks_iterator() - if task.task_spec.name == name] - - def empty(self,str): - if str == None: - return True - if str == '': - return True - return False + return [task for task in self.get_tasks_iterator() if task.task_spec.name == name] def get_tasks(self, state=TaskState.ANY_MASK): """ @@ -243,38 +217,6 @@ class Workflow(object): """ return [t for t in Task.Iterator(self.task_tree, state)] - def reset_task_from_id(self, task_id): - """ - Runs the task with the given id. - - :type task_id: integer - :param task_id: The id of the Task object. - """ - if task_id is None: - raise WorkflowException('task_id is None', task_spec=self.spec) - data = {} - if self.last_task and self.last_task.data: - data = self.last_task.data - for task in self.task_tree: - if task.id == task_id: - return task.reset_token(data) - msg = 'A task with the given task_id (%s) was not found' % task_id - raise WorkflowException(msg, task_spec=self.spec) - - def get_reset_task_spec(self, destination): - """ - Returns a task, that once complete, will reset the workflow back - to a previously completed task. - :param destination: Task to reset to, on complete. - :return: TaskSpec - """ - name = "return_to_" + destination.task_spec.name - spec = self.get_task_spec_from_name(name) - if not spec: - spec = LoopResetTask(self.spec, name, destination.id, - destination.task_spec.name) - return spec - def get_tasks_iterator(self, state=TaskState.ANY_MASK): """ Returns a iterator of Task objects with the given state. @@ -286,22 +228,54 @@ class Workflow(object): """ return Task.Iterator(self.task_tree, state) - def complete_task_from_id(self, task_id): + def get_task_from_id(self, task_id, tasklist=None): + """ + Returns the task with the given id. + + :type id:integer + :param id: The id of a task. 
+ :param tasklist: Optional cache of get_tasks for operations + where we are calling multiple times as when we + are deserializing the workflow + :rtype: Task + :returns: The task with the given id. + """ + if task_id is None: + raise WorkflowException('task_id is None', task_spec=self.spec) + tasklist = tasklist or self.task_tree + for task in self.task_tree: + if task.id == task_id: + return task + msg = 'A task with the given task_id (%s) was not found' % task_id + raise TaskNotFoundException(msg, task_spec=self.spec) + + def run_task_from_id(self, task_id): """ Runs the task with the given id. :type task_id: integer :param task_id: The id of the Task object. """ - if task_id is None: - raise WorkflowException('task_id is None', task_spec=self.spec) - for task in self.task_tree: - if task.id == task_id: - return task.complete() - msg = 'A task with the given task_id (%s) was not found' % task_id - raise WorkflowException(msg, task_spec=self.spec) + task = self.get_task_from_id(task_id) + return task.run() - def complete_next(self, pick_up=True, halt_on_manual=True): + def reset_task_from_id(self, task_id): + """ + Runs the task with the given id. + + :type task_id: integer + :param task_id: The id of the Task object. + """ + # Given that this is a BPMN thing it's questionable whether this belongs here at all + # However, since it calls a BPMN thing on `task`, I guess I'll leave it here + # At least it's not in both places any more + data = {} + if self.last_task and self.last_task.data: + data = self.last_task.data + task = self.get_task_from_id(task_id) + return task.reset_token(data) + + def run_next(self, pick_up=True, halt_on_manual=True): """ Runs the next task. Returns True if completed, False otherwise. 
@@ -329,7 +303,7 @@ class Workflow(object): self.last_task = None if task is not None: if not (halt_on_manual and task.task_spec.manual): - if task.complete(): + if task.run(): self.last_task = task return True blacklist.append(task) @@ -340,7 +314,7 @@ class Workflow(object): if task._is_descendant_of(blacklisted_task): continue if not (halt_on_manual and task.task_spec.manual): - if task.complete(): + if task.run(): self.last_task = task return True blacklist.append(task) @@ -353,7 +327,7 @@ class Workflow(object): return True return False - def complete_all(self, pick_up=True, halt_on_manual=True): + def run_all(self, pick_up=True, halt_on_manual=True): """ Runs all branches until completion. This is a convenience wrapper around :meth:`complete_next`, and the pick_up argument is passed @@ -366,7 +340,7 @@ class Workflow(object): complete any tasks that have manual=True. See :meth:`SpiffWorkflow.specs.TaskSpec.__init__` """ - while self.complete_next(pick_up, halt_on_manual): + while self.run_next(pick_up, halt_on_manual): pass def get_dump(self): diff --git a/SpiffWorkflow/doc/non-bpmn/tutorial/start.py b/SpiffWorkflow/doc/non-bpmn/tutorial/start.py index 4090384c..93d6aec5 100644 --- a/SpiffWorkflow/doc/non-bpmn/tutorial/start.py +++ b/SpiffWorkflow/doc/non-bpmn/tutorial/start.py @@ -19,7 +19,7 @@ workflow = Workflow(spec) # Execute until all tasks are done or require manual intervention. # For the sake of this tutorial, we ignore the "manual" flag on the # tasks. In practice, you probably don't want to do that. -workflow.complete_all(halt_on_manual=False) +workflow.run_all(halt_on_manual=False) # Alternatively, this is what a UI would do for a manual task. #workflow.complete_task_from_id(...) 
diff --git a/SpiffWorkflow/setup.py b/SpiffWorkflow/setup.py index ad4c2163..3429710d 100644 --- a/SpiffWorkflow/setup.py +++ b/SpiffWorkflow/setup.py @@ -21,7 +21,10 @@ setup(name='SpiffWorkflow', author_email='dan@sartography.com', license='lGPLv2', packages=find_packages(exclude=['tests', 'tests.*']), - package_data={'SpiffWorkflow.bpmn.parser.schema': ['*.xsd']}, + package_data={ + 'SpiffWorkflow.bpmn.parser': ['schema/*.xsd'], + 'SpiffWorkflow.dmn.parser': ['schema/*.xsd'], + }, install_requires=['configparser', 'lxml', 'celery', # required for python 3.7 - https://stackoverflow.com/a/73932581 'importlib-metadata<5.0; python_version <= "3.7"'], diff --git a/SpiffWorkflow/tests/SpiffWorkflow/PatternTest.py b/SpiffWorkflow/tests/SpiffWorkflow/PatternTest.py deleted file mode 100644 index 85a90d27..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/PatternTest.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- - -from builtins import object -import sys -import unittest -import os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - -from lxml import etree - -from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec -from SpiffWorkflow.task import Task -from SpiffWorkflow.serializer.prettyxml import XmlSerializer -from tests.SpiffWorkflow.util import run_workflow - - -class WorkflowTestData(object): - - def __init__(self, filename, spec, path, data): - self.filename = filename - self.spec = spec - self.path = path - self.data = data - - -class PatternTest(unittest.TestCase): - maxDiff = None - - def setUp(self): - Task.id_pool = 0 - Task.thread_id_pool = 0 - self.xml_path = ['data/spiff/control-flow', - 'data/spiff/data', - 'data/spiff/resource', - 'data/spiff'] - self.workflows = [] - - for basedir in self.xml_path: - dirname = os.path.join(os.path.dirname(__file__), basedir) - - for filename in os.listdir(dirname): - if not filename.endswith(('.xml', '.py')): - continue - if filename.endswith('__.py'): - continue - filename = 
os.path.join(dirname, filename) - self.load_workflow_spec(filename) - - def load_workflow_spec(self, filename): - # Load the .path file. - path_file = os.path.splitext(filename)[0] + '.path' - if os.path.exists(path_file): - with open(path_file) as fp: - expected_path = fp.read() - else: - expected_path = None - - # Load the .data file. - data_file = os.path.splitext(filename)[0] + '.data' - if os.path.exists(data_file): - with open(data_file) as fp: - expected_data = fp.read() - else: - expected_data = None - - # Test patterns that are defined in XML format. - if filename.endswith('.xml'): - with open(filename) as fp: - xml = etree.parse(fp).getroot() - serializer = XmlSerializer() - wf_spec = WorkflowSpec.deserialize( - serializer, xml, filename=filename) - - # Test patterns that are defined in Python. - elif filename.endswith('.py'): - with open(filename) as fp: - code = compile(fp.read(), filename, 'exec') - thedict = {} - result = eval(code, thedict) - wf_spec = thedict['TestWorkflowSpec']() - - else: - raise Exception('unsuported specification format', filename) - - test_data = WorkflowTestData( - filename, wf_spec, expected_path, expected_data) - self.workflows.append(test_data) - - def testWorkflowSpec(self): - for test in self.workflows: - print(test.filename) - run_workflow(self, test.spec, test.path, test.data) - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(PatternTest) -if __name__ == '__main__': - if len(sys.argv) == 2: - test = PatternTest('run_pattern') - test.setUp() - test.run_pattern(sys.argv[1]) - sys.exit(0) - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ApprovalsTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ApprovalsTest.py index 857c81f0..9247dec9 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ApprovalsTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ApprovalsTest.py @@ -2,7 +2,7 @@ import unittest from SpiffWorkflow.bpmn.workflow import BpmnWorkflow 
-from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase +from .BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'matth' diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BaseParallelTestCase.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BaseParallelTestCase.py index 80fc5ff7..a936e602 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BaseParallelTestCase.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BaseParallelTestCase.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- -from builtins import range -import unittest import logging from SpiffWorkflow.task import TaskState -from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'matth' @@ -28,13 +25,9 @@ class BaseParallelTestCase(BpmnWorkflowTestCase): "Doing step '%s' (with choice='%s')", s, choice) else: logging.info("Doing step '%s'", s) - # logging.debug(self.workflow.get_dump()) - self.do_next_named_step( - s, choice=choice, only_one_instance=only_one_instance) + self.do_next_named_step(s, choice=choice, only_one_instance=only_one_instance) self.workflow.do_engine_steps() if save_restore: - # logging.debug("Before SaveRestore: \n%s" % - # self.workflow.get_dump()) self.save_restore() self.workflow.do_engine_steps() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py index d8420daa..376975ba 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py @@ -34,7 +34,7 @@ class TestUserTask(UserTask): def do_choice(self, task, choice): task.set_data(choice=choice) - task.complete() + task.run() class TestExclusiveGatewayParser(ConditionalGatewayParser): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py index d16a8792..5f9b797c 100644 --- 
a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py @@ -64,7 +64,7 @@ class BpmnWorkflowTestCase(unittest.TestCase): def switch_workflow(p): for task_id, sp in p.workflow._get_outermost_workflow().subprocesses.items(): if p in sp.get_tasks(workflow=sp): - return p.workflow.get_task(task_id) + return p.workflow.get_task_from_id(task_id) def is_match(t): if not (t.task_spec.name == step_name_path[-1] or t.task_spec.description == step_name_path[-1]): @@ -116,7 +116,7 @@ class BpmnWorkflowTestCase(unittest.TestCase): if set_attribs: tasks[0].set_data(**set_attribs) - tasks[0].complete() + tasks[0].run() def save_restore(self): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py index 1e3d158e..2fa5adfc 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py @@ -6,7 +6,7 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.exceptions import WorkflowTaskException -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase +from .BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CollaborationTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CollaborationTest.py index 9f28bdbb..b3c03f22 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CollaborationTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/CollaborationTest.py @@ -82,7 +82,7 @@ class CollaborationTest(BpmnWorkflowTestCase): workflow.do_engine_steps() for idx, task in enumerate(workflow.get_ready_user_tasks()): task.data['task_num'] = idx - task.complete() + task.run() workflow.do_engine_steps() ready_tasks = workflow.get_ready_user_tasks() waiting = 
workflow.get_tasks_from_spec_name('get_response') @@ -94,7 +94,7 @@ class CollaborationTest(BpmnWorkflowTestCase): # Now copy the task_num that was sent into a new variable for task in ready_tasks: task.data.update(init_id=task.data['task_num']) - task.complete() + task.run() workflow.do_engine_steps() # If the messages were routed properly, the id should match for task in workflow.get_tasks_from_spec_name('subprocess_end'): @@ -108,7 +108,7 @@ class CollaborationTest(BpmnWorkflowTestCase): workflow.do_engine_steps() for idx, task in enumerate(workflow.get_ready_user_tasks()): task.data['task_num'] = idx - task.complete() + task.run() workflow.do_engine_steps() # Two processes should have been started and two corresponding catch events should be waiting @@ -121,12 +121,12 @@ class CollaborationTest(BpmnWorkflowTestCase): # Now copy the task_num that was sent into a new variable for task in ready_tasks: task.data.update(init_id=task.data['task_num']) - task.complete() + task.run() workflow.do_engine_steps() # Complete dummy tasks for task in workflow.get_ready_user_tasks(): - task.complete() + task.run() workflow.do_engine_steps() # Repeat for the other process, using a different mapped name @@ -136,7 +136,7 @@ class CollaborationTest(BpmnWorkflowTestCase): self.assertEqual(len(waiting), 2) for task in ready_tasks: task.data.update(subprocess=task.data['task_num']) - task.complete() + task.run() workflow.do_engine_steps() # If the messages were routed properly, the id should match diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py index fab8db77..58420290 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py @@ -22,13 +22,13 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase): # Add the data so that we can advance the workflow ready_tasks = 
self.workflow.get_ready_user_tasks() ready_tasks[0].data = { 'obj_1': 'hello' } - ready_tasks[0].complete() + ready_tasks[0].run() # Remove the data before advancing ready_tasks = self.workflow.get_ready_user_tasks() self.workflow.data.pop('obj_1') with self.assertRaises(WorkflowDataException) as exc: - ready_tasks[0].complete() + ready_tasks[0].run() self.assertEqual(exc.data_output.name, 'obj_1') def testMissingDataOutput(self): @@ -37,7 +37,7 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() ready_tasks = self.workflow.get_ready_user_tasks() with self.assertRaises(WorkflowDataException) as exc: - ready_tasks[0].complete() + ready_tasks[0].run() self.assertEqual(exc.data_output.name, 'obj_1') def actual_test(self, save_restore): @@ -48,7 +48,7 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase): # Set up the data ready_tasks = self.workflow.get_ready_user_tasks() ready_tasks[0].data = { 'obj_1': 'hello' } - ready_tasks[0].complete() + ready_tasks[0].run() # After task completion, obj_1 should be copied out of the task into the workflow self.assertNotIn('obj_1', ready_tasks[0].data) self.assertIn('obj_1', self.workflow.data) @@ -59,14 +59,14 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase): # Set a value for obj_1 in the task data again ready_tasks = self.workflow.get_ready_user_tasks() ready_tasks[0].data = { 'obj_1': 'hello again' } - ready_tasks[0].complete() + ready_tasks[0].run() # Check to make sure we use the workflow value instead of the value we set ready_tasks = self.workflow.get_ready_user_tasks() self.assertEqual(ready_tasks[0].data['obj_1'], 'hello') # Modify the value in the task ready_tasks[0].data = { 'obj_1': 'hello again' } - ready_tasks[0].complete() + ready_tasks[0].run() # We did not set an output data reference so obj_1 should remain unchanged in the workflow data # and be removed from the task data self.assertNotIn('obj_1', ready_tasks[0].data) @@ -77,7 +77,7 @@ class 
DataObjectReferenceTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_ready_user_tasks() self.assertEqual(ready_tasks[0].data['obj_1'], 'hello') ready_tasks[0].data['obj_1'] = 'hello again' - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() sp = self.workflow.get_tasks_from_spec_name('subprocess')[0] # It was copied out diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/IOSpecTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/IOSpecTest.py index b3f70fe6..fe8dc7dd 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/IOSpecTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/IOSpecTest.py @@ -76,7 +76,7 @@ class CallActivityDataTest(BpmnWorkflowTestCase): waiting = self.workflow.get_tasks(TaskState.WAITING) while len(waiting) == 0: next_task = self.workflow.get_tasks(TaskState.READY)[0] - next_task.complete() + next_task.run() waiting = self.workflow.get_tasks(TaskState.WAITING) def complete_subprocess(self): @@ -84,7 +84,7 @@ class CallActivityDataTest(BpmnWorkflowTestCase): waiting = self.workflow.get_tasks(TaskState.WAITING) while len(waiting) > 0: next_task = self.workflow.get_tasks(TaskState.READY)[0] - next_task.complete() + next_task.run() waiting = self.workflow.get_tasks(TaskState.WAITING) @@ -113,7 +113,7 @@ class IOSpecOnTaskTest(BpmnWorkflowTestCase): task = self.workflow.get_tasks_from_spec_name('any_task')[0] task.data.update({'out_1': 1}) with self.assertRaises(WorkflowDataException) as exc: - task.complete() + task.run() self.assertEqual(exc.exception.data_output.name, 'out_2') def actual_test(self, save_restore=False): @@ -124,6 +124,6 @@ class IOSpecOnTaskTest(BpmnWorkflowTestCase): task = self.workflow.get_tasks_from_spec_name('any_task')[0] self.assertDictEqual(task.data, {'in_1': 1, 'in_2': 'hello world'}) task.data.update({'out_1': 1, 'out_2': 'bye', 'extra': True}) - task.complete() + task.run() self.workflow.do_engine_steps() self.assertDictEqual(self.workflow.last_task.data, {'out_1': 1, 'out_2': 
'bye'}) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py index f4a0e5db..1b46e29d 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py @@ -36,4 +36,4 @@ class InclusiveGatewayTest(BpmnWorkflowTestCase): def set_data(self, value): task = self.workflow.get_ready_user_tasks()[0] task.data = value - task.complete() + task.run() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelMultiInstanceTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelMultiInstanceTest.py index c72948b4..314d7393 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelMultiInstanceTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelMultiInstanceTest.py @@ -26,7 +26,7 @@ class BaseTestCase(BpmnWorkflowTestCase): self.assertEqual(task.task_spec.name, 'any_task [child]') self.assertIn('input_item', task.data) task.data['output_item'] = task.data['input_item'] * 2 - task.complete() + task.run() if save_restore: self.save_restore() ready_tasks = self.workflow.get_ready_user_tasks() @@ -47,7 +47,7 @@ class BaseTestCase(BpmnWorkflowTestCase): self.assertEqual(len(ready_tasks), 3) task = [t for t in ready_tasks if t.data['input_item'] == 2][0] task.data['output_item'] = task.data['input_item'] * 2 - task.complete() + task.run() self.workflow.do_engine_steps() self.workflow.refresh_waiting_tasks() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelOrderTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelOrderTest.py index 39797448..3232ac6b 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelOrderTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelOrderTest.py @@ -20,7 +20,7 @@ class ParallelOrderTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() self.assertFalse(self.workflow.is_completed()) - self.assertEquals(4, len(self.workflow.get_ready_user_tasks())) 
+ self.assertEqual(4, len(self.workflow.get_ready_user_tasks())) tasks = self.workflow.get_ready_user_tasks() self.assertEquals("Task 1", tasks[0].get_description()) self.assertEquals("Task 2", tasks[1].get_description()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelThroughSameTaskTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelThroughSameTaskTest.py index 4768ddba..07c790dc 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelThroughSameTaskTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ParallelThroughSameTaskTest.py @@ -51,7 +51,7 @@ class ParallelThroughSameTaskTest(BpmnWorkflowTestCase): self.assertEqual(2, len(ready_tasks)) self.assertEqual( 'Repeated Task', ready_tasks[0].task_spec.description) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() # The inclusive gateway allows us through here, because there is no route for the other thread # that doesn't use the same sequence flow @@ -82,7 +82,7 @@ class ParallelThroughSameTaskTest(BpmnWorkflowTestCase): self.assertEqual(2, len(ready_tasks)) self.assertEqual( 'Repeated Task', ready_tasks[0].task_spec.description) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() self.save_restore() # The inclusive gateway allows us through here, because there is no route for the other thread diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py index 81e5f330..b9cd99be 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py @@ -35,7 +35,7 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() top_level_task = self.workflow.get_ready_user_tasks()[0] - self.workflow.complete_task_from_id(top_level_task.id) + self.workflow.run_task_from_id(top_level_task.id) self.workflow.do_engine_steps() task = 
self.workflow.get_ready_user_tasks()[0] self.save_restore() @@ -50,11 +50,11 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() self.assertEqual(1, len(self.workflow.get_ready_user_tasks())) task = self.workflow.get_ready_user_tasks()[0] - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'SubTask2') - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_tasks_from_spec_name('Task1')[0] task.reset_token(self.workflow.last_task.data) @@ -62,19 +62,19 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.reload_save_restore() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Task1') - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Subtask2') - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Subtask2A') - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Task2') - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetTimerTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetTimerTest.py deleted file mode 100644 index 8a94daba..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/ResetTimerTest.py +++ /dev/null @@ -1,28 +0,0 @@ - -from 
SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from SpiffWorkflow.task import TaskState - -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase - -class ResetTimerTest(BpmnWorkflowTestCase): - - def test_timer(self): - spec, subprocess = self.load_workflow_spec('reset_timer.bpmn', 'main') - self.workflow = BpmnWorkflow(spec, subprocess) - self.workflow.do_engine_steps() - task_1 = self.workflow.get_tasks_from_spec_name('task_1')[0] - timer = self.workflow.get_tasks_from_spec_name('timer')[0] - original_timer = timer.internal_data.get('event_value') - # This returns us to the task - task_1.data['modify'] = True - task_1.complete() - self.workflow.do_engine_steps() - # The timer should be waiting and the time should have been updated - self.assertEqual(task_1.state, TaskState.READY) - self.assertEqual(timer.state, TaskState.WAITING) - self.assertGreater(timer.internal_data.get('event_value'), original_timer) - task_1.data['modify'] = False - task_1.complete() - self.workflow.do_engine_steps() - self.assertEqual(timer.state, TaskState.CANCELLED) - self.assertTrue(self.workflow.is_completed()) \ No newline at end of file diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SequentialMultiInstanceTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SequentialMultiInstanceTest.py index ba93fdbe..c70b8e47 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SequentialMultiInstanceTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SequentialMultiInstanceTest.py @@ -27,7 +27,7 @@ class BaseTestCase(BpmnWorkflowTestCase): self.assertEqual(task.task_spec.name, 'any_task [child]') self.assertIn('input_item', task.data) task.data['output_item'] = task.data['input_item'] * 2 - task.complete() + task.run() if save_restore: self.save_restore() ready_tasks = self.workflow.get_ready_user_tasks() @@ -54,7 +54,7 @@ class BaseTestCase(BpmnWorkflowTestCase): self.assertEqual(ready.task_spec.name, 'any_task [child]') self.assertIn('input_item', ready.data) 
ready.data['output_item'] = ready.data['input_item'] * 2 - ready.complete() + ready.run() self.workflow.do_engine_steps() self.workflow.refresh_waiting_tasks() ready_tasks = self.workflow.get_ready_user_tasks() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/StandardLoopTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/StandardLoopTest.py index e9d6534c..18a81976 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/StandardLoopTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/StandardLoopTest.py @@ -21,7 +21,7 @@ class StandardLoopTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_ready_user_tasks() self.assertEqual(len(ready_tasks), 1) ready_tasks[0].data[str(idx)] = True - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) @@ -36,7 +36,7 @@ class StandardLoopTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_ready_user_tasks() self.assertEqual(len(ready_tasks), 1) ready_tasks[0].data['done'] = True - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SwimLaneTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SwimLaneTest.py index 12bbeddd..a7000d52 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SwimLaneTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/SwimLaneTest.py @@ -35,7 +35,7 @@ class SwimLaneTest(BpmnWorkflowTestCase): self.assertEqual(0, len(btasks)) task = atasks[0] self.assertEqual('Activity_A1', task.task_spec.name) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() atasks = self.workflow.get_ready_user_tasks(lane="A") btasks = self.workflow.get_ready_user_tasks(lane="B") @@ -44,10 +44,10 @@ class SwimLaneTest(BpmnWorkflowTestCase): # Complete the gateway and the two tasks in B Lane btasks[0].data = {'NeedClarification': False} - 
self.workflow.complete_task_from_id(btasks[0].id) + self.workflow.run_task_from_id(btasks[0].id) self.workflow.do_engine_steps() btasks = self.workflow.get_ready_user_tasks(lane="B") - self.workflow.complete_task_from_id(btasks[0].id) + self.workflow.run_task_from_id(btasks[0].id) self.workflow.do_engine_steps() # Assert we are in lane C @@ -56,7 +56,7 @@ class SwimLaneTest(BpmnWorkflowTestCase): self.assertEqual(tasks[0].task_spec.lane, "C") # Step into the sub-process, assure that is also in lane C - self.workflow.complete_task_from_id(tasks[0].id) + self.workflow.run_task_from_id(tasks[0].id) self.workflow.do_engine_steps() tasks = self.workflow.get_ready_user_tasks() self.assertEqual("SubProcessTask", tasks[0].task_spec.description) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py deleted file mode 100644 index 0b3d5603..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- - -import datetime -import unittest - -from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine -from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment -from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase - -__author__ = 'sartography' - -class CustomScriptEngine(PythonScriptEngine): - """This is a custom script processor that can be easily injected into Spiff Workflow. - It will execute python code read in from the bpmn. It will also make any scripts in the - scripts directory available for execution. 
""" - def __init__(self): - environment = TaskDataEnvironment({ - 'timedelta': datetime.timedelta, - }) - super().__init__(environment=environment) - -class TooManyLoopsTest(BpmnWorkflowTestCase): - - """Looping back around many times would cause the tree of tasks to grow - for each loop, doing this a 100 or 1000 times would cause the system to - run fail in various ways. This assures that is no longer the case.""" - - def testRunThroughHappy(self): - self.actual_test(save_restore=False) - - def testThroughSaveRestore(self): - self.actual_test(save_restore=True) - - def actual_test(self,save_restore = False): - spec, subprocesses = self.load_workflow_spec('too_many_loops*.bpmn', 'loops') - self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine()) - counter = 0 - data = {} - while not self.workflow.is_completed(): - self.workflow.do_engine_steps() - self.workflow.refresh_waiting_tasks() - if (self.workflow.last_task.data != data): - data = self.workflow.last_task.data - counter += 1 # There is a 10 millisecond wait task. - if save_restore: - self.save_restore() - self.workflow.script_engine = CustomScriptEngine() - self.assertEqual(20, self.workflow.last_task.data['counter']) - - def test_with_sub_process(self): - # Found an issue where looping back would fail when it happens - # right after a sub-process. So assuring this is fixed. - counter = 0 - spec, subprocesses = self.load_workflow_spec('too_many_loops_sub_process.bpmn', 'loops_sub') - self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine()) - data = {} - while not self.workflow.is_completed(): - self.workflow.do_engine_steps() - self.workflow.refresh_waiting_tasks() - if (self.workflow.last_task.data != data): - data = self.workflow.last_task.data - counter += 1 # There is a 10 millisecond wait task. 
-# self.save_restore() - self.assertEqual(20, self.workflow.last_task.data['counter']) - # One less, because we don't go back through once the first counter - # hits 20. - self.assertEqual(19, self.workflow.last_task.data['counter2']) - - def test_with_two_call_activities(self): - spec, subprocess = self.load_workflow_spec('sub_in_loop*.bpmn', 'main') - self.workflow = BpmnWorkflow(spec, subprocess, script_engine=CustomScriptEngine()) - self.workflow.do_engine_steps() - for loop in range(3): - ready = self.workflow.get_ready_user_tasks() - ready[0].data = { 'done': True if loop == 3 else False } - ready[0].complete() - self.workflow.refresh_waiting_tasks() - self.workflow.do_engine_steps() - self.save_restore() - self.workflow.script_engine = CustomScriptEngine() - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(TooManyLoopsTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/Test-Workflows/Parallel-Looping-After-Join.bpmn20.xml b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/Test-Workflows/Parallel-Looping-After-Join.bpmn20.xml index 150b28d1..c37bc64d 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/Test-Workflows/Parallel-Looping-After-Join.bpmn20.xml +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/Test-Workflows/Parallel-Looping-After-Join.bpmn20.xml @@ -1,291 +1,303 @@ - - - - - - - - - - - - - - - sid-B33EE043-AB93-4343-A1D4-7B267E2DAFBE - sid-349F8C0C-45EA-489C-84DD-1D944F48D778 - sid-57463471-693A-42A2-9EC6-6460BEDECA86 - sid-CA089240-802A-4C32-9130-FB1A33DDCCC3 - sid-E976FBC2-266E-420F-8D4D-C8FBC6199090 - sid-F3A979E3-F586-4807-8223-1FAB5A5647B0 - sid-51816945-79BF-47F9-BA3C-E95ABAE3D1DB - sid-4F7F3AA6-4E8E-458D-BBEF-E03FC0646897 - sid-ABD788A3-CD57-4280-A22A-260B3AEEE138 - sid-E7B8898A-0D14-4E98-B3D7-736B94EE3FA7 - sid-A1609BD5-1E4A-47AE-8648-1DD41D1B1D58 - sid-1946C635-7886-4687-844F-C644FA6222B8 - sid-4C3B3C16-91DB-43E3-A816-FFEE572E61E1 
- sid-55C018B8-C073-4292-9ED0-79BDE50E7498 - sid-E7904BFA-1F17-478E-91C9-C8A5B64190C9 - - - - - - - sid-F3994F51-FE54-4910-A1F4-E5895AA1A612 - - - - - - sid-3E0EBE59-75C8-465C-90CC-197CE808A96E - sid-7E15C71B-DE9E-4788-B140-A647C99FDC94 - sid-B6E22A74-A691-453A-A789-B9F8AF787D7C - - - - - - sid-7E15C71B-DE9E-4788-B140-A647C99FDC94 - sid-607CB05E-8762-41B6-AD43-C3970661A99D - - - - - - sid-B6E22A74-A691-453A-A789-B9F8AF787D7C - sid-CAEAD081-6E73-4C98-8656-C67DA18F5140 - - - - - - sid-231F8A51-752F-4CB3-8FD1-23D153238344 - sid-607CB05E-8762-41B6-AD43-C3970661A99D - sid-0895E09C-077C-4D12-8C11-31F28CBC7740 - - - - - - sid-0895E09C-077C-4D12-8C11-31F28CBC7740 - sid-2668AC98-39E4-4B12-9052-930528086CAC - - - - - - sid-F6160C0E-216C-4D72-98D1-CC5549327D55 - - - - - - sid-CAEAD081-6E73-4C98-8656-C67DA18F5140 - sid-918C653D-0960-4223-9C28-78114F238BCC - sid-FD82C2A6-7C54-4890-901E-A7E864F7605C - - - - - - sid-918C653D-0960-4223-9C28-78114F238BCC - sid-961AF51C-9935-410E-AAA4-105B19186F5E - - - - - - sid-FD82C2A6-7C54-4890-901E-A7E864F7605C - sid-47947925-21CD-46FF-8D3F-294B235AA4CF - - - - - - sid-961AF51C-9935-410E-AAA4-105B19186F5E - sid-47947925-21CD-46FF-8D3F-294B235AA4CF - sid-AFA38469-CD5C-42A2-9473-2EAEBA61F0C0 - - - - - - sid-AFA38469-CD5C-42A2-9473-2EAEBA61F0C0 - sid-231F8A51-752F-4CB3-8FD1-23D153238344 - - - - - - sid-2668AC98-39E4-4B12-9052-930528086CAC - sid-08D6385B-C6BB-45FC-A6BD-2369F392868D - sid-41205B5D-4DBA-4155-A0EE-7D71CE9AA459 - - - - - - sid-08D6385B-C6BB-45FC-A6BD-2369F392868D - sid-F6160C0E-216C-4D72-98D1-CC5549327D55 - - - - - - sid-41205B5D-4DBA-4155-A0EE-7D71CE9AA459 - sid-F3994F51-FE54-4910-A1F4-E5895AA1A612 - sid-3E0EBE59-75C8-465C-90CC-197CE808A96E - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
+ + + + + + + + + + + + + + + start + first_split + one + two + join_of_first + retry + end + second_split + two_a + two_b + join_of_second_split + two_done + exclusive + done + go + + + + + + + sid-F3994F51-FE54-4910-A1F4-E5895AA1A612 + + + + + + sid-3E0EBE59-75C8-465C-90CC-197CE808A96E + sid-7E15C71B-DE9E-4788-B140-A647C99FDC94 + sid-B6E22A74-A691-453A-A789-B9F8AF787D7C + + + + + + sid-7E15C71B-DE9E-4788-B140-A647C99FDC94 + join_of_first_split + + + + + + sid-B6E22A74-A691-453A-A789-B9F8AF787D7C + sid-CAEAD081-6E73-4C98-8656-C67DA18F5140 + + + + + + sid-231F8A51-752F-4CB3-8FD1-23D153238344 + join_of_first_split + sid-0895E09C-077C-4D12-8C11-31F28CBC7740 + + + + + + sid-0895E09C-077C-4D12-8C11-31F28CBC7740 + sid-2668AC98-39E4-4B12-9052-930528086CAC + + + + + + sid-F6160C0E-216C-4D72-98D1-CC5549327D55 + + + + + + sid-CAEAD081-6E73-4C98-8656-C67DA18F5140 + sid-918C653D-0960-4223-9C28-78114F238BCC + sid-FD82C2A6-7C54-4890-901E-A7E864F7605C + + + + + + sid-918C653D-0960-4223-9C28-78114F238BCC + sid-961AF51C-9935-410E-AAA4-105B19186F5E + + + + + + sid-FD82C2A6-7C54-4890-901E-A7E864F7605C + sid-47947925-21CD-46FF-8D3F-294B235AA4CF + + + + + + sid-961AF51C-9935-410E-AAA4-105B19186F5E + sid-47947925-21CD-46FF-8D3F-294B235AA4CF + sid-AFA38469-CD5C-42A2-9473-2EAEBA61F0C0 + + + + + + sid-AFA38469-CD5C-42A2-9473-2EAEBA61F0C0 + sid-231F8A51-752F-4CB3-8FD1-23D153238344 + + + + + + sid-2668AC98-39E4-4B12-9052-930528086CAC + sid-08D6385B-C6BB-45FC-A6BD-2369F392868D + sid-41205B5D-4DBA-4155-A0EE-7D71CE9AA459 + + + + + + sid-08D6385B-C6BB-45FC-A6BD-2369F392868D + sid-F6160C0E-216C-4D72-98D1-CC5549327D55 + + + + + + sid-41205B5D-4DBA-4155-A0EE-7D71CE9AA459 + sid-F3994F51-FE54-4910-A1F4-E5895AA1A612 + sid-3E0EBE59-75C8-465C-90CC-197CE808A96E + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/reset_timer.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/reset_timer.bpmn deleted file mode 100644 index 7750e4bb..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/reset_timer.bpmn +++ /dev/null @@ -1,104 +0,0 @@ - - - - - Flow_0j648np - - - Flow_0j648np - modify - Flow_13cp5nc - - - - Flow_13cp5nc - Flow_1r81vou - - - - Flow_0m5s7t9 - Flow_0p7c88x - - - Flow_1gm7381 - Flow_0p7c88x - - - Flow_0m5s7t9 - - "PT60S" - - - - - Flow_1r81vou - modify - Flow_1gm7381 - - - - modify - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-loop-reset.json b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-loop-reset.json new file mode 100644 index 00000000..4068fdca --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-loop-reset.json @@ -0,0 +1,600 @@ +{ + "serializer_version": "1.1", + "data": {}, + "last_task": "65ef57f0-3fbf-4851-b7c8-a03de9a9062d", + "success": true, + "tasks": { + "fcccd5d5-8e9c-4dba-91c3-5b2ed44bb332": { + "id": "fcccd5d5-8e9c-4dba-91c3-5b2ed44bb332", + "parent": null, + "children": [ + "e9523ea2-0474-4c36-a7c2-24ff56633ed7" + ], + "last_state_change": 1678818080.7799659, + "state": 32, + "task_spec": "Root", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "e9523ea2-0474-4c36-a7c2-24ff56633ed7": { + "id": "e9523ea2-0474-4c36-a7c2-24ff56633ed7", + "parent": "fcccd5d5-8e9c-4dba-91c3-5b2ed44bb332", + "children": [ + "af4a1c32-12a7-46a7-b985-284fef1cb993" + ], + "last_state_change": 1678818080.7888825, + "state": 32, + "task_spec": "Start", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "af4a1c32-12a7-46a7-b985-284fef1cb993": { + 
"id": "af4a1c32-12a7-46a7-b985-284fef1cb993", + "parent": "e9523ea2-0474-4c36-a7c2-24ff56633ed7", + "children": [ + "65ef57f0-3fbf-4851-b7c8-a03de9a9062d" + ], + "last_state_change": 1678818080.7926495, + "state": 32, + "task_spec": "StartEvent_1", + "triggered": false, + "workflow_name": "loops", + "internal_data": { + "event_fired": true + }, + "data": {} + }, + "65ef57f0-3fbf-4851-b7c8-a03de9a9062d": { + "id": "65ef57f0-3fbf-4851-b7c8-a03de9a9062d", + "parent": "af4a1c32-12a7-46a7-b985-284fef1cb993", + "children": [ + "b5200074-8196-40d3-8f83-204cf132856c" + ], + "last_state_change": 1678818080.7970355, + "state": 32, + "task_spec": "initialize", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": { + "counter": 0 + } + }, + "b5200074-8196-40d3-8f83-204cf132856c": { + "id": "b5200074-8196-40d3-8f83-204cf132856c", + "parent": "65ef57f0-3fbf-4851-b7c8-a03de9a9062d", + "children": [ + "2ab8ec7f-a9ee-4891-a7a9-20250e2ab816" + ], + "last_state_change": 1678818091.420421, + "state": 16, + "task_spec": "TIMER_EVENT", + "triggered": false, + "workflow_name": "loops", + "internal_data": { + "event_value": "2023-03-14T18:21:20.809141+00:00", + "event_fired": true + }, + "data": { + "counter": 0 + } + }, + "2ab8ec7f-a9ee-4891-a7a9-20250e2ab816": { + "id": "2ab8ec7f-a9ee-4891-a7a9-20250e2ab816", + "parent": "b5200074-8196-40d3-8f83-204cf132856c", + "children": [ + "259de884-e162-4e0b-8c86-d4827870e2ca" + ], + "last_state_change": 1678818080.7815213, + "state": 4, + "task_spec": "increment_counter", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "259de884-e162-4e0b-8c86-d4827870e2ca": { + "id": "259de884-e162-4e0b-8c86-d4827870e2ca", + "parent": "2ab8ec7f-a9ee-4891-a7a9-20250e2ab816", + "children": [ + "bd7c4868-142b-487f-9ed3-6a384762dd55" + ], + "last_state_change": 1678818080.7818034, + "state": 4, + "task_spec": "Activity_0w5u4k4", + "triggered": false, + "workflow_name": "loops", + 
"internal_data": {}, + "data": {} + }, + "bd7c4868-142b-487f-9ed3-6a384762dd55": { + "id": "bd7c4868-142b-487f-9ed3-6a384762dd55", + "parent": "259de884-e162-4e0b-8c86-d4827870e2ca", + "children": [ + "2fc6691e-f99b-46f2-bebc-7ba729880d13", + "3a14ea66-76b3-4442-8cb3-71a89b9b92fd" + ], + "last_state_change": 1678818080.782314, + "state": 4, + "task_spec": "Gateway_over_20", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "2fc6691e-f99b-46f2-bebc-7ba729880d13": { + "id": "2fc6691e-f99b-46f2-bebc-7ba729880d13", + "parent": "bd7c4868-142b-487f-9ed3-6a384762dd55", + "children": [ + "655b5f82-14dd-4edd-b88d-94e8cce02e78" + ], + "last_state_change": 1678818080.783402, + "state": 1, + "task_spec": "end_event5", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "655b5f82-14dd-4edd-b88d-94e8cce02e78": { + "id": "655b5f82-14dd-4edd-b88d-94e8cce02e78", + "parent": "2fc6691e-f99b-46f2-bebc-7ba729880d13", + "children": [], + "last_state_change": 1678818080.7840528, + "state": 1, + "task_spec": "loops.EndJoin", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + }, + "3a14ea66-76b3-4442-8cb3-71a89b9b92fd": { + "id": "3a14ea66-76b3-4442-8cb3-71a89b9b92fd", + "parent": "bd7c4868-142b-487f-9ed3-6a384762dd55", + "children": [], + "last_state_change": 1678818080.7835343, + "state": 1, + "task_spec": "return_to_TIMER_EVENT", + "triggered": false, + "workflow_name": "loops", + "internal_data": {}, + "data": {} + } + }, + "root": "fcccd5d5-8e9c-4dba-91c3-5b2ed44bb332", + "spec": { + "name": "loops", + "description": "loops", + "file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops.bpmn", + "task_specs": { + "Start": { + "id": "loops_1", + "name": "Start", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [], + "outputs": [ + "StartEvent_1" + ], + "typename": "StartTask" + }, 
+ "loops.EndJoin": { + "id": "loops_2", + "name": "loops.EndJoin", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "end_event5" + ], + "outputs": [ + "End" + ], + "typename": "_EndJoin" + }, + "End": { + "id": "loops_3", + "name": "End", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "loops.EndJoin" + ], + "outputs": [], + "typename": "Simple" + }, + "StartEvent_1": { + "id": "loops_4", + "name": "StartEvent_1", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Start" + ], + "outputs": [ + "initialize" + ], + "lane": null, + "documentation": null, + "position": { + "x": 152.0, + "y": 159.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "StartEvent", + "extensions": {} + }, + "initialize": { + "id": "loops_5", + "name": "initialize", + "description": "initialize", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "StartEvent_1" + ], + "outputs": [ + "TIMER_EVENT" + ], + "lane": null, + "documentation": null, + "position": { + "x": 250.0, + "y": 137.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "script": "counter = 0", + "typename": "ScriptTask", + "extensions": {} + }, + "TIMER_EVENT": { + "id": "loops_6", + "name": "TIMER_EVENT", + "description": "Wait", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Gateway_over_20", + "initialize" + ], + "outputs": [ + "increment_counter" + ], + "lane": null, + "documentation": null, + "position": { + "x": 412.0, + "y": 159.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": true, + "external": true, + "name": "Wait", + 
"expression": "\"PT.01S\"", + "typename": "DurationTimerEventDefinition" + }, + "typename": "IntermediateCatchEvent", + "extensions": {} + }, + "increment_counter": { + "id": "loops_7", + "name": "increment_counter", + "description": "increment counter", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "TIMER_EVENT" + ], + "outputs": [ + "Activity_0w5u4k4" + ], + "lane": null, + "documentation": null, + "position": { + "x": 480.0, + "y": 137.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "script": "counter = counter + 1", + "typename": "ScriptTask", + "extensions": {} + }, + "Activity_0w5u4k4": { + "id": "loops_8", + "name": "Activity_0w5u4k4", + "description": "call something", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "increment_counter" + ], + "outputs": [ + "Gateway_over_20" + ], + "lane": null, + "documentation": null, + "position": { + "x": 620.0, + "y": 137.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "spec": "loops_ca", + "typename": "CallActivity", + "extensions": {} + }, + "Gateway_over_20": { + "id": "loops_9", + "name": "Gateway_over_20", + "description": "is > 20", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Activity_0w5u4k4" + ], + "outputs": [ + "end_event5", + "TIMER_EVENT" + ], + "lane": null, + "documentation": null, + "position": { + "x": 755.0, + "y": 152.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "cond_task_specs": [ + { + "condition": "counter >= 20", + "task_spec": "end_event5" + }, + { + "condition": "counter < 20", + "task_spec": "TIMER_EVENT" + } + ], + "choice": null, + "default_task_spec": null, + "typename": "ExclusiveGateway", + "extensions": {} + }, + "end_event5": { + "id": "loops_10", + "name": "end_event5", + "description": null, + "manual": false, + "internal": false, + 
"lookahead": 2, + "inputs": [ + "Gateway_over_20" + ], + "outputs": [ + "loops.EndJoin" + ], + "lane": null, + "documentation": "### Results\nSubmission for Pre-Review was sent to the HSR-IRB on {{ sent_local_date_str }} at {{ sent_local_time_str }}.\n\nThe HSR-IRB started the Pre-Review process on {{ end_local_date_str }} at {{ end_local_time_str }} and assigned {{ irb_info.IRB_ADMINISTRATIVE_REVIEWER }} as the reviewer.\n\n### Metrics\n\n\nDays elapsed: {{days_delta }}", + "position": { + "x": 932.0, + "y": 159.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "EndEvent", + "extensions": {} + }, + "Root": { + "id": "loops_11", + "name": "Root", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [], + "outputs": [], + "typename": "Simple" + }, + "return_to_TIMER_EVENT": { + "id": "loops_12", + "name": "return_to_TIMER_EVENT", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Gateway_over_20", + "initialize" + ], + "outputs": [], + "destination_id": "b5200074-8196-40d3-8f83-204cf132856c", + "destination_spec_name": "TIMER_EVENT", + "typename": "LoopResetTask" + } + }, + "io_specification": null, + "data_objects": {}, + "correlation_keys": {}, + "typename": "BpmnProcessSpec" + }, + "subprocess_specs": { + "loops_ca": { + "name": "loops_ca", + "description": "loops_ca", + "file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_call_activity.bpmn", + "task_specs": { + "Start": { + "id": "loops_ca_1", + "name": "Start", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [], + "outputs": [ + "StartEvent_1" + ], + "typename": "StartTask" + }, + "loops_ca.EndJoin": { + "id": "loops_ca_2", + "name": "loops_ca.EndJoin", + 
"description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "end_event5" + ], + "outputs": [ + "End" + ], + "typename": "_EndJoin" + }, + "End": { + "id": "loops_ca_3", + "name": "End", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "loops_ca.EndJoin" + ], + "outputs": [], + "typename": "Simple" + }, + "StartEvent_1": { + "id": "loops_ca_4", + "name": "StartEvent_1", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Start" + ], + "outputs": [ + "increment_counter" + ], + "lane": null, + "documentation": null, + "position": { + "x": 152.0, + "y": 109.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "StartEvent", + "extensions": {} + }, + "increment_counter": { + "id": "loops_ca_5", + "name": "increment_counter", + "description": "increment counter", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "StartEvent_1" + ], + "outputs": [ + "end_event5" + ], + "lane": null, + "documentation": null, + "position": { + "x": 220.0, + "y": 87.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "script": "counter2 = 1000", + "typename": "ScriptTask", + "extensions": {} + }, + "end_event5": { + "id": "loops_ca_6", + "name": "end_event5", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "increment_counter" + ], + "outputs": [ + "loops_ca.EndJoin" + ], + "lane": null, + "documentation": "### Results\nSubmission for Pre-Review was sent to the HSR-IRB on {{ sent_local_date_str }} at {{ sent_local_time_str }}.\n\nThe HSR-IRB started the Pre-Review process on {{ end_local_date_str }} at {{ end_local_time_str }} and assigned {{ irb_info.IRB_ADMINISTRATIVE_REVIEWER }} 
as the reviewer.\n\n### Metrics\n\n\nDays elapsed: {{days_delta }}", + "position": { + "x": 362.0, + "y": 109.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "EndEvent", + "extensions": {} + } + }, + "io_specification": null, + "data_objects": {}, + "correlation_keys": {}, + "typename": "BpmnProcessSpec" + } + }, + "subprocesses": {}, + "bpmn_messages": [], + "correlations": {} +} diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop.bpmn deleted file mode 100644 index ec8720e0..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop.bpmn +++ /dev/null @@ -1,88 +0,0 @@ - - - - - Flow_0nlj5lh - - - Flow_0nlj5lh - Flow_16vai1a - Flow_1lkecht - - - - Flow_1lkecht - Flow_1vci114 - - - - Flow_0iui938 - Flow_0ew7zdi - Flow_16vai1a - - - Flow_0ew7zdi - - - done - - - - Flow_1vci114 - Flow_0iui938 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop_call_activity.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop_call_activity.bpmn deleted file mode 100644 index 2af69b3b..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/sub_in_loop_call_activity.bpmn +++ /dev/null @@ -1,38 +0,0 @@ - - - - - Flow_1dbtwxp - - - - Flow_1t99mly - - - - Flow_1dbtwxp - Flow_1t99mly - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops.bpmn deleted file mode 100644 index 97f663c6..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops.bpmn +++ /dev/null @@ -1,125 +0,0 @@ - - - - - Flow_1gb8wca - Flow_1d2usdq - counter = counter + 1 - - - 
- ### Results -Submission for Pre-Review was sent to the HSR-IRB on {{ sent_local_date_str }} at {{ sent_local_time_str }}. - -The HSR-IRB started the Pre-Review process on {{ end_local_date_str }} at {{ end_local_time_str }} and assigned {{ irb_info.IRB_ADMINISTRATIVE_REVIEWER }} as the reviewer. - -### Metrics - - -Days elapsed: {{days_delta }} - Flow_1tj9oz1 - - - - Flow_15jw6a4 - Flow_0op1a19 - Flow_1gb8wca - - "PT.01S" - - - - Flow_0mxlkif - Flow_1tj9oz1 - Flow_0op1a19 - - - counter >= 20 - - - counter < 20 - - - - Flow_0q7fkb7 - Flow_15jw6a4 - counter = 0 - - - - Flow_1d2usdq - Flow_0mxlkif - - - Flow_0q7fkb7 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_call_activity.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_call_activity.bpmn deleted file mode 100644 index ee7f0479..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_call_activity.bpmn +++ /dev/null @@ -1,48 +0,0 @@ - - - - - Flow_175n91v - - - Flow_175n91v - Flow_1d2usdq - counter2 = 1000 - - - - ### Results -Submission for Pre-Review was sent to the HSR-IRB on {{ sent_local_date_str }} at {{ sent_local_time_str }}. - -The HSR-IRB started the Pre-Review process on {{ end_local_date_str }} at {{ end_local_time_str }} and assigned {{ irb_info.IRB_ADMINISTRATIVE_REVIEWER }} as the reviewer. 
- -### Metrics - - -Days elapsed: {{days_delta }} - Flow_1d2usdq - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_sub_process.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_sub_process.bpmn deleted file mode 100644 index ed1461db..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/too_many_loops_sub_process.bpmn +++ /dev/null @@ -1,157 +0,0 @@ - - - - - Flow_0q7fkb7 - - - - Flow_1gb8wca - Flow_1d2usdq - counter = counter + 1 - - - - ### Results -Submission for Pre-Review was sent to the HSR-IRB on {{ sent_local_date_str }} at {{ sent_local_time_str }}. - -The HSR-IRB started the Pre-Review process on {{ end_local_date_str }} at {{ end_local_time_str }} and assigned {{ irb_info.IRB_ADMINISTRATIVE_REVIEWER }} as the reviewer. - -### Metrics - - -Days elapsed: {{days_delta }} - Flow_1tj9oz1 - - - - Flow_15jw6a4 - Flow_1ivr6d7 - Flow_1gb8wca - - "PT0.01S" - - - - Flow_1d2usdq - Flow_1tj9oz1 - Flow_0op1a19 - - - counter >= 20 - - - counter < 20 - - - - Flow_0q7fkb7 - Flow_15jw6a4 - counter = 0 -counter2 = 0 -counter3 = 0 - - - - Flow_0op1a19 - Flow_1ivr6d7 - - Flow_1fcanuu - - - - Flow_1fcanuu - Flow_04le6u5 - counter2 += 1 - - - Flow_04le6u5 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py index 6c7ea6e2..e8bae0e5 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py @@ -51,7 +51,7 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase): task.set_data(should_escalate=True) self.workflow.do_engine_steps() self.save_restore() - self.workflow.complete_all() + 
self.workflow.run_all() self.assertEqual(True, self.workflow.is_completed()) self.assertEqual(True, 'EndEvent_specific1_noninterrupting_normal' in completed_set) @@ -81,7 +81,7 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase): task.set_data(should_escalate=False) self.workflow.do_engine_steps() self.save_restore() - self.workflow.complete_all() + self.workflow.run_all() self.assertEqual(True, self.workflow.is_completed()) self.assertEqual(True, 'EndEvent_specific1_noninterrupting_normal' in completed_set) @@ -109,7 +109,7 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase): track_workflow(self.spec, completed_set) self.workflow.do_engine_steps() self.save_restore() - self.workflow.complete_all() + self.workflow.run_all() self.assertEqual(True, self.workflow.is_completed()) self.assertEqual(True, 'EndEvent_specific1_noninterrupting_normal' in completed_set) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py index 529852ee..e1565d4f 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py @@ -28,7 +28,7 @@ class EventBasedGatewayTest(BpmnWorkflowTestCase): if save_restore: self.save_restore() self.workflow.script_engine = self.script_engine - self.assertEqual(len(waiting_tasks), 1) + self.assertEqual(len(waiting_tasks), 2) self.workflow.catch(MessageEventDefinition('message_1')) self.workflow.do_engine_steps() self.workflow.refresh_waiting_tasks() @@ -41,7 +41,7 @@ class EventBasedGatewayTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() waiting_tasks = self.workflow.get_waiting_tasks() - self.assertEqual(len(waiting_tasks), 1) + self.assertEqual(len(waiting_tasks), 2) timer_event = waiting_tasks[0].task_spec.event_definition.event_definitions[-1] self.workflow.catch(timer_event) self.workflow.refresh_waiting_tasks() diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleEventsTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleEventsTest.py index 86c55235..2b6cf5df 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleEventsTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleEventsTest.py @@ -33,7 +33,7 @@ class MultipleEventsTest(BpmnWorkflowTestCase): task = self.workflow.get_tasks(TaskState.READY)[0] # Move to User Task 1 - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_tasks(TaskState.READY)[0] self.assertEqual('UserTaskOne', task.get_name()) @@ -52,10 +52,10 @@ class MultipleEventsTest(BpmnWorkflowTestCase): task = self.workflow.get_tasks(TaskState.READY)[0] # Move to User Task 2 - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_tasks(TaskState.READY)[0] - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() task = self.workflow.get_tasks(TaskState.READY)[0] self.assertEqual('UserTaskTwo', task.get_name()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py index 087951eb..031a1643 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py @@ -42,6 +42,6 @@ class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() ready_tasks = self.workflow.get_ready_user_tasks() self.assertEqual(len(ready_tasks), 1) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() self.assertEqual(self.workflow.is_completed(), True) \ No newline at end of file diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/NITimerDurationBoundaryTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py similarity index 99% rename from SpiffWorkflow/tests/SpiffWorkflow/bpmn/NITimerDurationBoundaryTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py index 29e0f62a..136b3ef3 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/NITimerDurationBoundaryTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py @@ -62,7 +62,7 @@ class NITimerDurationTest(BpmnWorkflowTestCase): task.data['delay_reason'] = 'Just Because' elif task.task_spec.name == 'Activity_Work': task.data['work_done'] = 'Yes' - task.complete() + task.run() self.workflow.refresh_waiting_tasks() self.workflow.do_engine_steps() self.assertEqual(self.workflow.is_completed(),True) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py index 452e71ed..3e16830e 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py @@ -55,11 +55,11 @@ class TimerCycleTest(BpmnWorkflowTestCase): time.sleep(0.05) self.workflow.refresh_waiting_tasks() events = self.workflow.waiting_events() - if loopcount == 0: - # Wait time is 0.1s, so the first time through, there should still be a waiting event + if loopcount < 2: + # Wait time is 0.1s, two child tasks are created self.assertEqual(len(events), 1) else: - # By the second iteration, both should be complete + # By the third iteration, the event should no longer be waiting self.assertEqual(len(events), 0) # Get coffee still ready diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py index 1cd2c17b..d7571b1e 100644 --- 
a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py @@ -39,7 +39,7 @@ class TimerDurationTest(BpmnWorkflowTestCase): # Make sure the task can still be called. task = self.workflow.get_ready_user_tasks()[0] - task.complete() + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryTest.py index ce248dae..f784ec4f 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryTest.py @@ -24,7 +24,7 @@ class TimerDurationTest(BpmnWorkflowTestCase): def actual_test(self,save_restore = False): self.workflow.do_engine_steps() ready_tasks = self.workflow.get_tasks(TaskState.READY) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() loopcount = 0 @@ -43,7 +43,7 @@ class TimerDurationTest(BpmnWorkflowTestCase): self.assertEqual(subworkflow.state, TaskState.CANCELLED) ready_tasks = self.workflow.get_ready_user_tasks() while len(ready_tasks) > 0: - ready_tasks[0].complete() + ready_tasks[0].run() ready_tasks = self.workflow.get_ready_user_tasks() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py index 02a8389f..de7def17 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py @@ -19,11 +19,11 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_tasks(TaskState.READY) ready_tasks[0].update_data({'value': 'asdf'}) - 
ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() ready_tasks = self.workflow.get_tasks(TaskState.READY) ready_tasks[0].update_data({'quantity': 2}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() self.assertIn('value', self.workflow.last_task.data) @@ -48,7 +48,7 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase): # If value == '', we cancel ready_tasks[0].update_data({'value': ''}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() # If the subprocess gets cancelled, verify that data set there does not persist @@ -72,13 +72,13 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_tasks(TaskState.READY) ready_tasks[0].update_data({'value': 'asdf'}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() ready_tasks = self.workflow.get_tasks(TaskState.READY) # If quantity == 0, we throw an error with no error code ready_tasks[0].update_data({'quantity': 0}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() # We formerly checked that subprocess data does not persist, but I think it should persist @@ -103,13 +103,13 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase): ready_tasks = self.workflow.get_tasks(TaskState.READY) ready_tasks[0].update_data({'value': 'asdf'}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() ready_tasks = self.workflow.get_tasks(TaskState.READY) # If quantity < 0, we throw 'Error 1' ready_tasks[0].update_data({'quantity': -1}) - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() # The cancel boundary event should be cancelled diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py index 88612867..e2b92ab0 100644 --- 
a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py @@ -142,7 +142,7 @@ class BpmnWorkflowSerializerTest(BaseTestCase): def test_serialize_workflow_where_script_task_includes_function(self): self.workflow.do_engine_steps() ready_tasks = self.workflow.get_ready_user_tasks() - ready_tasks[0].complete() + ready_tasks[0].run() self.workflow.do_engine_steps() results = self.serializer.serialize_json(self.workflow) assert self.workflow.is_completed() @@ -161,7 +161,7 @@ class BpmnWorkflowSerializerTest(BaseTestCase): self.assertEqual(w1.data, w2.data) self.assertEqual(w1.name, w2.name) for task in w1.get_ready_user_tasks(): - w2_task = w2.get_task(task.id) + w2_task = w2.get_task_from_id(task.id) self.assertIsNotNone(w2_task) self.assertEqual(task.data, w2_task.data) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py index 2b169f14..393b2d76 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py @@ -16,11 +16,12 @@ class Version_1_0_Test(BaseTestCase): def test_convert_subprocess(self): # The serialization used here comes from NestedSubprocessTest saved at line 25 with version 1.0 fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.0.json') - wf = self.serializer.deserialize_json(open(fn).read()) + with open(fn) as fh: + wf = self.serializer.deserialize_json(fh.read()) # We should be able to finish the workflow from this point ready_tasks = wf.get_tasks(TaskState.READY) self.assertEqual('Action3', ready_tasks[0].task_spec.description) - ready_tasks[0].complete() + ready_tasks[0].run() wf.do_engine_steps() self.assertEqual(True, wf.is_completed()) @@ -49,7 +50,7 @@ class Version_1_1_Test(BaseTestCase): 
self.assertEqual(len(task.task_spec.cond_task_specs), 2) ready_task = wf.get_ready_user_tasks()[0] ready_task.data['NeedClarification'] = 'Yes' - ready_task.complete() + ready_task.run() wf.do_engine_steps() ready_task = wf.get_ready_user_tasks()[0] self.assertEqual(ready_task.task_spec.name, 'Activity_A2') @@ -58,4 +59,15 @@ class Version_1_1_Test(BaseTestCase): fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-multi.json') with self.assertRaises(VersionMigrationError) as ctx: wf = self.serializer.deserialize_json(open(fn).read()) - self.assertEqual(ctx.exception.message, "This workflow cannot be migrated because it contains MultiInstance Tasks") \ No newline at end of file + self.assertEqual(ctx.exception.message, "This workflow cannot be migrated because it contains MultiInstance Tasks") + + def test_remove_loop_reset(self): + fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-loop-reset.json') + wf = self.serializer.deserialize_json(open(fn).read()) + # Allow 3 seconds max to allow this test to complete (there are 20 loops with a 0.1s timer) + end = time.time() + 3 + while not wf.is_completed() and time.time() < end: + wf.do_engine_steps() + wf.refresh_waiting_tasks() + self.assertTrue(wf.is_completed()) + self.assertEqual(wf.last_task.data['counter'], 20) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py index a7d9d6c5..b5177c78 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py @@ -39,7 +39,7 @@ class CallActivityMessageTest(BaseTestCase): current_task = ready_tasks[0] self.assertEqual(current_task.task_spec.name,step[0]) current_task.update_data(step[1]) - current_task.complete() + current_task.run() self.workflow.do_engine_steps() self.workflow.refresh_waiting_tasks() if save_restore: self.save_restore() diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ClashingNameTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ClashingNameTest.py index b078ddf3..a8d68cdc 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ClashingNameTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ClashingNameTest.py @@ -52,7 +52,7 @@ class ClashingNameTest(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -68,7 +68,7 @@ class ClashingNameTest(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py index 23d25634..21f03f75 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py @@ -29,7 +29,7 @@ class DMNCustomScriptTest(BaseTestCase): def complete_manual_task(self): manual_task = self.workflow.get_tasks_from_spec_name('manual_task')[0] - self.workflow.complete_task_from_id(manual_task.id) + self.workflow.run_task_from_id(manual_task.id) self.workflow.do_engine_steps() def testDmnHappy(self): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNDictTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNDictTest.py index 053614b4..38cf09ba 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNDictTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/DMNDictTest.py @@ -16,7 +16,7 @@ class DMNDictTest(BaseTestCase): 
self.workflow = BpmnWorkflow(self.spec) self.workflow.do_engine_steps() x = self.workflow.get_ready_user_tasks() - self.workflow.complete_task_from_id(x[0].id) + self.workflow.run_task_from_id(x[0].id) self.workflow.do_engine_steps() self.assertDictEqual(self.workflow.last_task.data, self.expectedResult) @@ -25,7 +25,7 @@ class DMNDictTest(BaseTestCase): self.workflow.do_engine_steps() self.save_restore() x = self.workflow.get_ready_user_tasks() - self.workflow.complete_task_from_id(x[0].id) + self.workflow.run_task_from_id(x[0].id) self.workflow.do_engine_steps() self.save_restore() self.assertDictEqual(self.workflow.last_task.data, self.expectedResult) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py index e7d5d13c..74b5dd99 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py @@ -41,7 +41,7 @@ class ExternalMessageBoundaryTest(BaseTestCase): self.assertEqual(True, ready_tasks[1].data['caughtinterrupt']) self.assertEqual('Meaningless User Task',ready_tasks[0].task_spec.description) self.assertEqual(False, ready_tasks[0].data['caughtinterrupt']) - ready_tasks[1].complete() + ready_tasks[1].run() self.workflow.do_engine_steps() # what I think is going on here is that when we hit the reset, it is updating the # last_task and appending the data to whatever happened there, so it would make sense that @@ -52,7 +52,7 @@ class ExternalMessageBoundaryTest(BaseTestCase): # The user activity was cancelled and we should continue from the boundary event self.assertEqual(1, len(ready_tasks),'Expected to have two ready tasks') event = self.workflow.get_tasks_from_spec_name('Event_19detfv')[0] - event.complete() + event.run() self.assertEqual('SomethingDrastic', event.data['reset_var']) self.assertEqual(False, 
event.data['caughtinterrupt']) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py index 757767d6..a00ed859 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py @@ -31,7 +31,7 @@ class BusinessRuleTaskParserTest(BaseTestCase): self.assertTrue(True, "An error was raised..") self.assertEqual("InvalidDecisionTaskId", we.task_spec.name) self.maxDiff = 1000 - self.assertEquals("Error evaluating expression 'spam= 1'. Rule failed on row 1. Business Rule Task 'Invalid Decision'.", str(we)) + self.assertEqual("Error evaluating expression 'spam= 1'. Rule failed on row 1. Business Rule Task 'Invalid Decision'.", str(we)) def suite(): return unittest.TestLoader().loadTestsFromTestCase(BusinessRuleTaskParserTest) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py index 8c0bf3c9..2b67be61 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py @@ -41,7 +41,7 @@ class MessageBoundaryTest(BaseTestCase): if task.task_spec.name == step[0]: task.update_data(step[1]) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() time.sleep(.01) self.workflow.refresh_waiting_tasks() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py index d0893c0e..c7de8527 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py @@ -23,10 +23,10 @@ class MultiInstanceDMNTest(BaseTestCase): self.save_restore() 
self.workflow.do_engine_steps() - self.workflow.complete_next() + self.workflow.run_next() self.save_restore() self.workflow.do_engine_steps() - self.workflow.complete_next() + self.workflow.run_next() self.save_restore() self.workflow.do_engine_steps() self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py index a3c79fbb..cd5120da 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py @@ -29,7 +29,7 @@ class NIMessageBoundaryTest(BaseTestCase): ready_tasks = self.workflow.get_tasks(TaskState.READY) self.assertEqual(1, len(ready_tasks)) - self.workflow.complete_task_from_id(ready_tasks[0].id) + self.workflow.run_task_from_id(ready_tasks[0].id) self.workflow.do_engine_steps() # first we run through a couple of steps where we answer No to each @@ -45,7 +45,7 @@ class NIMessageBoundaryTest(BaseTestCase): 'We got a ready task that we did not expect - %s'%( task.task_spec.name)) task.data[response[0]] = response[1] - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() # if we have a list of tasks - that list becomes invalid # after we do a save restore, so I'm completing the list @@ -66,7 +66,7 @@ class NIMessageBoundaryTest(BaseTestCase): 'We got a ready task that we did not expect - %s'%( task.task_spec.name)) task.data[response[0]] = response[1] - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -75,14 +75,14 @@ class NIMessageBoundaryTest(BaseTestCase): task = ready_tasks[0] self.assertEqual(task.task_spec.name,'Activity_DoWork') task.data['work_done'] = 'Yes' - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() 
ready_tasks = self.workflow.get_tasks(TaskState.READY) self.assertEqual(len(ready_tasks), 1) task = ready_tasks[0] self.assertEqual(task.task_spec.name, 'Activity_WorkCompleted') task.data['work_completed'] = 'Lots of Stuff' - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() self.assertEqual(self.workflow.is_completed(),True) self.assertEqual(self.workflow.last_task.data,{'Event_InterruptBoundary_Response': 'Youre late!', diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ParseMultiInstanceTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ParseMultiInstanceTest.py index 9f612a6d..37f170eb 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ParseMultiInstanceTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ParseMultiInstanceTest.py @@ -32,7 +32,7 @@ class ParseMultiInstanceTest(BaseTestCase): self.assertEqual(len(ready_tasks), 3) for task in ready_tasks: task.data['output_item'] = task.data['output_item'] * 2 - task.complete() + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) @@ -58,7 +58,7 @@ class ParseMultiInstanceTest(BaseTestCase): self.assertEqual(len(ready_tasks), 3) for task in ready_tasks: task.data['output_item'] = task.data['output_item'] * 2 - task.complete() + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) @@ -84,7 +84,7 @@ class ParseMultiInstanceTest(BaseTestCase): self.assertEqual(len(ready_tasks), 3) for task in ready_tasks: task.data['input_item'] = task.data['input_item'] * 2 - task.complete() + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenNestedParallelTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenNestedParallelTest.py index 698eac7e..d8c535ca 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenNestedParallelTest.py +++ 
b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenNestedParallelTest.py @@ -73,7 +73,7 @@ class ResetTokenTestNestedParallel(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() self.workflow.reset_task_from_id(firsttaskid) @@ -91,7 +91,7 @@ class ResetTokenTestNestedParallel(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() notworking = self.workflow.get_ready_user_tasks() @@ -148,7 +148,7 @@ class ResetTokenTestNestedParallel(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -182,7 +182,7 @@ class ResetTokenTestNestedParallel(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelMatrixTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelMatrixTest.py index a5cb0af0..7b90e7f8 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelMatrixTest.py +++ 
b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelMatrixTest.py @@ -76,7 +76,7 @@ class ResetTokenTestParallelMatrix(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -96,7 +96,7 @@ class ResetTokenTestParallelMatrix(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -154,7 +154,7 @@ class ResetTokenTestParallelMatrix(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -190,7 +190,7 @@ class ResetTokenTestParallelMatrix(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelTaskCountTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelTaskCountTest.py index 04eb7cf9..bce088cb 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelTaskCountTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenParallelTaskCountTest.py @@ -29,22 +29,19 @@ class ResetTokenParallelTaskCountTest(BaseTestCase): # 
number of tasks self.workflow.do_engine_steps() - self.assertEquals(total, len(self.workflow.get_tasks())) + self.assertEqual(total, len(self.workflow.get_tasks())) # Tell the exclusive gateway to skip the parallel tasks section. # We should still have the same number of tasks. data = {'skipParallel': True} task = self.workflow.get_ready_user_tasks()[0] task.data = data - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.assertEquals(total, len(self.workflow.get_tasks())) # Reset the token to the first user task. # We should still have the same number of tasks. - self.workflow.task_tree.dump() task.reset_token({}, reset_data=True) - print('=-----') - self.workflow.task_tree.dump() self.assertEquals(total, len(self.workflow.get_tasks())) self.assertEquals(1, len(self.workflow.get_ready_user_tasks())) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py index f9236c88..f8a288e0 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py @@ -51,7 +51,7 @@ class ResetTokenTestSubProcess(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -75,7 +75,7 @@ class ResetTokenTestSubProcess(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -128,7 +128,7 @@ class ResetTokenTestSubProcess(BaseTestCase): 
firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -164,7 +164,7 @@ class ResetTokenTestSubProcess(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenTest.py index 555e2068..495e940e 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/ResetTokenTest.py @@ -54,7 +54,7 @@ class ResetTokenTest(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -70,7 +70,7 @@ class ResetTokenTest(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/StartMessageEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/StartMessageEventTest.py index dd6200f5..07e4bf0f 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/StartMessageEventTest.py +++ 
b/SpiffWorkflow/tests/SpiffWorkflow/camunda/StartMessageEventTest.py @@ -48,7 +48,7 @@ class StartMessageTest(BaseTestCase): current_task = ready_tasks[0] self.assertEqual(current_task.task_spec.name,step[0]) current_task.update_data(step[1]) - current_task.complete() + current_task.run() self.workflow.do_engine_steps() self.workflow.refresh_waiting_tasks() if save_restore: diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/SubWorkflowTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/SubWorkflowTest.py index 75331022..97aed8de 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/SubWorkflowTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/SubWorkflowTest.py @@ -31,7 +31,7 @@ class SubWorkflowTest(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual("Activity_"+answer, task.task_spec.name) task.update_data({"Field"+answer: answer}) - self.workflow.complete_task_from_id(task.id) + self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() if save_restore: self.save_restore() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/UserTaskParserTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/UserTaskParserTest.py index 56c4f474..d69dcdcc 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/UserTaskParserTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/UserTaskParserTest.py @@ -16,22 +16,22 @@ class UserTaskParserTest(BaseTestCase): def testGetEnumField(self): form = self.spec.task_specs['Task_User_Select_Type'].form - self.assertEquals("Fact", form.key) - self.assertEquals(1, len(form.fields)) - self.assertEquals("type", form.fields[0].id) - self.assertEquals(3, len(form.fields[0].options)) + self.assertEqual("Fact", form.key) + self.assertEqual(1, len(form.fields)) + self.assertEqual("type", form.fields[0].id) + self.assertEqual(3, len(form.fields[0].options)) def testGetFieldProperties(self): form = self.spec.task_specs['Task_User_Select_Type'].form - self.assertEquals(1, 
len(form.fields[0].properties)) - self.assertEquals('description', form.fields[0].properties[0].id) - self.assertEquals('Choose from the list of available types of random facts', form.fields[0].properties[0].value) + self.assertEqual(1, len(form.fields[0].properties)) + self.assertEqual('description', form.fields[0].properties[0].id) + self.assertEqual('Choose from the list of available types of random facts', form.fields[0].properties[0].value) def testGetFieldValidation(self): form = self.spec.task_specs['Task_User_Select_Type'].form - self.assertEquals(1, len(form.fields[0].validation)) - self.assertEquals('maxlength', form.fields[0].validation[0].name) - self.assertEquals('25', form.fields[0].validation[0].config) + self.assertEqual(1, len(form.fields[0].validation)) + self.assertEqual('maxlength', form.fields[0].validation[0].name) + self.assertEqual('25', form.fields[0].validation[0].config) def testNoFormDoesNotBombOut(self): self.load_workflow_spec('no_form.bpmn', 'no_form') diff --git a/SpiffWorkflow/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py b/SpiffWorkflow/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py index b24f1c37..74726f8c 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py @@ -20,12 +20,11 @@ class UserTaskSpecTest(unittest.TestCase): self.user_spec = self.create_instance() def testConstructor(self): - self.assertEquals(self.user_spec.name, 'userTask') + self.assertEqual(self.user_spec.name, 'userTask') self.assertEqual(self.user_spec.data, {}) self.assertEqual(self.user_spec.defines, {}) self.assertEqual(self.user_spec.pre_assign, []) self.assertEqual(self.user_spec.post_assign, []) - self.assertEqual(self.user_spec.locks, []) def test_set_form(self): self.assertEqual(self.form, self.user_spec.form) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/ControlFlowPatternTest.py 
b/SpiffWorkflow/tests/SpiffWorkflow/core/ControlFlowPatternTest.py new file mode 100644 index 00000000..52286ae2 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/ControlFlowPatternTest.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +from unittest import TestCase +from .pattern_base import WorkflowPatternTestCase + +# This combines the old pattern tests with the old serializer tests, creating one test per pattern +# that tests the tasks in it can be serialized with our serializers and the workflows run with the +# expected output. This format is a little annoying (inheriting from two classes with the actual +# work being done in the secondary class); however, this is the most concise thing I could manage. +# +# There were also a fair amount of never-used options in those tests, so the tests in the base case +# are a lot simpler than the ones they replaced. + +class SequenceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/sequence') + +class ParallelSplitTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/parallel_split') + +class SynchronizationTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/synchronization') + +class ExclusiveChoiceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/exclusive_choice') + +class SimpleMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/simple_merge') + +class MultiChoiceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/multi_choice') + +class StructuredSynchronizingMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/structured_synchronizing_merge') + +class MultiMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/multi_merge') + +class 
StructuredDiscriminatorTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/structured_discriminator') + +class BlockingDiscriminatorTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/blocking_discriminator') + +class CacncellingDiscriminatorTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancelling_discriminator') + +class StructuredPartialJoin(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/structured_partial_join') + +class BlockingPartialJoin(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/blocking_partial_join') + +class CancellingPartialJoin(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancelling_partial_join') + +class GeneralizedAndJoin(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/generalized_and_join') + +class LocalSynchronizingMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/acyclic_synchronizing_merge') + +class GeneralSynchronizingMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/general_synchronizing_merge') + +class ThreadMergeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/thread_merge') + +class ThreadSplitTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/thread_split') + +class MultiInstanceWithoutSynchonizationTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/multi_instance_without_synch') + +class MultiInstanceWithDesignTimeKnowledgeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/multi_instance_with_a_priori_design_time_knowledge') + +class 
MultiInstanceWithRunTimeKnowledgeTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/multi_instance_with_a_priori_run_time_knowledge') + +class StaticPartialJoinMultiInstanceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/static_partial_join_for_multi_instance') + +class CancellingPartialJoinMultiInstanceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancelling_partial_join_for_multi_instance') + +class DynamicPartialJoinMultiInstanceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/dynamic_partial_join_for_multi_instance') + +class DeferredChoiceTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/deferred_choice') + +class InterleavedParallelRoutingTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/interleaved_parallel_routing') + +class MilestoneTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/milestone') + +class CriticalSectionTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/critical_section') + +class InterleavedRoutingTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/interleaved_routing') + +class CancelTaskTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancel_task') + +class CancelCaseTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancel_case') + +class CancelRegionTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancel_region') + +class CancelMultiInstanceTaskTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/cancel_multi_instance_task') + +class 
CompleteMultiInstanceTaskTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/complete_multiple_instance_activity') + +class ArbitraryCyclesTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/arbitrary_cycles') + +class RecursionTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/recursion') + + # I am disabling this test because I have wasted an entire day trying to make it pass + # The workflow completes and the task tree is as expected, but the subworkflow tasks + # no longer appear in the taken path. This is because they are connected to the subworkflow + # in on_reached_cb, which now occurs after they are executed. + # Moving subworkflow creation to predict would likely fix the problem, but there are problems + # with prediction that need to be fixed as well. + + #def test_run_workflow(self): + # pass + +class ImplicitTerminationTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/implicit_termination') + +class ExplicitTerminationTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/explicit_termination') + +class TransientTriggerTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/transient_trigger') + +class PersistentTriggerTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('control-flow/persistent_trigger') diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/DataPatternTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/DataPatternTest.py new file mode 100644 index 00000000..2561afb4 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/DataPatternTest.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +from unittest import TestCase +from .pattern_base import WorkflowPatternTestCase + + +class TaskDataTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + 
self.load_from_xml('data/task_data') + +class BlockDataTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('data/block_data') + +class TaskToTaskTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('data/task_to_task') + +class BlockToSubworkflowTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('data/block_to_subworkflow') + +class SubworkflowTOBlockTest(TestCase, WorkflowPatternTestCase): + def setUp(self): + self.load_from_xml('data/subworkflow_to_block') \ No newline at end of file diff --git a/SpiffWorkflow/tests/SpiffWorkflow/ExecuteProcessMock.py b/SpiffWorkflow/tests/SpiffWorkflow/core/ExecuteProcessMock.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/ExecuteProcessMock.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/ExecuteProcessMock.py diff --git a/SpiffWorkflow/tests/SpiffWorkflow/PersistSmallWorkflowTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/PersistSmallWorkflowTest.py similarity index 92% rename from SpiffWorkflow/tests/SpiffWorkflow/PersistSmallWorkflowTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/PersistSmallWorkflowTest.py index b554be18..2b2b03c0 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/PersistSmallWorkflowTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/PersistSmallWorkflowTest.py @@ -1,9 +1,5 @@ # -*- coding: utf-8 -*- - -import sys import unittest -import os.path -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) from SpiffWorkflow.workflow import Workflow from SpiffWorkflow.specs.Join import Join @@ -51,15 +47,15 @@ class PersistSmallWorkflowTest(unittest.TestCase): tasks = workflow.get_tasks(TaskState.READY) task_start = tasks[0] - workflow.complete_task_from_id(task_start.id) + workflow.run_task_from_id(task_start.id) tasks = workflow.get_tasks(TaskState.READY) multichoice = tasks[0] - workflow.complete_task_from_id(multichoice.id) + workflow.run_task_from_id(multichoice.id) tasks 
= workflow.get_tasks(TaskState.READY) task_a1 = tasks[0] - workflow.complete_task_from_id(task_a1.id) + workflow.run_task_from_id(task_a1.id) return workflow def testDictionarySerializer(self): @@ -104,7 +100,7 @@ class PersistSmallWorkflowTest(unittest.TestCase): """ old_workflow = self.workflow - old_workflow.complete_next() + old_workflow.run_next() self.assertEqual('task_a2', old_workflow.last_task.get_name()) serializer = DictionarySerializer() serialized_workflow = old_workflow.serialize(serializer) @@ -112,7 +108,7 @@ class PersistSmallWorkflowTest(unittest.TestCase): serializer = DictionarySerializer() new_workflow = Workflow.deserialize(serializer, serialized_workflow) self.assertEqual('task_a2', old_workflow.last_task.get_name()) - new_workflow.complete_all() + new_workflow.run_all() self.assertEqual('task_a2', old_workflow.last_task.get_name()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/TaskTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/TaskTest.py similarity index 97% rename from SpiffWorkflow/tests/SpiffWorkflow/TaskTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/TaskTest.py index 77e28df6..9f5e0d12 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/TaskTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/TaskTest.py @@ -69,7 +69,7 @@ class TaskTest(unittest.TestCase): # Run the iterator test. 
result = '' - for thetask in Task.Iterator(root, Task.MAYBE): + for thetask in Task.Iterator(root, TaskState.MAYBE): result += thetask.get_dump(0, False) + '\n' self.assertTrue(expected2.match(result), 'Expected:\n' + repr(expected2.pattern) + '\n' + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/WorkflowTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/WorkflowTest.py similarity index 86% rename from SpiffWorkflow/tests/SpiffWorkflow/WorkflowTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/WorkflowTest.py index 273ced0d..726598e9 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/WorkflowTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/WorkflowTest.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- -import sys import unittest import os -data_dir = os.path.join(os.path.dirname(__file__), 'data') -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) from lxml import etree @@ -15,6 +12,7 @@ from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec from SpiffWorkflow.task import TaskState from SpiffWorkflow.serializer.prettyxml import XmlSerializer +data_dir = os.path.join(os.path.dirname(__file__), 'data') class WorkflowTest(unittest.TestCase): @@ -27,7 +25,7 @@ class WorkflowTest(unittest.TestCase): """ Simulates interactive calls, as would be issued by a user. 
""" - xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml') + xml_file = os.path.join(data_dir, 'workflow1.xml') with open(xml_file) as fp: xml = etree.parse(fp).getroot() wf_spec = WorkflowSpec.deserialize(XmlSerializer(), xml) @@ -36,7 +34,7 @@ class WorkflowTest(unittest.TestCase): tasks = workflow.get_tasks(TaskState.READY) self.assertEqual(len(tasks), 1) self.assertEqual(tasks[0].task_spec.name, 'Start') - workflow.complete_task_from_id(tasks[0].id) + workflow.run_task_from_id(tasks[0].id) self.assertEqual(tasks[0].state, TaskState.COMPLETED) tasks = workflow.get_tasks(TaskState.READY) @@ -47,7 +45,7 @@ class WorkflowTest(unittest.TestCase): self.assertEqual(task_a1.task_spec.name, 'task_a1') self.assertEqual(task_b1.task_spec.__class__, Simple) self.assertEqual(task_b1.task_spec.name, 'task_b1') - workflow.complete_task_from_id(task_a1.id) + workflow.run_task_from_id(task_a1.id) self.assertEqual(task_a1.state, TaskState.COMPLETED) tasks = workflow.get_tasks(TaskState.READY) @@ -56,16 +54,16 @@ class WorkflowTest(unittest.TestCase): task_a2 = tasks[0] self.assertEqual(task_a2.task_spec.__class__, Simple) self.assertEqual(task_a2.task_spec.name, 'task_a2') - workflow.complete_task_from_id(task_a2.id) + workflow.run_task_from_id(task_a2.id) tasks = workflow.get_tasks(TaskState.READY) self.assertEqual(len(tasks), 1) self.assertTrue(task_b1 in tasks) - workflow.complete_task_from_id(task_b1.id) + workflow.run_task_from_id(task_b1.id) tasks = workflow.get_tasks(TaskState.READY) self.assertEqual(len(tasks), 1) - workflow.complete_task_from_id(tasks[0].id) + workflow.run_task_from_id(tasks[0].id) tasks = workflow.get_tasks(TaskState.READY) self.assertEqual(len(tasks), 1) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/__init__.py b/SpiffWorkflow/tests/SpiffWorkflow/core/__init__.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/__init__.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/__init__.py diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/__init__.py b/SpiffWorkflow/tests/SpiffWorkflow/core/data/__init__.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/__init__.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/__init__.py diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/acyclic_synchronizing_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/acyclic_synchronizing_merge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/acyclic_synchronizing_merge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/acyclic_synchronizing_merge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/acyclic_synchronizing_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/acyclic_synchronizing_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/acyclic_synchronizing_merge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/acyclic_synchronizing_merge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/arbitrary_cycles.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/arbitrary_cycles.path new file mode 100644 index 00000000..3bc55e36 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/arbitrary_cycles.path @@ -0,0 +1,9 @@ +Start + first + excl_choice_1 + go_to_repetition + first + excl_choice_1 + task_c1 + last + End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/arbitrary_cycles.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/arbitrary_cycles.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/arbitrary_cycles.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/arbitrary_cycles.xml diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_discriminator.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_discriminator.path new file mode 100644 index 00000000..c17c3fcd --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_discriminator.path @@ -0,0 +1,15 @@ +Start + first + task_f1 + struct_discriminator_1 + excl_choice_1 + first + task_f1 + struct_discriminator_1 + excl_choice_1 + last + End + task_f2 + task_f3 + task_f2 + task_f3 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_discriminator.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_discriminator.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_discriminator.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_discriminator.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_partial_join.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_partial_join.path new file mode 100644 index 00000000..147b4346 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_partial_join.path @@ -0,0 +1,15 @@ +Start + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + excl_choice_1 + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + excl_choice_1 + last + End + task_e4 + task_e4 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_partial_join.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_partial_join.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_partial_join.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/blocking_partial_join.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_case.path 
b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_case.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_case.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_case.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_case.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_case.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_case.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_case.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_multi_instance_task.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_multi_instance_task.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_multi_instance_task.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_multi_instance_task.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_multi_instance_task.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_multi_instance_task.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_multi_instance_task.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_multi_instance_task.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_region.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_region.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_region.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_region.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_region.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_region.xml similarity index 
100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_region.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_region.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_task.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_task.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_task.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_task.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_task.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_task.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancel_task.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancel_task.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_discriminator.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_discriminator.path new file mode 100644 index 00000000..91efc867 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_discriminator.path @@ -0,0 +1,11 @@ +Start + first + task_f1 + struct_discriminator_1 + excl_choice_1 + first + task_f1 + struct_discriminator_1 + excl_choice_1 + last + End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_discriminator.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_discriminator.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_discriminator.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_discriminator.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join.path new file mode 100644 
index 00000000..4c0ef3a7 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join.path @@ -0,0 +1,13 @@ +Start + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + excl_choice_1 + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + excl_choice_1 + last + End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join_for_multi_instance.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join_for_multi_instance.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join_for_multi_instance.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join_for_multi_instance.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join_for_multi_instance.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join_for_multi_instance.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join_for_multi_instance.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/cancelling_partial_join_for_multi_instance.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/complete_multiple_instance_activity.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/complete_multiple_instance_activity.path similarity index 100% rename from 
SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/complete_multiple_instance_activity.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/complete_multiple_instance_activity.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/complete_multiple_instance_activity.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/complete_multiple_instance_activity.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/complete_multiple_instance_activity.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/complete_multiple_instance_activity.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/critical_section.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/critical_section.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/critical_section.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/critical_section.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/critical_section.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/critical_section.xml similarity index 91% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/critical_section.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/critical_section.xml index cb3e93b8..94e88460 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/critical_section.xml +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/critical_section.xml @@ -10,20 +10,16 @@ - lock_one one_2 - lock_two one_3 - lock_three two_2 one_4 - lock_four last diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/deferred_choice.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/deferred_choice.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/deferred_choice.path rename to 
SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/deferred_choice.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/deferred_choice.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/deferred_choice.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/deferred_choice.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/deferred_choice.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/dynamic_partial_join_for_multi_instance.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/dynamic_partial_join_for_multi_instance.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/dynamic_partial_join_for_multi_instance.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/dynamic_partial_join_for_multi_instance.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/dynamic_partial_join_for_multi_instance.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/dynamic_partial_join_for_multi_instance.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/dynamic_partial_join_for_multi_instance.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/dynamic_partial_join_for_multi_instance.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/exclusive_choice.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/exclusive_choice.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/exclusive_choice.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/exclusive_choice.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/exclusive_choice.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/exclusive_choice.xml similarity index 100% rename from 
SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/exclusive_choice.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/exclusive_choice.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/explicit_termination.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/explicit_termination.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/explicit_termination.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/explicit_termination.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/explicit_termination.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/explicit_termination.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/explicit_termination.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/explicit_termination.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/general_synchronizing_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/general_synchronizing_merge.path new file mode 100644 index 00000000..60f862fa --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/general_synchronizing_merge.path @@ -0,0 +1,19 @@ +Start + first + task_a1 + task_b1 + task_c1 + loop_back_to_c1_once + task_c1 + loop_back_to_c1_once + task_c2 + go_to_stub + stub_1 + loop_back_to_stub_1_once + stub_1 + loop_back_to_stub_1_once + join + End + go_to_stub_3 + stub_3 + foo diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/general_synchronizing_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/general_synchronizing_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/general_synchronizing_merge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/general_synchronizing_merge.xml diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/generalized_and_join.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/generalized_and_join.path new file mode 100644 index 00000000..436467f6 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/generalized_and_join.path @@ -0,0 +1,21 @@ +Start + first + task_e1 + task_f1 + task_e2 + task_f2 + task_e3 + task_f3 + struct_synch_merge_1 + excl_choice_1 + first + task_e1 + task_f1 + task_e2 + task_f2 + task_e3 + task_f3 + struct_synch_merge_1 + excl_choice_1 + last + End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/generalized_and_join.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/generalized_and_join.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/generalized_and_join.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/generalized_and_join.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/implicit_termination.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/implicit_termination.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/implicit_termination.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/implicit_termination.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/implicit_termination.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/implicit_termination.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/implicit_termination.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/implicit_termination.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_parallel_routing.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_parallel_routing.path similarity index 100% rename from 
SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_parallel_routing.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_parallel_routing.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_parallel_routing.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_parallel_routing.xml similarity index 87% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_parallel_routing.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_parallel_routing.xml index 6ff3f7f1..cc4e5d32 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_parallel_routing.xml +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_parallel_routing.xml @@ -10,18 +10,15 @@ - one_task_at_a_time join - one_task_at_a_time two2 - one_task_at_a_time join diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_routing.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_routing.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_routing.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_routing.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_routing.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_routing.xml similarity index 81% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_routing.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_routing.xml index 7cf4a99d..d9bb123a 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/interleaved_routing.xml +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/interleaved_routing.xml @@ -9,7 +9,6 @@ - one_task_at_a_time one1 two1 three1 @@ -18,25 +17,20 @@ - 
one_task_at_a_time join - one_task_at_a_time join - one_task_at_a_time join - one_task_at_a_time join - one_task_at_a_time last diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/milestone.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/milestone.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/milestone.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/milestone.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/milestone.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/milestone.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/milestone.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/milestone.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_choice.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_choice.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_choice.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_choice.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_choice.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_choice.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_choice.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_choice.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_design_time_knowledge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_design_time_knowledge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_design_time_knowledge.path rename to 
SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_design_time_knowledge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_design_time_knowledge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_design_time_knowledge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_design_time_knowledge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_design_time_knowledge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_run_time_knowledge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_run_time_knowledge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_run_time_knowledge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_run_time_knowledge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_run_time_knowledge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_run_time_knowledge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_with_a_priori_run_time_knowledge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_with_a_priori_run_time_knowledge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_a_priori.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_a_priori.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_a_priori.path rename to 
SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_a_priori.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_a_priori.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_a_priori.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_a_priori.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_a_priori.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_synch.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_synch.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_synch.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_synch.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_synch.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_synch.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_instance_without_synch.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_instance_without_synch.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_merge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_merge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_merge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/multi_merge.xml 
rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/multi_merge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/parallel_split.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/parallel_split.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/parallel_split.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/parallel_split.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/parallel_split.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/parallel_split.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/parallel_split.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/parallel_split.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/persistent_trigger.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/persistent_trigger.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/persistent_trigger.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/persistent_trigger.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/persistent_trigger.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/persistent_trigger.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/persistent_trigger.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/persistent_trigger.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/recursion.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/recursion.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/recursion.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/recursion.path diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/recursion.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/recursion.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/recursion.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/recursion.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/sequence.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/sequence.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/sequence.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/sequence.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/sequence.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/sequence.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/sequence.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/sequence.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/simple_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/simple_merge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/simple_merge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/simple_merge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/simple_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/simple_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/simple_merge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/simple_merge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/static_partial_join_for_multi_instance.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/static_partial_join_for_multi_instance.path similarity index 100% rename from 
SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/static_partial_join_for_multi_instance.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/static_partial_join_for_multi_instance.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/static_partial_join_for_multi_instance.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/static_partial_join_for_multi_instance.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/static_partial_join_for_multi_instance.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/static_partial_join_for_multi_instance.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_discriminator.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_discriminator.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_discriminator.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_discriminator.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_discriminator.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_discriminator.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_discriminator.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_discriminator.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_partial_join.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_partial_join.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_partial_join.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_partial_join.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_partial_join.xml 
b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_partial_join.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_partial_join.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_partial_join.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_synchronizing_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_synchronizing_merge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_synchronizing_merge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_synchronizing_merge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_synchronizing_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_synchronizing_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/structured_synchronizing_merge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/structured_synchronizing_merge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join.xml diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join_inner.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join_inner.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/subworkflow_to_join_inner.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/subworkflow_to_join_inner.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/synchronization.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/synchronization.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/synchronization.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/synchronization.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/synchronization.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/synchronization.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/synchronization.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/synchronization.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_merge.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_merge.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_merge.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_merge.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_merge.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_merge.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_merge.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_split.path 
b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_split.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_split.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_split.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_split.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_split.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/thread_split.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/thread_split.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/transient_trigger.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/transient_trigger.path similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/transient_trigger.path rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/transient_trigger.path diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/transient_trigger.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/transient_trigger.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/transient_trigger.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/control-flow/transient_trigger.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data.xml.data b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data.xml.data similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data.xml.data rename to 
SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data.xml.data diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data_inner.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data_inner.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_data_inner.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_data_inner.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow.xml.data b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow.xml.data similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow.xml.data rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow.xml.data diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow_inner.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow_inner.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/block_to_subworkflow_inner.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/block_to_subworkflow_inner.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block.xml.data 
b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block.xml.data similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block.xml.data rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block.xml.data diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block_inner.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block_inner.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/subworkflow_to_block_inner.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/subworkflow_to_block_inner.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_data.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_data.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_data.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_data.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_data.xml.data b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_data.xml.data similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_data.xml.data rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_data.xml.data diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_to_task.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_to_task.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_to_task.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_to_task.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_to_task.xml.data b/SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_to_task.xml.data similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/data/task_to_task.xml.data rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/data/task_to_task.xml.data diff --git 
a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/resource/.gitignore b/SpiffWorkflow/tests/SpiffWorkflow/core/data/resource/.gitignore similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/resource/.gitignore rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/resource/.gitignore diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.path b/SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.path new file mode 100644 index 00000000..fa7bb65a --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.path @@ -0,0 +1,42 @@ +Start + task_a1 + task_a2 + task_b1 + task_b2 + synch_1 + excl_choice_1 + task_c1 + excl_choice_2 + task_d3 + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + task_f1 + struct_discriminator_1 + excl_choice_3 + excl_choice_1 + task_c1 + excl_choice_2 + task_d3 + multi_choice_1 + task_e1 + task_e3 + struct_synch_merge_1 + task_f1 + struct_discriminator_1 + excl_choice_3 + multi_instance_1 + task_g1 + task_g2 + task_g1 + task_g2 + task_g1 + task_g2 + struct_synch_merge_2 + last + End + task_f2 + task_f3 + task_f2 + task_f3 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.py b/SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.py diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.xml b/SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.xml similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.xml rename to SpiffWorkflow/tests/SpiffWorkflow/core/data/workflow1.xml diff --git a/SpiffWorkflow/tests/SpiffWorkflow/docTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/docTest.py similarity index 90% rename from SpiffWorkflow/tests/SpiffWorkflow/docTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/docTest.py index 3a1ef1cf..7d600eb3 100644 --- 
a/SpiffWorkflow/tests/SpiffWorkflow/docTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/docTest.py @@ -4,8 +4,8 @@ import sys import unittest import os dirname = os.path.abspath(os.path.dirname(__file__)) -sys.path.insert(0, os.path.join(dirname, '..', '..')) -doc_dir = os.path.join(dirname, '..', '..', 'doc') +sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) +doc_dir = os.path.join(dirname, '..', '..', '..', 'doc') class TutorialTest(object): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/core/pattern_base.py b/SpiffWorkflow/tests/SpiffWorkflow/core/pattern_base.py new file mode 100644 index 00000000..50cb6c78 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/pattern_base.py @@ -0,0 +1,123 @@ +import os +import time +import warnings + +from lxml import etree + +from SpiffWorkflow.workflow import Workflow +from SpiffWorkflow.task import Task +from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec + +from SpiffWorkflow.serializer.prettyxml import XmlSerializer as PrettyXmlSerializer +from SpiffWorkflow.serializer.xml import XmlSerializer +from SpiffWorkflow.serializer.dict import DictionarySerializer +from SpiffWorkflow.serializer.json import JSONSerializer + +from SpiffWorkflow.serializer.exceptions import TaskNotSupportedError + +from .util import track_workflow + +data_dir = os.path.join(os.path.dirname(__file__), 'data') +xml_serializer = XmlSerializer() +dict_serializer = DictionarySerializer() +json_serializer = JSONSerializer() + +class WorkflowPatternTestCase: + + def init_thread_pool(self): + Task.id_pool = 0 + Task.thread_id_pool = 0 + + def load_from_xml(self, pattern): + + self.init_thread_pool() + prefix = os.path.join(data_dir, pattern) + filename = f'{prefix}.xml' + with open(filename) as fp: + xml = etree.parse(fp).getroot() + # This "serializer" is a parser; it doesn't deserialize. + # Because we use it to load all the workflows, consider it tested here. 
+ serializer = PrettyXmlSerializer() + self.spec = WorkflowSpec.deserialize(serializer, xml, filename=filename) + + path_file = f'{prefix}.path' + if os.path.exists(path_file): + with open(path_file) as fp: + self.expected_path = fp.read() + else: + self.expected_path = None + + data_file = f'{prefix}.data' + if os.path.exists(data_file): + with open(data_file) as fp: + self.expected_data = fp.read() + else: + self.expected_data = None + + self.taken_path = track_workflow(self.spec) + self.workflow = Workflow(self.spec) + + def serialize(self, spec_or_workflow, serializer): + + try: + before = spec_or_workflow.serialize(serializer) + restored = spec_or_workflow.deserialize(serializer, before) + after = restored.serialize(serializer) + return before, after + except TaskNotSupportedError as exc: + warnings.warn(f'Unsupported task spec: {exc}') + return None, None + + def run_workflow(self): + # We allow the workflow to require a maximum of 5 seconds to complete, to allow for testing long running tasks. + for i in range(10): + self.workflow.run_all(False) + if self.workflow.is_completed(): + break + time.sleep(0.5) + + def test_run_workflow(self): + + self.run_workflow() + self.assertTrue(self.workflow.is_completed()) + + # Check whether the correct route was taken. + if self.expected_path is not None: + taken_path = '\n'.join(self.taken_path) + '\n' + self.assertEqual(taken_path, self.expected_path) + + # Check data availibility. 
+ if self.expected_data is not None: + result = self.workflow.get_data('data', '') + self.assertIn(result, self.expected_data) + + def test_xml_serializer(self): + + prepare_result = lambda item: etree.tostring(item, pretty_print=True) + + before, after = self.serialize(self.spec, xml_serializer) + self.assertEqual(prepare_result(before), prepare_result(after)) + self.assertIsInstance(before, etree._Element) + + before, after = self.serialize(self.workflow, xml_serializer) + if before is not None: + self.assertEqual(prepare_result(before), prepare_result(after)) + + def test_dictionary_serializer(self): + + before, after = self.serialize(self.spec, dict_serializer) + self.assertDictEqual(before, after) + self.assertIsInstance(before, dict) + + before, after = self.serialize(self.workflow, dict_serializer) + if before is not None: + self.assertDictEqual(before, after) + + def test_json_serializer(self): + + before, after = self.serialize(self.spec, json_serializer) + self.assertEqual(before, after) + self.assertIsInstance(before, str) + + before, after = self.serialize(self.workflow, json_serializer) + self.assertEqual(before, after) \ No newline at end of file diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/CeleryTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/CeleryTest.py similarity index 90% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/CeleryTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/CeleryTest.py index ac811688..5f4232d7 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/CeleryTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/CeleryTest.py @@ -1,10 +1,8 @@ # -*- coding: utf-8 -*- -import os -import sys import unittest import pickle -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) + from .TaskSpecTest import TaskSpecTest from SpiffWorkflow.specs.Celery import Celery from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec @@ -64,12 +62,17 @@ class CeleryTest(TaskSpecTest): data = 
{'R': b64encode(pickle.dumps('1'))} # Comes from live data. Bug not identified, but there we are... data = {'inputs': ['Wait:1'], 'lookahead': 2, 'description': '', - 'outputs': [], 'args': args, + 'outputs': [], + 'args': args, 'manual': False, - 'data': data, 'locks': [], 'pre_assign': [], + 'data': data, + 'pre_assign': [], 'call': 'call.x', - 'internal': False, 'post_assign': [], 'id': 8, - 'result_key': None, 'defines': data, + 'internal': False, + 'post_assign': [], + 'id': 8, + 'result_key': None, + 'defines': data, 'class': 'SpiffWorkflow.specs.Celery.Celery', 'name': 'RS1:1'} Celery.deserialize(serializer, new_wf_spec, data) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/DeepMergeTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/DeepMergeTest.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/DeepMergeTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/DeepMergeTest.py diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/ExecuteTest.py similarity index 96% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/ExecuteTest.py index 3a10ae75..7bb677d7 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/ExecuteTest.py @@ -1,15 +1,12 @@ # -*- coding: utf-8 -*- - - - import os import unittest -from tests.SpiffWorkflow.util import run_workflow -from .TaskSpecTest import TaskSpecTest from SpiffWorkflow.task import TaskState from SpiffWorkflow.specs.Execute import Execute +from .TaskSpecTest import TaskSpecTest +from ..util import run_workflow class ExecuteTest(TaskSpecTest): CORRELATE = Execute diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/JoinTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/JoinTest.py similarity index 85% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/JoinTest.py rename to 
SpiffWorkflow/tests/SpiffWorkflow/core/specs/JoinTest.py index 7224701f..9aca17d6 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/JoinTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/JoinTest.py @@ -1,15 +1,10 @@ # -*- coding: utf-8 -*- - - -import os -import sys import unittest -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) -from .TaskSpecTest import TaskSpecTest from SpiffWorkflow.specs.Join import Join +from .TaskSpecTest import TaskSpecTest class JoinTest(TaskSpecTest): CORRELATE = Join diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/MergeTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/MergeTest.py similarity index 96% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/MergeTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/MergeTest.py index 6fc692a6..745e18b2 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/MergeTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/MergeTest.py @@ -1,11 +1,6 @@ # -*- coding: utf-8 -*- - - -import os -import sys import unittest -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) from .JoinTest import JoinTest from SpiffWorkflow.specs.Merge import Merge @@ -61,7 +56,7 @@ class MergeTest(JoinTest): workflow.task_tree.set_data(everywhere=1) for task in workflow.get_tasks(): task.set_data(**{'name': task.get_name(), task.get_name(): 1}) - workflow.complete_all() + workflow.run_all() self.assertTrue(workflow.is_completed()) found = {} for task in workflow.get_tasks(): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/SubWorkflowTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/SubWorkflowTest.py similarity index 93% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/SubWorkflowTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/SubWorkflowTest.py index 3e99f993..892fff21 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/SubWorkflowTest.py +++ 
b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/SubWorkflowTest.py @@ -1,13 +1,9 @@ # -*- coding: utf-8 -*- - -import sys import unittest import os from lxml import etree -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) - from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec from SpiffWorkflow.specs.SubWorkflow import SubWorkflow from SpiffWorkflow.serializer.prettyxml import XmlSerializer @@ -29,7 +25,7 @@ class TaskSpecTest(unittest.TestCase): def load_workflow_spec(self, folder, f): file = os.path.join( - os.path.dirname(__file__), '..', 'data', 'spiff', folder, f) + os.path.dirname(__file__), '..', 'data', folder, f) serializer = XmlSerializer() with open(file) as fp: xml = etree.parse(fp).getroot() @@ -46,17 +42,16 @@ class TaskSpecTest(unittest.TestCase): self.assertEqual(1, len(ready_tasks)) task = ready_tasks[0] self.assertEqual(name, task.task_spec.name) - task.complete() + task.run() def do_next_named_step(self, name, other_ready_tasks): # This method completes a single task from the specified set of ready # tasks ready_tasks = self.workflow.get_tasks(TaskState.READY) all_tasks = sorted([name] + other_ready_tasks) - self.assertEqual( - all_tasks, sorted([t.task_spec.name for t in ready_tasks])) + self.assertEqual(all_tasks, sorted([t.task_spec.name for t in ready_tasks])) task = list([t for t in ready_tasks if t.task_spec.name == name])[0] - task.complete() + task.run() def test_block_to_subworkflow(self): self.load_workflow_spec('data', 'block_to_subworkflow.xml') diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/TaskSpecTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/TaskSpecTest.py similarity index 96% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/TaskSpecTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/TaskSpecTest.py index 71784656..ea36f758 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/TaskSpecTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/TaskSpecTest.py @@ -1,9 +1,5 
@@ # -*- coding: utf-8 -*- - -import sys import unittest -import os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) from SpiffWorkflow.specs.Join import Join from SpiffWorkflow.specs.Simple import Simple @@ -32,7 +28,6 @@ class TaskSpecTest(unittest.TestCase): self.assertEqual(self.spec.defines, {}) self.assertEqual(self.spec.pre_assign, []) self.assertEqual(self.spec.post_assign, []) - self.assertEqual(self.spec.locks, []) def testSetData(self): self.assertEqual(self.spec.get_data('foo'), None) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/TransformTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/TransformTest.py similarity index 91% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/TransformTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/TransformTest.py index 228a038e..77d82671 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/TransformTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/TransformTest.py @@ -1,13 +1,7 @@ # -*- coding: utf-8 -*- - - - -import os -import sys import unittest -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) -from tests.SpiffWorkflow.util import run_workflow +from ..util import run_workflow from .TaskSpecTest import TaskSpecTest from SpiffWorkflow.specs.Transform import Transform from SpiffWorkflow.specs.Simple import Simple diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/WorkflowSpecTest.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/WorkflowSpecTest.py similarity index 83% rename from SpiffWorkflow/tests/SpiffWorkflow/specs/WorkflowSpecTest.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/WorkflowSpecTest.py index 1ad5b125..8c619ad7 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/WorkflowSpecTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/WorkflowSpecTest.py @@ -1,26 +1,20 @@ # -*- coding: utf-8 -*- - -from builtins import zip -from builtins import range -import os -import sys import unittest 
-data_dir = os.path.join(os.path.dirname(__file__), '..', 'data') -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) - +import os +import pickle from lxml import etree -import pickle from random import randint -try: - from util import track_workflow -except ImportError as e: - from tests.SpiffWorkflow.util import track_workflow + from SpiffWorkflow.workflow import Workflow from SpiffWorkflow.specs.Join import Join from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec from SpiffWorkflow.serializer.prettyxml import XmlSerializer +from ..util import track_workflow + +data_dir = os.path.join(os.path.dirname(__file__), '..', 'data') + serializer = XmlSerializer() data_file = 'data.pkl' @@ -49,7 +43,7 @@ class WorkflowSpecTest(unittest.TestCase): # Execute a random number of steps. for i in range(randint(0, len(workflow.spec.task_specs))): - workflow.complete_next() + workflow.run_next() # Store the workflow instance in a file. with open(data_file, 'wb') as fp: @@ -70,19 +64,14 @@ class WorkflowSpecTest(unittest.TestCase): taken_path = track_workflow(workflow.spec, taken_path) # Run the rest of the workflow. - workflow.complete_all() + workflow.run_all() after = workflow.get_dump() self.assertTrue(workflow.is_completed(), 'Workflow not complete:' + after) - # taken_path = '\n'.join(taken_path) + '\n' - if taken_path != expected_path: - for taken, expected in zip(taken_path, expected_path): - print("TAKEN: ", taken) - print("EXPECTED:", expected) self.assertEqual(expected_path, taken_path) def testSerialize(self): # Read a complete workflow spec. 
- xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml') + xml_file = os.path.join(data_dir, 'workflow1.xml') with open(xml_file) as fp: xml = etree.parse(fp).getroot() path_file = os.path.splitext(xml_file)[0] + '.path' diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/__init__.py b/SpiffWorkflow/tests/SpiffWorkflow/core/specs/__init__.py similarity index 100% rename from SpiffWorkflow/tests/SpiffWorkflow/serializer/__init__.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/specs/__init__.py diff --git a/SpiffWorkflow/tests/SpiffWorkflow/util.py b/SpiffWorkflow/tests/SpiffWorkflow/core/util.py similarity index 83% rename from SpiffWorkflow/tests/SpiffWorkflow/util.py rename to SpiffWorkflow/tests/SpiffWorkflow/core/util.py index eeba7d52..c8f02a4f 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/util.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/core/util.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import time -from SpiffWorkflow.task import Task, TaskState from SpiffWorkflow.workflow import Workflow +from SpiffWorkflow.specs.SubWorkflow import SubWorkflow def on_reached_cb(workflow, task, taken_path): @@ -38,7 +38,12 @@ def on_reached_cb(workflow, task, taken_path): old = task.get_data('data', '') data = task.get_name() + ': ' + atts + '/' + props + '\n' task.set_data(data=old + data) + return True +def on_complete_cb(workflow, task, taken_path): + # Record the path. + indent = ' ' * (task._get_depth() - 1) + taken_path.append('%s%s' % (indent, task.get_name())) # In workflows that load a subworkflow, the newly loaded children # will not have on_reached_cb() assigned. By using this function, we # re-assign the function in every step, thus making sure that new @@ -47,22 +52,24 @@ def on_reached_cb(workflow, task, taken_path): track_task(child.task_spec, taken_path) return True - -def on_complete_cb(workflow, task, taken_path): - # Record the path. 
- indent = ' ' * (task._get_depth() - 1) - taken_path.append('%s%s' % (indent, task.get_name())) +def on_entered_cb(workflow, task, taken_path): + for child in task.children: + track_task(child.task_spec, taken_path) return True - def track_task(task_spec, taken_path): + # Disconnecting and reconnecting makes absolutely no sense but inexplicably these tests break + # if just connected based on a check that they're not if task_spec.reached_event.is_connected(on_reached_cb): task_spec.reached_event.disconnect(on_reached_cb) task_spec.reached_event.connect(on_reached_cb, taken_path) if task_spec.completed_event.is_connected(on_complete_cb): task_spec.completed_event.disconnect(on_complete_cb) task_spec.completed_event.connect(on_complete_cb, taken_path) - + if isinstance(task_spec, SubWorkflow): + if task_spec.entered_event.is_connected(on_entered_cb): + task_spec.entered_event.disconnect(on_entered_cb) + task_spec.entered_event.connect(on_entered_cb, taken_path) def track_workflow(wf_spec, taken_path=None): if taken_path is None: @@ -71,7 +78,6 @@ def track_workflow(wf_spec, taken_path=None): track_task(wf_spec.task_specs[name], taken_path) return taken_path - def run_workflow(test, wf_spec, expected_path, expected_data, workflow=None): # Execute all tasks within the Workflow. if workflow is None: @@ -85,7 +91,7 @@ def run_workflow(test, wf_spec, expected_path, expected_data, workflow=None): # We allow the workflow to require a maximum of 5 seconds to # complete, to allow for testing long running tasks. for i in range(10): - workflow.complete_all(False) + workflow.run_all(False) if workflow.is_completed(): break time.sleep(0.5) @@ -93,13 +99,7 @@ def run_workflow(test, wf_spec, expected_path, expected_data, workflow=None): workflow.task_tree.dump() raise - # workflow.task_tree.dump() - test.assertTrue(workflow.is_completed(), workflow.task_tree.get_dump()) - - # Make sure that there are no waiting tasks left in the tree. 
- for thetask in Task.Iterator(workflow.task_tree, TaskState.READY): - workflow.task_tree.dump() - raise Exception('Task with state READY: %s' % thetask.name) + test.assertTrue(workflow.is_completed()) # Check whether the correct route was taken. if expected_path is not None: diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/empty1.xml b/SpiffWorkflow/tests/SpiffWorkflow/data/empty1.xml deleted file mode 100644 index e69de29b..00000000 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/empty2.xml b/SpiffWorkflow/tests/SpiffWorkflow/data/empty2.xml deleted file mode 100644 index 4adc209d..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/empty2.xml +++ /dev/null @@ -1 +0,0 @@ - diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/arbitrary_cycles.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/arbitrary_cycles.path deleted file mode 100644 index 3009ea82..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/arbitrary_cycles.path +++ /dev/null @@ -1,10 +0,0 @@ -Start - first - excl_choice_1 - go_to_repetition - return_to_first - first - excl_choice_1 - task_c1 - last - End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_discriminator.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_discriminator.path deleted file mode 100644 index aa1fa1eb..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_discriminator.path +++ /dev/null @@ -1,14 +0,0 @@ -Start - first - task_f1 - struct_discriminator_1 - excl_choice_1 - return_to_first - first - task_f1 - struct_discriminator_1 - excl_choice_1 - last - End - task_f2 - task_f3 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_partial_join.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_partial_join.path deleted file mode 100644 index ac1c7c85..00000000 --- 
a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/blocking_partial_join.path +++ /dev/null @@ -1,15 +0,0 @@ -Start - multi_choice_1 - task_e1 - task_e3 - struct_synch_merge_1 - excl_choice_1 - return_to_multi_choice_1 - multi_choice_1 - task_e1 - task_e3 - struct_synch_merge_1 - excl_choice_1 - last - End - task_e4 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_discriminator.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_discriminator.path deleted file mode 100644 index a56b255e..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_discriminator.path +++ /dev/null @@ -1,12 +0,0 @@ -Start - first - task_f1 - struct_discriminator_1 - excl_choice_1 - return_to_first - first - task_f1 - struct_discriminator_1 - excl_choice_1 - last - End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join.path deleted file mode 100644 index 74fd5b2f..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/cancelling_partial_join.path +++ /dev/null @@ -1,13 +0,0 @@ -Start - multi_choice_1 - task_e1 - task_e3 - struct_synch_merge_1 - excl_choice_1 - return_to_multi_choice_1 - multi_choice_1 - task_e1 - struct_synch_merge_1 - excl_choice_1 - last - End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/general_synchronizing_merge.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/general_synchronizing_merge.path deleted file mode 100644 index 4c575808..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/general_synchronizing_merge.path +++ /dev/null @@ -1,21 +0,0 @@ -Start - first - task_a1 - task_b1 - task_c1 - loop_back_to_c1_once - join - End - return_to_task_c1 - task_c1 - loop_back_to_c1_once - task_c2 - go_to_stub - stub_1 - loop_back_to_stub_1_once - return_to_stub_1 - stub_1 - 
loop_back_to_stub_1_once - go_to_stub_3 - stub_3 - foo diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/generalized_and_join.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/generalized_and_join.path deleted file mode 100644 index 7fbb5fff..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/control-flow/generalized_and_join.path +++ /dev/null @@ -1,22 +0,0 @@ -Start - first - task_e1 - task_f1 - task_e2 - task_f2 - task_e3 - task_f3 - struct_synch_merge_1 - excl_choice_1 - return_to_first - first - task_e1 - task_f1 - task_e2 - task_f2 - task_e3 - task_f3 - struct_synch_merge_1 - excl_choice_1 - last - End diff --git a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.path b/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.path deleted file mode 100644 index 0b5bc042..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/data/spiff/workflow1.path +++ /dev/null @@ -1,41 +0,0 @@ -Start - task_a1 - task_a2 - task_b1 - task_b2 - synch_1 - excl_choice_1 - task_c1 - excl_choice_2 - task_d3 - multi_choice_1 - task_e1 - task_e3 - struct_synch_merge_1 - task_f1 - struct_discriminator_1 - excl_choice_3 - return_to_excl_choice_1 - excl_choice_1 - task_c1 - excl_choice_2 - task_d3 - multi_choice_1 - task_e1 - task_e3 - struct_synch_merge_1 - task_f1 - struct_discriminator_1 - excl_choice_3 - multi_instance_1 - task_g1 - task_g2 - task_g1 - task_g2 - task_g1 - task_g2 - struct_synch_merge_2 - last - End - task_f2 - task_f3 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/dmn/python_engine/InvalidBusinessRuleNameErrorTest.py b/SpiffWorkflow/tests/SpiffWorkflow/dmn/python_engine/InvalidBusinessRuleNameErrorTest.py index 4e613057..adc474d4 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/dmn/python_engine/InvalidBusinessRuleNameErrorTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/dmn/python_engine/InvalidBusinessRuleNameErrorTest.py @@ -10,7 +10,7 @@ class InvalidBusinessRuleNameErrorTest(unittest.TestCase): try: res = 
runner.decide({'spam': 1}) except Exception as e: - self.assertRegexpMatches(str(e), "Did you mean 'spam'") + self.assertRegex(str(e), "Did you mean 'spam'") def suite(): return unittest.TestLoader().loadTestsFromTestCase(InvalidBusinessRuleNameErrorTest) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/baseTest.py b/SpiffWorkflow/tests/SpiffWorkflow/serializer/baseTest.py deleted file mode 100644 index 7ac41f3c..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/serializer/baseTest.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- - -from builtins import str -import sys -import unittest -import os -import warnings -dirname = os.path.dirname(__file__) -data_dir = os.path.join(dirname, '..', 'data') -sys.path.insert(0, os.path.join(dirname, '..')) - -from PatternTest import run_workflow, PatternTest -from SpiffWorkflow.serializer.base import Serializer -from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec -from SpiffWorkflow.workflow import Workflow -from SpiffWorkflow.serializer.exceptions import TaskNotSupportedError - - -class SerializerTest(PatternTest): - - def setUp(self): - super(SerializerTest, self).setUp() - self.serializer = Serializer() - self.return_type = None - - def _prepare_result(self, item): - return item - - def _compare_results(self, item1, item2, exclude_dynamic=False, - exclude_items=None): - #with open('1.xml', 'w') as fp: fp.write(item1) - #with open('2.xml', 'w') as fp: fp.write(item2) - self.assertEqual(item1.decode('utf8'), item2.decode('utf8')) - - def _test_roundtrip_serialization(self, obj): - # Test round trip serialization. 
- try: - serialized1 = obj.serialize(self.serializer) - restored = obj.__class__.deserialize(self.serializer, serialized1) - serialized2 = restored.serialize(self.serializer) - except TaskNotSupportedError as e: - warnings.warn('unsupported task spec: ' + str(e)) - return - self.assertIsInstance(serialized1, self.return_type) - self.assertIsInstance(serialized2, self.return_type) - serialized1 = self._prepare_result(serialized1) - serialized2 = self._prepare_result(serialized2) - self._compare_results(serialized1, serialized2) - return serialized1 - - def _test_workflow_spec(self, test): - spec_result1 = self._test_roundtrip_serialization(test.spec) - spec_result2 = self._test_roundtrip_serialization(test.spec) - self.assertEqual(spec_result1, spec_result2) - self._compare_results(spec_result1, spec_result2) - - workflow = run_workflow(self, test.spec, test.path, test.data) - spec_result3 = self._test_roundtrip_serialization(test.spec) - wf_result3 = self._test_roundtrip_serialization(workflow) - # We can't compare spec_result 2 and 3, because starting a workflow - # implicitely causes a Root node to be added to the workflow spec. - # (No, that doesn't seem to be a clean solution.) 
- # self.assertEqual(spec_result2, spec_result3) - # self._compare_results(spec_result2, spec_result3) - - def testWorkflowSpec(self): - if type(self.serializer) is Serializer: - spec = self.workflows[0].spec - wf = Workflow(spec) - self.assertRaises(NotImplementedError, spec.serialize, - self.serializer) - self.assertRaises(NotImplementedError, - WorkflowSpec.deserialize, self.serializer, None) - self.assertRaises(NotImplementedError, wf.serialize, - self.serializer) - self.assertRaises(NotImplementedError, - Workflow.deserialize, self.serializer, None) - return - - for test in self.workflows: - print(test.filename) - self._test_workflow_spec(test) - - -def suite(): - return unittest.defaultTestLoader.loadTestsFromTestCase(SerializerTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/dictTest.py b/SpiffWorkflow/tests/SpiffWorkflow/serializer/dictTest.py deleted file mode 100644 index 4143ec60..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/serializer/dictTest.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- - -from builtins import str -import sys -import unittest -import os -dirname = os.path.dirname(__file__) -sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) - -import uuid -from SpiffWorkflow.serializer.dict import DictionarySerializer -from .baseTest import SerializerTest -from SpiffWorkflow.workflow import Workflow - - -class DictionarySerializerTest(SerializerTest): - - def setUp(self): - super(DictionarySerializerTest, self).setUp() - self.serializer = DictionarySerializer() - self.return_type = dict - - def _compare_results(self, item1, item2, - exclude_dynamic=False, - exclude_items=None): - exclude_items = exclude_items if exclude_items is not None else [] - if exclude_dynamic: - if 'last_state_change' not in exclude_items: - exclude_items.append('last_state_change') - if 'last_task' not in exclude_items: - exclude_items.append('last_task') - 
if uuid.UUID not in exclude_items: - exclude_items.append(uuid.UUID) - if type(item1) in exclude_items: - return - - if isinstance(item1, dict): - self.assertIsInstance(item2, dict) - for key, value in list(item1.items()): - self.assertIn(key, item2) - if key in exclude_items: - continue - self._compare_results(value, item2[key], - exclude_dynamic=exclude_dynamic, - exclude_items=exclude_items) - for key in item2: - self.assertIn(key, item1) - - elif isinstance(item1, list): - msg = "item is not a list (is a " + str(type(item2)) + ")" - self.assertIsInstance(item2, list, msg) - msg = "list lengths differ: {} vs {}".format( - len(item1), len(item2)) - self.assertEqual(len(item1), len(item2), msg) - for i, listitem in enumerate(item1): - self._compare_results(listitem, item2[i], - exclude_dynamic=exclude_dynamic, - exclude_items=exclude_items) - - elif isinstance(item1, Workflow): - raise Exception("Item is a Workflow") - - else: - msg = "{}: types differ: {} vs {}".format( - str(item2), type(item1), type(item2)) - self.assertEqual(type(item1), type(item2), msg) - self.assertEqual(item1, item2) - - -def suite(): - return unittest.defaultTestLoader.loadTestsFromTestCase(DictionarySerializerTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/jsonTest.py b/SpiffWorkflow/tests/SpiffWorkflow/serializer/jsonTest.py deleted file mode 100644 index cfdaa4d6..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/serializer/jsonTest.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys -import unittest -import os -dirname = os.path.dirname(__file__) -sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) - -import json -from SpiffWorkflow.serializer.json import JSONSerializer -from .dictTest import DictionarySerializerTest - - -class JSONSerializerTest(DictionarySerializerTest): - - def setUp(self): - super(JSONSerializerTest, self).setUp() - self.serializer = 
JSONSerializer() - self.return_type = str - - def _prepare_result(self, item): - return json.loads(item) - - def _compare_results(self, item1, item2, exclude_dynamic=False, - exclude_items=None): - if exclude_dynamic: - exclude_items = ['__uuid__'] - else: - exclude_items = [] - super(JSONSerializerTest, self)._compare_results(item1, item2, - exclude_dynamic=exclude_dynamic, - exclude_items=exclude_items) - - -def suite(): - return unittest.defaultTestLoader.loadTestsFromTestCase(JSONSerializerTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/prettyxmlTest.py b/SpiffWorkflow/tests/SpiffWorkflow/serializer/prettyxmlTest.py deleted file mode 100644 index 25a604c6..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/serializer/prettyxmlTest.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys -import unittest -import os -dirname = os.path.dirname(__file__) -data_dir = os.path.join(dirname, '..', 'data') -sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) - -from SpiffWorkflow.serializer.prettyxml import XmlSerializer -from .baseTest import SerializerTest - - -class XmlSerializerTest(SerializerTest): - - def setUp(self): - super(XmlSerializerTest, self).setUp() - self.serializer = XmlSerializer() - self.return_type = str - - def testWorkflowSpec(self): - # Nothing to test here: The deserialization is already used in setUp() - # to load all specs, and serialization is not supported. 
- pass - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(XmlSerializerTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/serializer/xmlTest.py b/SpiffWorkflow/tests/SpiffWorkflow/serializer/xmlTest.py deleted file mode 100644 index bdffc6e9..00000000 --- a/SpiffWorkflow/tests/SpiffWorkflow/serializer/xmlTest.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys -import unittest -import os -dirname = os.path.dirname(__file__) -sys.path.insert(0, os.path.join(dirname, '..')) -sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) - -from lxml import etree -from SpiffWorkflow.serializer.xml import XmlSerializer -from serializer.baseTest import SerializerTest - - -class XmlSerializerTest(SerializerTest): - - def setUp(self): - super(XmlSerializerTest, self).setUp() - self.serializer = XmlSerializer() - self.return_type = etree._Element - - def _prepare_result(self, item): - return etree.tostring(item, pretty_print=True) - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(XmlSerializerTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/__init__.py b/SpiffWorkflow/tests/SpiffWorkflow/specs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/SpiffWorkflow/tests/SpiffWorkflow/spiff/BaseTestCase.py b/SpiffWorkflow/tests/SpiffWorkflow/spiff/BaseTestCase.py index 5b8533f5..fc54e82b 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/spiff/BaseTestCase.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/spiff/BaseTestCase.py @@ -3,13 +3,10 @@ import os from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser, VALIDATOR from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG -from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter from SpiffWorkflow.bpmn.serializer.workflow import 
BpmnWorkflowSerializer from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase -SPIFF_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter) - wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG) class BaseTestCase(BpmnWorkflowTestCase): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/spiff/BusinessRuleTaskTest.py b/SpiffWorkflow/tests/SpiffWorkflow/spiff/BusinessRuleTaskTest.py index f738b7da..c32262c5 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/spiff/BusinessRuleTaskTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/spiff/BusinessRuleTaskTest.py @@ -9,4 +9,4 @@ class BusinessRuleTaskTest(BaseTestCase): self.workflow = BpmnWorkflow(spec, subprocesses) self.save_restore() self.workflow.do_engine_steps() - self.assertTrue(self.workflow.is_completed()) \ No newline at end of file + self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/spiff/CorrelationTest.py b/SpiffWorkflow/tests/SpiffWorkflow/spiff/CorrelationTest.py index c44be2a4..014e8f8e 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/spiff/CorrelationTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/spiff/CorrelationTest.py @@ -23,14 +23,14 @@ class CorrelationTest(BaseTestCase): task.data['task_num'] = idx task.data['task_name'] = f'subprocess {idx}' task.data['extra_data'] = f'unused data' - task.complete() + task.run() self.workflow.do_engine_steps() ready_tasks = self.workflow.get_ready_user_tasks() for task in ready_tasks: self.assertEqual(task.task_spec.name, 'prepare_response') response = 'OK' if task.data['source_task']['num'] else 'No' task.data.update(response=response) - task.complete() + task.run() self.workflow.do_engine_steps() # If the messages were routed properly, the task number should match the response id for task in self.workflow.get_tasks_from_spec_name('subprocess_end'): diff --git a/SpiffWorkflow/tests/SpiffWorkflow/spiff/MultiInstanceTaskTest.py 
b/SpiffWorkflow/tests/SpiffWorkflow/spiff/MultiInstanceTaskTest.py index 61e89e1f..31108b9b 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/spiff/MultiInstanceTaskTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/spiff/MultiInstanceTaskTest.py @@ -19,7 +19,7 @@ class MultiInstanceTaskTest(BaseTestCase): ready_tasks = self.workflow.get_ready_user_tasks() for task in ready_tasks: task.data['output_item'] = task.data['input_item'] * 2 - task.complete() + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py b/SpiffWorkflow/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py index 320586a9..f928f80c 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py @@ -27,7 +27,7 @@ class PrescriptPostsciptTest(BaseTestCase): self.set_process_data({'b': 2}) ready_tasks = self.workflow.get_tasks(TaskState.READY) # This execute the same script as task_test - ready_tasks[0].complete() + ready_tasks[0].run() # a should be removed, b should be unchanged, and c and z should be present (but not x & y) self.assertDictEqual({'b': 2, 'c': 12, 'z': 6}, ready_tasks[0].data) @@ -43,7 +43,7 @@ class PrescriptPostsciptTest(BaseTestCase): # The prescript sets x, y = a * 2, b * 2 and creates the variable z = x + y # The postscript sets c = z * 2 and deletes x and y # a and b should remain unchanged, and c and z should be added - ready_tasks[0].complete() + ready_tasks[0].run() self.assertDictEqual({'a': 1, 'b': 2, 'c': 12, 'z': 6}, ready_tasks[0].data) def test_for_error(self, save_restore=False): diff --git a/bpmn-js-spiffworkflow/.github/workflows/auto-merge-dependabot-prs.yml b/bpmn-js-spiffworkflow/.github/workflows/auto-merge-dependabot-prs.yml index f5e175f9..ddf2ab3e 100644 --- a/bpmn-js-spiffworkflow/.github/workflows/auto-merge-dependabot-prs.yml +++ 
b/bpmn-js-spiffworkflow/.github/workflows/auto-merge-dependabot-prs.yml @@ -15,7 +15,7 @@ jobs: steps: - name: Dependabot metadata id: metadata - uses: dependabot/fetch-metadata@v1.3.4 + uses: dependabot/fetch-metadata@v1.3.6 with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for Dependabot PRs diff --git a/bpmn-js-spiffworkflow/.github/workflows/labeler.yml b/bpmn-js-spiffworkflow/.github/workflows/labeler.yml index 9da4ff43..074c3a3f 100644 --- a/bpmn-js-spiffworkflow/.github/workflows/labeler.yml +++ b/bpmn-js-spiffworkflow/.github/workflows/labeler.yml @@ -14,6 +14,6 @@ jobs: uses: actions/checkout@v3 - name: Run Labeler - uses: crazy-max/ghaction-github-labeler@v4.0.0 + uses: crazy-max/ghaction-github-labeler@v4.1.0 with: skip-delete: true diff --git a/bpmn-js-spiffworkflow/app/css/app.css b/bpmn-js-spiffworkflow/app/css/app.css index 6beed193..295da8b0 100644 --- a/bpmn-js-spiffworkflow/app/css/app.css +++ b/bpmn-js-spiffworkflow/app/css/app.css @@ -78,3 +78,6 @@ html, body { right: 10px; } +.djs-palette.two-column.open { + width: 95px; +} diff --git a/bpmn-js-spiffworkflow/app/spiffworkflow/DataObject/DataObjectInterceptor.js b/bpmn-js-spiffworkflow/app/spiffworkflow/DataObject/DataObjectInterceptor.js index 1dc65380..df544bd7 100644 --- a/bpmn-js-spiffworkflow/app/spiffworkflow/DataObject/DataObjectInterceptor.js +++ b/bpmn-js-spiffworkflow/app/spiffworkflow/DataObject/DataObjectInterceptor.js @@ -46,13 +46,14 @@ export default class DataObjectInterceptor extends CommandInterceptor { } } else if (is(businessObject, 'bpmn:DataObject')) { // For data objects, only update the flowElements for new data objects, and set the parent so it doesn't get moved. 
- if (typeof(businessObject.$parent) === 'undefined') { + if (typeof (businessObject.$parent) === 'undefined') { const flowElements = realParent.get('flowElements'); flowElements.push(businessObject); businessObject.$parent = realParent; } - } else - bpmnUpdater.__proto__.updateSemanticParent.call(this, businessObject, parentBusinessObject); + } else { + bpmnUpdater.__proto__.updateSemanticParent.call(bpmnUpdater, businessObject, parentBusinessObject); + } }; /** diff --git a/bpmn-js-spiffworkflow/app/spiffworkflow/extensions/propertiesPanel/SpiffExtensionSelect.js b/bpmn-js-spiffworkflow/app/spiffworkflow/extensions/propertiesPanel/SpiffExtensionSelect.js index 9f7bbbaa..33fb0c67 100644 --- a/bpmn-js-spiffworkflow/app/spiffworkflow/extensions/propertiesPanel/SpiffExtensionSelect.js +++ b/bpmn-js-spiffworkflow/app/spiffworkflow/extensions/propertiesPanel/SpiffExtensionSelect.js @@ -84,10 +84,6 @@ function requestOptions(eventBus, element, commandStack, optionType) { // or you risk a race condition. 
eventBus.once(`spiff.${optionType}.returned`, (event) => { spiffExtensionOptions[optionType] = event.options; - commandStack.execute('element.updateProperties', { - element, - properties: {}, - }); }); eventBus.fire(`spiff.${optionType}.requested`, { eventBus }); } diff --git a/bpmn-js-spiffworkflow/app/spiffworkflow/messages/propertiesPanel/CorrelationPropertiesArray.js b/bpmn-js-spiffworkflow/app/spiffworkflow/messages/propertiesPanel/CorrelationPropertiesArray.js index 86a82353..0e73336b 100644 --- a/bpmn-js-spiffworkflow/app/spiffworkflow/messages/propertiesPanel/CorrelationPropertiesArray.js +++ b/bpmn-js-spiffworkflow/app/spiffworkflow/messages/propertiesPanel/CorrelationPropertiesArray.js @@ -255,40 +255,6 @@ function MessageCorrelationKeySelect(props) { }); } -function CorrelationPropertyIdTextField(props) { - const { - id, - element, - correlationPropertyModdleElement, - commandStack, - translate, - } = props; - - const debounce = useService('debounceInput'); - const setValue = (value) => { - commandStack.execute('element.updateModdleProperties', { - element, - moddleElement: correlationPropertyModdleElement, - properties: { - id: value, - }, - }); - }; - - const getValue = () => { - return correlationPropertyModdleElement.id; - }; - - return TextFieldEntry({ - element, - id: `${id}-id-textField`, - label: translate('ID'), - getValue, - setValue, - debounce, - }); -} - function CorrelationPropertyNameTextField(props) { const { id, @@ -305,6 +271,7 @@ function CorrelationPropertyNameTextField(props) { moddleElement: correlationPropertyModdleElement, properties: { name: value, + id: value, }, }); }; diff --git a/bpmn-js-spiffworkflow/webpack.config.js b/bpmn-js-spiffworkflow/webpack.config.js index 8fe08cf5..0774e48e 100644 --- a/bpmn-js-spiffworkflow/webpack.config.js +++ b/bpmn-js-spiffworkflow/webpack.config.js @@ -35,7 +35,6 @@ module.exports = { new CopyWebpackPlugin({ patterns: [ { from: 'assets/**', to: 'vendor/bpmn-js', context: 
'node_modules/bpmn-js/dist/' }, - { from: '*.css', to: 'vendor/bpmn-js-color-picker', context: 'node_modules/bpmn-js-color-picker/colors' }, { from: 'assets/**', to: 'vendor/bpmn-js-properties-panel', diff --git a/poetry.lock b/poetry.lock index b71632be..a63cec16 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,110 +1,17 @@ -[[package]] -name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "alembic" -version = "1.8.1" -description = "A database migration tool for SQLAlchemy." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" - -[package.extras] -tz = ["python-dateutil"] - -[[package]] -name = "amqp" -version = "5.1.1" -description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -vine = ">=5.0.0" - -[[package]] -name = "aniso8601" -version = "9.0.1" -description = "A library for parsing ISO 8601 strings." 
-category = "main" -optional = false -python-versions = "*" - -[package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] - -[[package]] -name = "apscheduler" -version = "3.10.0" -description = "In-process task scheduler with Cron-like capabilities" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytz = "*" -setuptools = ">=0.7" -six = ">=1.4.0" -tzlocal = ">=2.0,<3.0.0 || >=4.0.0" - -[package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -gevent = ["gevent"] -mongodb = ["pymongo (>=3.0)"] -redis = ["redis (>=3.0)"] -rethinkdb = ["rethinkdb (>=2.4.0)"] -sqlalchemy = ["sqlalchemy (>=1.4)"] -testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] -tornado = ["tornado (>=4.3)"] -twisted = ["twisted"] -zookeeper = ["kazoo"] - -[[package]] -name = "astroid" -version = "2.12.12" -description = "An abstract syntax tree for Python with inference support." -category = "main" -optional = false -python-versions = ">=3.7.2" - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} - [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = 
"2.10.3" -description = "Internationalization utilities" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -pytz = ">=2015.7" +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "bandit" @@ -125,44 +32,9 @@ test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", toml = ["toml"] yaml = ["PyYAML"] -[[package]] -name = "bcrypt" -version = "4.0.1" -description = "Modern password hashing for your software and your servers" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -tests = ["pytest (>=3.2.1,!=3.3.0)"] -typecheck = ["mypy"] - -[[package]] -name = "beautifulsoup4" -version = "4.11.1" -description = "Screen-scraping library" -category = "dev" -optional = false -python-versions = ">=3.6.0" - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "billiard" -version = "3.6.4.0" -description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "black" -version = "23.1a1" +version = "23.1.0" description = "The uncompromising code formatter." 
category = "dev" optional = false @@ -171,6 +43,7 @@ python-versions = ">=3.7" [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" @@ -180,73 +53,6 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "blinker" -version = "1.5" -description = "Fast, simple object-to-object and broadcast signaling" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "celery" -version = "5.2.7" -description = "Distributed Task Queue." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -billiard = ">=3.6.4.0,<4.0" -click = ">=8.0.3,<9.0" -click-didyoumean = ">=0.0.3" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.2.3,<6.0" -pytz = ">=2021.3" -vine = ">=5.0.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=1.3.2)"] -auth = ["cryptography"] -azureblockblob = ["azure-storage-blob (==12.9.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (<3.21.0)"] -consul = ["python-consul2"] -cosmosdbsql = ["pydocumentdb (==2.3.2)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb"] -django = ["Django (>=1.11)"] -dynamodb = ["boto3 (>=1.9.178)"] -elasticsearch = ["elasticsearch"] -eventlet = ["eventlet (>=0.32.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=1.5.0)"] -memcache = ["pylibmc"] -mongodb = ["pymongo[srv] (>=3.11.1)"] -msgpack = ["msgpack"] -pymemcache = ["python-memcached"] -pyro = ["pyro4"] -pytest = ["pytest-celery"] -redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] -s3 = ["boto3 (>=1.9.125)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem"] -sqlalchemy = ["sqlalchemy"] -sqs = ["kombu[sqs]"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = 
["zstandard"] - -[[package]] -name = "certifi" -version = "2022.9.24" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "cfgv" version = "3.3.1" @@ -255,17 +61,6 @@ category = "dev" optional = false python-versions = ">=3.6.1" -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "classify-imports" version = "4.2.0" @@ -278,129 +73,21 @@ python-versions = ">=3.7" name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "click-didyoumean" -version = "0.3.0" -description = "Enables git-like *did-you-mean* feature in click" -category = "main" -optional = false -python-versions = ">=3.6.2,<4.0.0" - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.2.0" -description = "REPL plugin for Click" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = "*" -prompt-toolkit = "*" -six = "*" - -[[package]] -name = "clickclick" -version = "20.10.2" -description = "Click utility functions" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = ">=4.0" -PyYAML = ">=3.11" - [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -[[package]] -name = "configparser" -version = "5.3.0" -description = "Updated configparser from stdlib for earlier Pythons." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] - -[[package]] -name = "connexion" -version = "2.14.1" -description = "Connexion - API first applications with OpenAPI/Swagger and Flask" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -clickclick = ">=1.2,<21" -flask = ">=1.0.4,<3" -inflection = ">=0.3.1,<0.6" -itsdangerous = ">=0.24" -jsonschema = ">=2.5.1,<5" -packaging = ">=20" -PyYAML = ">=5.1,<7" -requests = ">=2.9.1,<3" -swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} -werkzeug = ">=1.0,<3" - -[package.extras] -aiohttp = ["MarkupSafe (>=0.23)", "aiohttp 
(>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"] -docs = ["sphinx-autoapi (==1.8.1)"] -flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"] -swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] -tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "darglint" -version = "1.8.1" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - [[package]] name = "distlib" version = "0.3.6" @@ -413,52 +100,21 @@ python-versions = "*" name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "dparse" -version = "0.6.2" -description = "A parser for Python dependency files" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -packaging = "*" -toml = "*" - -[package.extras] -conda = ["pyyaml"] -pipenv = ["pipenv"] - -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - [[package]] name = "filelock" -version = "3.8.0" +version = "3.10.7" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -489,7 +145,7 @@ pycodestyle = "*" [[package]] name = "flake8-bugbear" -version = "22.10.27" +version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -504,11 +160,11 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "flake8-docstrings" -version = "1.6.0" +version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3" @@ -538,198 +194,21 @@ flake8 = ">=3.0.0" pygments = "*" restructuredtext-lint = "*" -[[package]] -name = "flask" -version = "2.2.2" -description = "A simple framework for building complex web applications." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=8.0" -itsdangerous = ">=2.0" -Jinja2 = ">=3.0" -Werkzeug = ">=2.2.2" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-admin" -version = "1.6.0" -description = "Simple and extensible admin interface framework for Flask" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -Flask = ">=0.7" -wtforms = "*" - -[package.extras] -aws = ["boto"] -azure = ["azure-storage-blob"] - -[[package]] -name = "flask-bcrypt" -version = "1.0.1" -description = "Brcrypt hashing for Flask." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -bcrypt = ">=3.1.1" -Flask = "*" - -[[package]] -name = "flask-bpmn" -version = "0.0.0" -description = "Flask Bpmn" -category = "main" -optional = false -python-versions = "^3.7" -develop = false - -[package.dependencies] -click = "^8.0.1" -flask = "*" -flask-admin = "*" -flask-bcrypt = "*" -flask-cors = "*" -flask-mail = "*" -flask-marshmallow = "*" -flask-migrate = "*" -flask-restful = "*" -greenlet = "^2.0.1" -sentry-sdk = "*" -sphinx-autoapi = "^2.0.0" -spiffworkflow = "*" -werkzeug = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/flask-bpmn" -reference = "main" -resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" - -[[package]] -name = "flask-cors" -version = "3.0.10" -description = "A Flask extension adding a decorator for CORS support" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask = ">=0.9" -Six = "*" - -[[package]] -name = "flask-mail" -version = "0.9.1" -description = "Flask extension for sending email" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -blinker = "*" -Flask = "*" - -[[package]] -name = "flask-marshmallow" -version = "0.14.0" -description = "Flask + marshmallow for beautiful 
APIs" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask = "*" -marshmallow = ">=2.0.0" -six = ">=1.9.0" - -[package.extras] -dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] - -[[package]] -name = "flask-migrate" -version = "3.1.0" -description = "SQLAlchemy database migrations for Flask applications using Alembic." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -alembic = ">=0.7" -Flask = ">=0.9" -Flask-SQLAlchemy = ">=1.0" - -[[package]] -name = "flask-restful" -version = "0.3.9" -description = "Simple framework for creating REST APIs" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8" -pytz = "*" -six = ">=1.3.0" - -[package.extras] -docs = ["sphinx"] - -[[package]] -name = "flask-sqlalchemy" -version = "3.0.2" -description = "Add SQLAlchemy support to your Flask application." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -Flask = ">=2.2" -SQLAlchemy = ">=1.4.18" - -[[package]] -name = "furo" -version = "2022.9.29" -description = "A clean customisable Sphinx documentation theme." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -beautifulsoup4 = "*" -pygments = ">=2.7" -sphinx = ">=4.0,<6.0" -sphinx-basic-ng = "*" - [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" @@ -737,38 +216,9 @@ python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -[[package]] -name = "greenlet" -version = "2.0.1" -description = "Lightweight in-process concurrent programming" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] - -[[package]] -name = "gunicorn" -version = "20.1.0" -description = "WSGI HTTP Server for UNIX" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -setuptools = ">=3.0" - -[package.extras] -eventlet = ["eventlet (>=0.24.1)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -tornado = ["tornado (>=0.2)"] - [[package]] name = "identify" -version = "2.5.7" +version = "2.5.22" description = "File identification library for Python" category = "dev" optional = false @@ -777,209 +227,6 @@ python-versions = ">=3.7" [package.extras] license = ["ukkonen"] -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting 
image size from png/jpeg/jpeg2000/gif file" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "itsdangerous" -version = "2.1.2" -description = "Safely pass data to untrusted environments and back." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonschema" -version = "4.16.0" -description = "An implementation of JSON Schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "kombu" -version = "5.2.4" -description = "Messaging library for Python." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -amqp = ">=5.0.9,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.0.0)"] -azurestoragequeues = ["azure-storage-queue"] -consul = ["python-consul (>=0.6.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=3.3.0,<3.12.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy"] -sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] - -[[package]] -name = "lazy-object-proxy" -version = "1.8.0" -description = "A fast and thorough lazy object proxy." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "livereload" -version = "2.6.3" -description = "Python LiveReload is an awesome tool for web developers" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > \"2.7\""} - -[[package]] -name = "lxml" -version = "4.9.1" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "mako" -version = "1.2.3" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "marshmallow" -version = "3.18.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "marshmallow-enum" -version = "1.5.1" -description = "Enum field for Marshmallow" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -marshmallow = ">=2.0.0" - -[[package]] -name = "marshmallow-sqlalchemy" -version = "0.28.1" -description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -marshmallow = ">=3.0.0" -packaging = ">=21.3" -SQLAlchemy = ">=1.3.0" - -[package.extras] -dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] -tests = ["pytest", "pytest-lazy-fixture 
(>=0.6.2)"] - [[package]] name = "mccabe" version = "0.6.1" @@ -988,46 +235,13 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "mypy" -version = "0.982" -description = "Optional static typing for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -mypy-extensions = ">=0.4.3" -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false -python-versions = "*" - -[[package]] -name = "mysql-connector-python" -version = "8.0.31" -description = "MySQL driver written in Python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -protobuf = ">=3.11.0,<=3.20.1" - -[package.extras] -compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"] -dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"] -gssapi = ["gssapi (>=1.6.9,<=1.8.1)"] +python-versions = ">=3.5" [[package]] name = "nodeenv" @@ -1042,18 +256,15 @@ setuptools = "*" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" -category = "main" +category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false @@ -1061,50 +272,27 @@ python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.11.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false python-versions = ">=2.6" -[[package]] -name = "pep8-naming" -version = "0.13.2" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -flake8 = ">=3.9.1" - [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" -version = "2.20.0" +version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
category = "dev" optional = false @@ -1115,12 +303,11 @@ cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" +virtualenv = ">=20.10.0" [[package]] name = "pre-commit-hooks" -version = "4.3.0" +version = "4.4.0" description = "Some out-of-the-box hooks for pre-commit." category = "dev" optional = false @@ -1129,41 +316,6 @@ python-versions = ">=3.7" [package.dependencies] "ruamel.yaml" = ">=0.15" -[[package]] -name = "prompt-toolkit" -version = "3.0.31" -description = "Library for building powerful interactive command lines in Python" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "protobuf" -version = "3.20.1" -description = "Protocol Buffers" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "psycopg2" -version = "2.9.5" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "pycodestyle" version = "2.8.0" @@ -1174,17 +326,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1196,171 +348,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" +category = "dev" optional = false python-versions = ">=3.6" [package.extras] plugins = ["importlib-metadata"] -[[package]] -name = "pyjwt" -version = "2.6.0" -description = "JSON Web Token implementation in Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyrsistent" -version = "0.18.1" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-flask" -version = "1.2.0" -description = "A set of py.test fixtures to test Flask applications." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -Flask = "*" -pytest = ">=5.2" -Werkzeug = ">=0.7" - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] - -[[package]] -name = "pytest-flask-sqlalchemy" -version = "1.1.0" -description = "A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask-SQLAlchemy = ">=2.3" -packaging = ">=14.1" -pytest = ">=3.2.1" -pytest-mock = ">=1.6.2" -SQLAlchemy = ">=1.2.2" - -[package.extras] -tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"] - -[[package]] -name = "pytest-mock" -version = "3.10.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - -[[package]] -name = "python-keycloak" -version = "2.6.0" -description = "python-keycloak is a Python package providing access to the Keycloak API." 
-category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -python-jose = ">=3.3.0,<4.0.0" -requests = ">=2.20.0,<3.0.0" -requests-toolbelt = ">=0.9.1,<0.10.0" -urllib3 = ">=1.26.0,<2.0.0" - -[package.extras] -docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"] - -[[package]] -name = "pytz" -version = "2022.5" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pytz-deprecation-shim" -version = "0.1.0.post0" -description = "Shims to make deprecation of pytz easier" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -tzdata = {version = "*", markers = "python_version >= \"3.6\""} - [[package]] name = "pyupgrade" -version = "3.1.0" +version = "3.3.1" description = "A tool to automatically upgrade syntax for newer versions." category = "dev" optional = false @@ -1373,7 +372,7 @@ tokenize-rt = ">=3.2.0" name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -1388,47 +387,6 @@ python-versions = ">=3.7" [package.dependencies] classify-imports = ">=4.1" -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-toolbelt" -version = "0.9.1" -description = "A utility belt for advanced users of python-requests" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "restrictedpython" -version = "6.0" -description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." -category = "main" -optional = false -python-versions = ">=3.6, <3.12" - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-mock"] - [[package]] name = "restructuredtext-lint" version = "1.4.0" @@ -1440,17 +398,6 @@ python-versions = "*" [package.dependencies] docutils = ">=0.11,<1.0" -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -category = "main" -optional = false -python-versions = ">=3.6,<4" - -[package.dependencies] -pyasn1 = ">=0.1.3" - [[package]] name = "ruamel-yaml" version = "0.17.21" @@ -1463,87 +410,19 @@ python-versions = ">=3" docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] -[[package]] -name = "safety" -version = "2.3.1" -description = "Checks installed dependencies for known vulnerabilities and licenses." 
-category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -Click = ">=8.0.2" -dparse = ">=0.6.2" -packaging = ">=21.0" -requests = "*" -"ruamel.yaml" = ">=0.17.21" -setuptools = ">=19.3" - -[package.extras] -github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] -gitlab = ["python-gitlab (>=1.3.0)"] - -[[package]] -name = "sentry-sdk" -version = "1.10.1" -description = "Python client for Sentry (https://sentry.io)" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] -httpx = ["httpx (>=0.16.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -tornado = ["tornado (>=5)"] - [[package]] name = "setuptools" -version = "65.5.0" +version = "67.6.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run 
(>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "simplejson" -version = "3.17.6" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - [[package]] name = "smmap" version = "5.0.0" @@ -1556,257 +435,13 @@ python-versions = ">=3.6" name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "main" +category = "dev" optional = false python-versions = "*" -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] - -[[package]] -name = "sphinx-autoapi" -version = "2.0.0" -description = "Sphinx API documentation generator" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -astroid = ">=2.7" -Jinja2 = "*" -PyYAML = "*" -sphinx = ">=4.0" -unidecode = "*" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -dotnet = ["sphinxcontrib-dotnetdomain"] -go = ["sphinxcontrib-golangdomain"] - -[[package]] -name = "sphinx-autobuild" -version = "2021.3.14" -description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = "*" -livereload = "*" -sphinx = "*" - -[package.extras] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "sphinx-basic-ng" -version = "1.0.0b1" -description = "A modern skeleton for Sphinx themes." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -sphinx = ">=4.0" - -[package.extras] -docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] - -[[package]] -name = "sphinx-click" -version = "4.4.0" -description = "Sphinx extension that automatically documents click applications" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=7.0" -docutils = "*" -sphinx = ">=2.0" - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "SpiffWorkflow" -version = "1.2.1" -description = "A workflow framework and BPMN/DMN Processor" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.dependencies] -celery = "*" -configparser = "*" -lxml = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/SpiffWorkflow" -reference = "main" -resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" - -[[package]] -name = "sqlalchemy" -version = "1.4.42" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql 
(<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-stubs" -version = "0.4" -description = "" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.dependencies] -mypy = ">=0.790" -typing-extensions = ">=3.7.4" - -[package.source] -type = "git" -url = "https://github.com/burnettk/sqlalchemy-stubs.git" -reference = "scoped-session-delete" -resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" - [[package]] name = "stevedore" -version = "4.1.0" +version = "5.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1815,17 +450,6 @@ python-versions = ">=3.8" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" -[[package]] -name = "swagger-ui-bundle" -version = "0.0.9" -description = "swagger_ui_bundle - swagger-ui files in a pip package" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Jinja2 = ">=2.0" - [[package]] name = "tokenize-rt" version = "5.0.0" @@ -1834,14 +458,6 @@ category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - [[package]] name = "tomli" version = "2.0.1" @@ -1850,362 +466,68 @@ category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" -optional = false -python-versions = ">= 3.7" - -[[package]] -name = "typeguard" -version = "2.13.3" -description = "Run-time type checker for Python" -category = "dev" -optional = false -python-versions = ">=3.5.3" - -[package.extras] -doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["mypy", "pytest", "typing-extensions"] - -[[package]] -name = "types-click" -version = "7.1.8" -description = "Typing stubs for click" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-flask" -version = "1.1.6" -description = "Typing stubs for Flask" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-click = "*" -types-Jinja2 = "*" -types-Werkzeug = "*" - -[[package]] -name = "types-jinja2" -version = "2.11.9" -description = "Typing stubs for Jinja2" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-MarkupSafe = "*" - -[[package]] -name = "types-markupsafe" -version = "1.1.10" -description = "Typing stubs for MarkupSafe" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-pytz" -version = "2022.5.0.0" -description = "Typing stubs for pytz" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-pyyaml" -version = "6.0.12.1" -description = "Typing stubs for PyYAML" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-requests" -version = "2.28.11.2" -description = "Typing stubs for requests" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-urllib3" -version = "1.26.25.1" -description = "Typing stubs for urllib3" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-werkzeug" -version = "1.0.9" -description = "Typing stubs for Werkzeug" -category = "main" -optional = false -python-versions 
= "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tzdata" -version = "2022.5" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" - -[[package]] -name = "tzlocal" -version = "4.2" -description = "tzinfo object for the local timezone" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytz-deprecation-shim = "*" -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] -test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] - -[[package]] -name = "unidecode" -version = "1.3.6" -description = "ASCII transliterations of Unicode text" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "urllib3" -version = "1.26.12" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "vine" -version = "5.0.0" -description = "Promises, promises, promises." 
-category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "virtualenv" -version = "20.16.6" +version = "20.21.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "werkzeug" -version = "2.2.2" -description = "The comprehensive WSGI web application library." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "wtforms" -version = "3.0.1" -description = "Form validation and rendering for Python web development." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = "*" - -[package.extras] -email = ["email-validator"] - -[[package]] -name = "xdoctest" -version = "1.1.0" -description = "A rewrite of the builtin doctest module" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} -Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} -six = "*" - -[package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] -colors = ["Pygments", "Pygments", "colorama"] -jupyter = ["IPython", "IPython", "attrs", "debugpy", 
"debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] -optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] -optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] -runtime-strict = ["six (==1.11.0)"] -tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] -tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout 
(>=2.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.11,<3.12" -content-hash = "218d9e84c83ac2b9953fa5e18ee39879d2573fc749900887851be6d9ec32e63d" +content-hash = "b47d05a3bedc167232bba9ab07c2c770574018e949d7eb87c65a95a2df84d76b" [metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -alembic = [ - {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, - {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, -] -amqp = [ - {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, - {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, -] -aniso8601 = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, -] -apscheduler = [ - {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"}, - {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"}, -] -astroid = [ - {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"}, - {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"}, -] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = 
"attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] bandit = [ {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, ] -bcrypt = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = 
"bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -billiard = [ - {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, - {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, -] black = [ - {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, - {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, - {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, - {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, - {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, - {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, - {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, - {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, - {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, - {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, - {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, - {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, -] -blinker = [ - {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, - {file = "blinker-1.5.tar.gz", hash = "sha256:923e5e2f69c155f2cc42dafbbd70e16e3fde24d2d4aa2ab72fbe386238892462"}, -] -celery = [ - {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, - {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = 
"black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = 
"sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] classify-imports = [ {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, {file = "classify_imports-4.2.0.tar.gz", hash = "sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, @@ -2214,90 +536,10 @@ click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = 
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -click-didyoumean = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, -] -click-plugins = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] -click-repl = [ - {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, - {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, -] -clickclick = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] colorama = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -configparser = [ - {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, - {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, -] -connexion = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, 
-] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = 
"coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = 
"coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -darglint = [ - {file = 
"darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, - {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, -] distlib = [ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, @@ -2306,17 +548,9 @@ docutils = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] -dparse = [ - {file = "dparse-0.6.2-py3-none-any.whl", hash = "sha256:8097076f1dd26c377f30d4745e6ec18fef42f3bf493933b842ac5bafad8c345f"}, - {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, -] -ecdsa = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, -] filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"}, + {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -2326,12 +560,12 @@ flake8-bandit = [ {file = 
"flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, - {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, ] flake8-docstrings = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, @@ -2341,539 +575,73 @@ flake8-rst-docstrings = [ {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, ] -flask = [ - {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, - {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, -] -flask-admin = [ - {file = "Flask-Admin-1.6.0.tar.gz", hash 
= "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, -] -flask-bcrypt = [ - {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, - {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, -] -flask-bpmn = [] -flask-cors = [ - {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, - {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, -] -flask-mail = [ - {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, -] -flask-marshmallow = [ - {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, - {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, -] -flask-migrate = [ - {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, - {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, -] -flask-restful = [ - {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, - {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, -] -flask-sqlalchemy = [ - {file = "Flask-SQLAlchemy-3.0.2.tar.gz", hash = "sha256:16199f5b3ddfb69e0df2f52ae4c76aedbfec823462349dabb21a1b2e0a2b65e9"}, - {file = "Flask_SQLAlchemy-3.0.2-py3-none-any.whl", hash = "sha256:7d0cd9cf73e64a996bb881a1ebd01633fc5a6d11c36ea27f7b5e251dc45476e7"}, -] -furo = [ - {file = "furo-2022.9.29-py3-none-any.whl", hash = 
"sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, - {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, -] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -greenlet = [ - {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, - {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, - {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, - {file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, - {file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, - {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, 
- {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, - {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, - {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, - {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, - {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = 
"sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, - {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, - {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, - {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, - {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, - {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, - {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, - {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = 
"sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, - {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, - {file = 
"greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, - {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, - {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, - {file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, - {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, - {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, - {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, -] -gunicorn = [ - {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, - {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] identify = [ - {file = "identify-2.5.7-py2.py3-none-any.whl", hash = "sha256:7a67b2a6208d390fd86fd04fb3def94a3a8b7f0bcbd1d1fcd6736f4defe26390"}, - {file = "identify-2.5.7.tar.gz", hash = "sha256:5b8fd1e843a6d4bf10685dd31f4520a7f1c7d0e14e9bc5d34b1d6f111cabc011"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -inflection = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -itsdangerous = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jsonschema = [ - {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, - {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, -] -kombu = [ - {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, - {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, -] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = 
"sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, - {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, - {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, - {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, -] -livereload = [ - {file = 
"livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, - {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, -] -lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = 
"lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - 
{file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = 
"lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, -] -mako = [ - {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, - {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = 
"MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = 
"MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = 
"MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -marshmallow = [ - {file = 
"marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, - {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, -] -marshmallow-enum = [ - {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, - {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, -] -marshmallow-sqlalchemy = [ - {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, - {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, + {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"}, + {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = 
"mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file 
= "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, -] mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -mysql-connector-python = [ - {file = "mysql-connector-python-8.0.31.tar.gz", hash = "sha256:0fbe8f5441ad781b4f65c54a10ac77c6a329591456607e042786528599519636"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e271d8de00d5e9f9bd4b212c8e23d2986dead0f20379010f3b274a3e24cbfcb"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f3ee04a601f9cb90ace9618bbe2fa8e5bb59be3eb0c2bd8a5405fe69e05e446b"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_i686.whl", hash = "sha256:f89b7a731885b8a04248e4d8d124705ca836f0ddd3b7cf0c789e21f4b32810ed"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:48eb34f4e69a2fba56f310de6682862a15d46cd2bd51ee6eebc3a244e4ee0aa6"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-win_amd64.whl", hash = 
"sha256:a570a72e0015b36b9c0775ae27c1d4946225f02f62129d16a14e9d77a38c0717"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7ac859a52486ac319e37f61469bbb9023faef38018223efa74e953f1fe23d36"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:79d6a6e8ce955df5ca0786cb8ed8fbd999745c9b50def89993a2a0f4732de721"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_i686.whl", hash = "sha256:e60426af313dcd526028d018d70757a82c5cc0673776b2a614e2180b5970feed"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:d0ca1ba3e5fb2f2cddcf271c320cd5c368f8d392c034ddab7a1c8dfd19510351"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:a1d8c1509c740649f352400d50360185e5473371507bb6498ceda0c6e877920c"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:447847396d1b51edd9cfe05a8c5ba82836d8ea4866f25f36a836cab322fdc4f0"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5e01a2f50378c13407a32e40dd4d225cfee5996d9d11968f76720ec28aa45421"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ac85883ec3b3a9a0e36cacc89b8f5e666206842c432a5f69b09a7687ddf51d4a"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:28cb3667be64ebfbd3d477bbd2c71e50d48bd5ed7ba2072dd460ae886d27e88e"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:30f4542d4d20357c79604e6bf1a801e71dfc45c759c22b502ca5aa8122c3e859"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:e9e5ad544adfc82ffbda2c74685c8c953bce2e212c56f117020079f05e2c68b2"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-manylinux1_i686.whl", hash = "sha256:744c976569e81eecce5e8c7e8f80df2a1c3f64414829addc69c64aef8f56d091"}, - {file = 
"mysql_connector_python-8.0.31-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17d6ea22dacca7fa78a73a81f2b186d4c5c6e70b7be314e352526654e9ba4713"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:ae1b3d03802474a161cce8a97024484d18bef43b86d20114908cbc263817cade"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:746df133c677fbe4687da33aad5a711abdd9bd2277bbc350e20f903f07c81ef5"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4d75e6c3a7f18004e8279cbd9f5edc70089d6aaf3cb64374e21098d9bf0b93c4"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8ad0d08f3f7c9e48d6d102c7de718e5e44f630f916ff2f4b4ff8a3756b5d10ac"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:02526f16eacc3961ff681c5c8455d2306a9b45124f2f012ca75a1eac9ceb5165"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:b2bbf443f6346e46c26a3e91dd96a428a1038f2d3c5e466541078479c64a1833"}, - {file = "mysql_connector_python-8.0.31-py2.py3-none-any.whl", hash = "sha256:9be9c4dcae987a2a3f07b2ad984984c24f90887dbfab3c8a971e631ad4ca5ccf"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] nodeenv = [ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = 
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -pep8-naming = [ - {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, - {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] pre-commit = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] pre-commit-hooks = [ - {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, - {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, - {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, -] -protobuf = [ - {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, - {file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"}, - {file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"}, - {file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"}, - {file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"}, - {file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"}, - {file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"}, - {file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"}, - {file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"}, - {file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"}, - {file = 
"protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"}, - {file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"}, - {file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"}, - {file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"}, - {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, - {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, -] -psycopg2 = [ - {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, - {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, - {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, - {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = 
"sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, - {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, - {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, - {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, - {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, - {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, -] -pyasn1 = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pre_commit_hooks-4.4.0-py2.py3-none-any.whl", hash = "sha256:fc8837335476221ccccda3d176ed6ae29fe58753ce7e8b7863f5d0f987328fc6"}, + {file = "pre_commit_hooks-4.4.0.tar.gz", hash = "sha256:7011eed8e1a25cde94693da009cba76392194cecc2f3f06c51a44ea6ad6c2af9"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pydocstyle = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = 
"sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyjwt = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file 
= "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-flask = [ - {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, - {file = "pytest_flask-1.2.0-py3-none-any.whl", hash = "sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9"}, -] -pytest-flask-sqlalchemy = [ - {file = "pytest-flask-sqlalchemy-1.1.0.tar.gz", hash = "sha256:db71a57b90435e5d854b21c37a2584056d6fc3ddb28c09d8d0a2546bd6e390ff"}, - {file = "pytest_flask_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:b9f272d5c4092fcbe4a6284e402a37cad84f5b9be3c0bbe1a11927f24c99ff83"}, -] -pytest-mock = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, -] -python-jose = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] -python-keycloak = 
[ - {file = "python-keycloak-2.6.0.tar.gz", hash = "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96"}, - {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"}, -] -pytz = [ - {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, - {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, -] -pytz-deprecation-shim = [ - {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, - {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pyupgrade = [ - {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"}, - {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"}, + {file = "pyupgrade-3.3.1-py2.py3-none-any.whl", hash = "sha256:3b93641963df022d605c78aeae4b5956a5296ea24701eafaef9c487527b77e60"}, + {file = "pyupgrade-3.3.1.tar.gz", hash = "sha256:f88bce38b0ba92c2a9a5063c8629e456e8d919b67d2d42c7ecab82ff196f9813"}, ] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, @@ -2921,107 +689,16 @@ reorder-python-imports = [ {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, {file = "reorder_python_imports-3.9.0.tar.gz", 
hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, ] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -requests-toolbelt = [ - {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, - {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, -] -restrictedpython = [ - {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"}, - {file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"}, -] restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -safety = [ - {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, - {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, -] -sentry-sdk = [ - {file = "sentry-sdk-1.10.1.tar.gz", hash = "sha256:105faf7bd7b7fa25653404619ee261527266b14103fe1389e0ce077bd23a9691"}, - {file 
= "sentry_sdk-1.10.1-py2.py3-none-any.whl", hash = "sha256:06c0fa9ccfdc80d7e3b5d2021978d6eb9351fa49db9b5847cf4d1f2a473414ad"}, -] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, -] -simplejson = [ - {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, - {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, - {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, - {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, - {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, - {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, - {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, - {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, - {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, - {file = 
"simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, - {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, - {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, - {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = 
"sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, - {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, @@ -3031,279 +708,19 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -sphinx = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, -] -sphinx-autoapi = [ - {file = "sphinx-autoapi-2.0.0.tar.gz", hash = "sha256:97dcf1b5b54cd0d8efef867594e4a4f3e2d3a2c0ec1e5a891e0a61bc77046006"}, - {file = "sphinx_autoapi-2.0.0-py2.py3-none-any.whl", hash = "sha256:dab2753a38cad907bf4e61473c0da365a26bfbe69fbf5aa6e4f7d48e1cf8a148"}, -] -sphinx-autobuild = [ - {file = 
"sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, - {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, -] -sphinx-basic-ng = [ - {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"}, - {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, -] -sphinx-click = [ - {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, - {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = 
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -SpiffWorkflow = [] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, - {file = 
"SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, - {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, -] -sqlalchemy-stubs = [] stevedore = [ - {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"}, - {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"}, -] 
-swagger-ui-bundle = [ - {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, - {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, + {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, + {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] tokenize-rt = [ {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, {file = "tokenize_rt-5.0.0.tar.gz", hash = "sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, ] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -tornado = [ - {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, - {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, - {file = 
"tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, - {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, - {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, - {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, -] -typeguard = [ - {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, - {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, -] -types-click = [ - {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"}, - {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, -] -types-flask = [ - {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"}, - {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"}, -] -types-jinja2 = [ - {file = "types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81"}, - {file = 
"types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2"}, -] -types-markupsafe = [ - {file = "types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1"}, - {file = "types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5"}, -] -types-pytz = [ - {file = "types-pytz-2022.5.0.0.tar.gz", hash = "sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, - {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = "sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, -] -types-pyyaml = [ - {file = "types-PyYAML-6.0.12.1.tar.gz", hash = "sha256:70ccaafcf3fb404d57bffc1529fdd86a13e8b4f2cf9fc3ee81a6408ce0ad59d2"}, - {file = "types_PyYAML-6.0.12.1-py3-none-any.whl", hash = "sha256:aaf5e51444c13bd34104695a89ad9c48412599a4f615d65a60e649109714f608"}, -] -types-requests = [ - {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, - {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, -] -types-urllib3 = [ - {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, - {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, -] -types-werkzeug = [ - {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"}, - {file = "types_Werkzeug-1.0.9-py3-none-any.whl", hash = "sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file 
= "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -tzdata = [ - {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, - {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, -] -tzlocal = [ - {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, - {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, -] -unidecode = [ - {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, - {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -vine = [ - {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, - {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, -] virtualenv = [ - {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"}, - {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -werkzeug = [ - {file = 
"Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, - {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, -] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - 
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = 
"wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = 
"wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] -wtforms = [ - {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, - {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, -] -xdoctest = [ - {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, - {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, ] diff --git a/pyproject.toml b/pyproject.toml index 5448c54c..86d868db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,71 +13,8 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.11,<3.12" -click = "^8.0.1" -flask = "2.2.2" -flask-admin = "*" -flask-bcrypt = "*" -flask-cors = "*" -flask-mail = "*" -flask-marshmallow = "*" -flask-migrate = "*" -flask-restful = "*" -werkzeug = "*" -# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} -# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"} -# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"} -sentry-sdk = "^1.10" -sphinx-autoapi = "^2.0" -# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"} -# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"} -flask-bpmn = {git 
= "https://github.com/sartography/flask-bpmn", rev = "main"} -mysql-connector-python = "^8.0.29" -pytest-flask = "^1.2.0" -pytest-flask-sqlalchemy = "^1.1.0" -psycopg2 = "^2.9.3" -typing-extensions = "^4.4.0" -connexion = {extras = [ "swagger-ui",], version = "^2"} -lxml = "^4.9.1" -marshmallow-enum = "^1.5.1" -marshmallow-sqlalchemy = "^0.28.0" -PyJWT = "^2.6.0" -gunicorn = "^20.1.0" -python-keycloak = "^2.5.0" -APScheduler = "^3.9.1" -Jinja2 = "^3.1.2" -RestrictedPython = "^6.0" -Flask-SQLAlchemy = "^3" - -# type hinting stuff -# these need to be in the normal (non dev-dependencies) section -# because if not then poetry export won't have them and nox -s mypy --pythons 3.10 -# will fail -types-Werkzeug = "^1.0.9" -types-PyYAML = "^6.0.12" -types-Flask = "^1.1.6" -types-requests = "^2.28.6" -types-pytz = "^2022.1.1" - -# https://github.com/dropbox/sqlalchemy-stubs/pull/251 -# someday get off github -# sqlalchemy-stubs = "^0.4" -# sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" } -# sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"} -# for now use my fork -sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } -simplejson = "^3.17.6" - [tool.poetry.dev-dependencies] -pytest = "^7.1.2" -coverage = {extras = ["toml"], version = "^6.1"} -safety = "^2.3.1" -mypy = ">=0.961" -typeguard = "^2.13.2" -xdoctest = {extras = ["colors"], version = "^1.0.1"} -sphinx = "^5.0.2" -sphinx-autobuild = ">=2021.3.14" pre-commit = "^2.20.0" flake8 = "^4.0.1" black = ">=21.10b0" @@ -89,71 +26,9 @@ bandit = "1.7.2" flake8-bugbear = "^22.10.25" flake8-docstrings = "^1.6.0" flake8-rst-docstrings = "^0.2.7" -# flask-sqlalchemy-stubs = "^0.2" -pep8-naming = "^0.13.2" -darglint = "^1.8.1" reorder-python-imports = "^3.9.0" pre-commit-hooks = "^4.0.1" -sphinx-click = "^4.3.0" -Pygments = "^2.10.0" pyupgrade = "^3.1.0" -furo = ">=2021.11.12" - 
-[tool.poetry.scripts] -spiffworkflow-backend = "spiffworkflow_backend.__main__:main" [tool.poetry.group.dev.dependencies] tomli = "^2.0.1" - -[tool.pytest.ini_options] -# ignore deprecation warnings from various packages that we don't control -filterwarnings = [ - # note the use of single quote below to denote "raw" strings in TOML - # kombu/utils/compat.py:82 - 'ignore:SelectableGroups dict interface is deprecated. Use select.', - # flask_marshmallow/__init__.py:34 - # marshmallow_sqlalchemy/convert.py:17 - 'ignore:distutils Version classes are deprecated. Use packaging.version instead.', - # connexion/spec.py:50 - 'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release', - # connexion/decorators/validation.py:16 - 'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.', - # connexion/apis/flask_api.py:236 - "ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3", - "ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3", - "ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3", - "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3" -] - -[tool.coverage.paths] -source = ["src", "*/site-packages"] -tests = ["tests", "*/tests"] - -[tool.coverage.run] -branch = true -source = ["spiffworkflow_backend", "tests"] - -[tool.coverage.report] -show_missing = true -fail_under = 50 - -[tool.mypy] -strict = true -disallow_any_generics = false -warn_unreachable = true -pretty = true -show_column_numbers = true -show_error_codes = true -show_error_context = true -plugins = "sqlmypy" - -# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option -implicit_reexport = true - -# allow for subdirs to NOT require __init__.py -namespace_packages = true -explicit_package_bases = false - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = 
"poetry.core.masonry.api" diff --git a/spiffworkflow-backend/bin/login_with_users b/spiffworkflow-backend/bin/login_with_users index 167c57f9..3883f046 100755 --- a/spiffworkflow-backend/bin/login_with_users +++ b/spiffworkflow-backend/bin/login_with_users @@ -28,7 +28,11 @@ REALM_NAME=${2-spiffworkflow} while read -r input_line; do if ! grep -qE '(^#|email)' <<<"$input_line" ; then username=$(awk -F '@' '{print $1}' <<<"$input_line") - access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME" || echo '') + password=$(awk -F ',' '{print $2}' <<<"$input_line") + if [[ -z "$password" ]]; then + password="$username" + fi + access_token=$("${script_dir}/get_token" "$username" "$password" "$REALM_NAME" || echo '') if [[ -z "$access_token" || "$access_token" == "null" ]]; then >&2 echo "ERROR: failed to get access token for '$username'" else diff --git a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 3d8d3db2..14b23cf8 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -44,6 +44,17 @@ if [[ "${1:-}" == "clean" ]]; then # TODO: check to see if the db already exists and we can connect to it. also actually clean it up. # start postgres in background with one db if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then + container_name="postgres-spiff" + container_regex="^postgres-spiff$" + if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then + echo ":: Found postgres container - $container_name" + if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then + echo ":: Stopping running container - $container_name" + docker stop $container_name + fi + echo ":: Removing stopped container - $container_name" + docker rm $container_name + fi if ! 
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres sleep 4 # classy diff --git a/spiffworkflow-backend/bin/run_process_model_with_api b/spiffworkflow-backend/bin/run_process_model_with_api index f8b3a6c7..a0ab07bc 100755 --- a/spiffworkflow-backend/bin/run_process_model_with_api +++ b/spiffworkflow-backend/bin/run_process_model_with_api @@ -27,6 +27,15 @@ fi modified_process_model_identifier=$(tr '/' ':' <<<"$process_model_identifier") +function check_result_for_error() { + local result="$1" + error_code=$(jq '.error_code' <<<"$result") + if [[ -n "$error_code" && "$error_code" != "null" ]]; then + >&2 echo "ERROR: Failed to run process instance. Received error: $result" + exit 1 + fi +} + function process_next_task() { local next_task="$1" @@ -37,6 +46,7 @@ function process_next_task() { if grep -qE "Manual ?Task" <<<"$task_type" && [[ "${task_state}" == "READY" ]]; then next_task=$(curl --silent -X PUT "${BACKEND_BASE_URL}/v1.0/tasks/${process_instance_id}/${task_guid}" -H "Authorization: Bearer $access_token") + check_result_for_error "$next_task" process_next_task "$next_task" elif [[ "$(jq '.ok' <<<"$next_task")" == "null" ]]; then echo -e "\n\nThe next task is not a Manual Task and requires user input. It must be completed manually." 
@@ -48,12 +58,13 @@ function process_next_task() { access_token=$("${script_dir}/get_token" "$username" "$password" "$realm_name") curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token" >/dev/null result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}" -H "Authorization: Bearer $access_token") -process_instance_id=$(jq '.id' <<<"$result") +process_instance_id=$(jq -r '.id' <<<"$result") if ! grep -qE '^[0-9]+$' <<<"$process_instance_id"; then >&2 echo "ERROR: Did not receive valid process instance id when instantiating process model. result was ${result}" exit 1 fi result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token") +check_result_for_error "$result" next_task=$(jq '.next_task' <<<"$result") process_next_task "$next_task" diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index 304008d0..df002ff4 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -19,8 +19,6 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService -# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec - # We need to call this before importing spiffworkflow_backend # otherwise typeguard cannot work. 
hence the noqa: E402 @@ -47,7 +45,8 @@ def app() -> Flask: def with_db_and_bpmn_file_cleanup() -> None: """Do it cleanly!""" meta = db.metadata - db.session.execute(db.update(BpmnProcessModel, values={"parent_process_id": None})) + db.session.execute(db.update(BpmnProcessModel).values(top_level_process_id=None)) + db.session.execute(db.update(BpmnProcessModel).values(direct_parent_process_id=None)) for table in reversed(meta.sorted_tables): db.session.execute(table.delete()) diff --git a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak index 4196add0..f6a39aae 100755 --- a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak +++ b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak @@ -54,9 +54,10 @@ backend_token=$(jq -r '.access_token' <<< "$result") function add_user() { local user_email=$1 local username=$2 - local user_attribute_one=$3 + local pass=$3 + local user_attribute_one=$4 - local credentials='{"type":"password","value":"'"${username}"'","temporary":false}' + local credentials='{"type":"password","value":"'"${pass}"'","temporary":false}' local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']' if [[ -n "$user_attribute_one" ]]; then @@ -79,18 +80,31 @@ while read -r input_line; do if ! grep -qE '^#' <<<"$input_line" ; then if [[ "$first_line_processed" == "false" ]]; then email_header=$(awk -F ',' '{print $1}' <<<"$input_line") + pass_header=$(awk -F ',' '{print $2}' <<<"$input_line") if [[ "$email_header" != "email" ]]; then >&2 echo "ERROR: the first column in the first row must be email." exit 1 fi - custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line") + if [[ "$pass_header" != "pass" ]]; then + >&2 echo "ERROR: the first column in the first row must be pass." 
+ exit 1 + fi + custom_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line") first_line_processed="true" elif [[ -n "$input_line" ]]; then echo "Importing: $input_line" user_email=$(awk -F ',' '{print $1}' <<<"$input_line") username=$(awk -F '@' '{print $1}' <<<"$user_email") - user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line") - http_code=$(add_user "$user_email" "$username" "$user_attribute_one") + + if [[ "$username" == "$ADMIN_USERNAME" || "$user_email" == "$ADMIN_USERNAME" ]]; then + >&2 echo "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}" + exit 1 + fi + + password=$(awk -F ',' '{print $2}' <<<"$input_line") + echo "Password: $password" + user_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line") + http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one") if [[ "$http_code" == "409" ]]; then user_info=$(curl --fail --silent --location --request GET "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users?username=${username}&exact=true" \ @@ -106,7 +120,7 @@ while read -r input_line; do -H 'Content-Type: application/json' \ -H "Authorization: Bearer $backend_token" - http_code=$(add_user "$user_email" "$username" "$user_attribute_one") + http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one") fi if [[ "$http_code" != "201" ]]; then >&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}" diff --git a/spiffworkflow-backend/keycloak/bin/start_keycloak b/spiffworkflow-backend/keycloak/bin/start_keycloak index 242a3375..53f6f17d 100755 --- a/spiffworkflow-backend/keycloak/bin/start_keycloak +++ b/spiffworkflow-backend/keycloak/bin/start_keycloak @@ -26,9 +26,10 @@ fi # https://stackoverflow.com/a/60579344/6090676 container_name="keycloak" -if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then 
+container_regex="^keycloak$" +if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then echo ":: Found container - $container_name" - if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then + if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then echo ":: Stopping running container - $container_name" docker stop $container_name fi diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 99e651b9..9bacd506 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -807,213 +807,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "3730e6ec-4b0c-4fbe-a34b-2cd43d8c9854", - "createdTimestamp" : 1678461819329, - "username" : "core10.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core10.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "225" ] - }, - "credentials" : [ { - "id" : "223cbe3b-d432-4707-b826-6220caa14bd7", - "type" : "password", - "createdDate" : 1678461819366, - "secretData" : "{\"value\":\"Mp81SeHhDQa2U/i/S2CfPnKvjwRDJCKZMgCQX3BkZWE/a6791XjXmwB8DE5qS8tiST68BQoQRuc1VCiNKL3zaQ==\",\"salt\":\"Jb0BB2tIQ+HUJQIFr82g9w==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "88e7ca9e-1825-4d4a-9f60-29368023c67b", - "createdTimestamp" : 1678461819411, - "username" : "core11.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core11.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "226" ] - }, - 
"credentials" : [ { - "id" : "46dc7656-b70b-4d86-80fc-aa08d807be2b", - "type" : "password", - "createdDate" : 1678461819447, - "secretData" : "{\"value\":\"hgBEI05fhPMVx47O9KmnrTvPomKJXK0IjEHZ30zM3fu6maT2fOHGh4+ti6MVhKqQeXKZR4wtC3i1RoqLNOsjpQ==\",\"salt\":\"BWxZnmTfzggGqzVKkFY+vQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "6504eeda-be24-488b-ace4-1d50a7a354bc", - "createdTimestamp" : 1678461819494, - "username" : "core12.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core12.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "227" ] - }, - "credentials" : [ { - "id" : "bde05120-10b5-4796-b559-9238847d2604", - "type" : "password", - "createdDate" : 1678461819527, - "secretData" : "{\"value\":\"njdHu9w1jeSvaNbdwVf0X+3TZaHmZVwUc+/TOAtv05eNGBIW9Vt1+500AsLReHS8lb/I3fglr5I9ZskYHUc0fA==\",\"salt\":\"lH6xJHf1jQGX1j4bYH6GXA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "ed249cd3-c66e-46e0-9184-1e6468b57afa", - "createdTimestamp" : 1678461819557, - "username" : "core13.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core13.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "228" ] - }, - "credentials" : [ { - "id" : "81b65ee8-6fcd-4cd6-8886-aa44feefa55f", - "type" : "password", - "createdDate" : 1678461819592, - "secretData" : 
"{\"value\":\"ywBsPI0pdoCOjNWinYNZQBBzL3NRp2u2jv3aXBGxneTo9v8XaVweGL52HIyTikdfmX46TEMIH6LQopaYFcwhng==\",\"salt\":\"GTw17rcE4UvB/Dx4UUkAog==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "1b7b3aa4-b0fe-46c7-a9a1-3fb3c99c7576", - "createdTimestamp" : 1678461819624, - "username" : "core14.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core14.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "229" ] - }, - "credentials" : [ { - "id" : "0c24ffe5-cb97-4b0d-a0d1-920de540742e", - "type" : "password", - "createdDate" : 1678461819658, - "secretData" : "{\"value\":\"3RXjoEUpqxH6RM0sZUf393H9nzyVADId8IWNru9fWgdQg6tHaZezRBZ/lRRERvvdmLiupQ3cMsL/HHvPRQA6tA==\",\"salt\":\"zkaBJY+Dvg5Az74MACBBUg==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "8e2b39a8-a744-4345-928f-da1a36f15f46", - "createdTimestamp" : 1678461819686, - "username" : "core15.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core15.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "230" ] - }, - "credentials" : [ { - "id" : "14a91e80-cec9-44cf-aa85-28e0043f660d", - "type" : "password", - "createdDate" : 1678461819720, - "secretData" : "{\"value\":\"JnP9MpLDM92LuzJnEVUy0vzm9LoSttezepYu4ANfJlmcS6cUvnnh1yDKm43I2YzM4+mXRdxJyoLZTk/ZpmshSQ==\",\"salt\":\"5CKz6mrqr4IaUeEuu/hR9Q==\",\"additionalParameters\":{}}", - "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "ffe3e131-9479-49d2-8125-83dc86a16478", - "createdTimestamp" : 1678461819751, - "username" : "core16.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core16.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "231" ] - }, - "credentials" : [ { - "id" : "cf010c6c-035e-4a2f-ab74-5617fd23c808", - "type" : "password", - "createdDate" : 1678461819786, - "secretData" : "{\"value\":\"WeZ+YxLVtjRhlLZnb6j3AfecmQEsvTm3iM8ZqQthgq9c4BuZ23qare3PEVlRCA1+Oj5sAOOS1hs9iab6ia49wQ==\",\"salt\":\"uai22Okju4dg7GfO7p3C1Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "94bcef08-2af1-4805-864d-cbabcd851d67", - "createdTimestamp" : 1678461819815, - "username" : "core17.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core17.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "232" ] - }, - "credentials" : [ { - "id" : "c7a58ff0-7c56-464b-9009-b6e845075087", - "type" : "password", - "createdDate" : 1678461819850, - "secretData" : "{\"value\":\"R53+DKM2eyUXDYJDjW9BtwdY+x0/CUhgUDDYjip7BvGAepzRqPvZVbCLqJjFf6YctO4Va7F65n4evd40GbO7fQ==\",\"salt\":\"U/ia7H+I4yeD3bpP1vnH6Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], 
- "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "7b86b997-de98-478c-8550-cfca65e40c33", - "createdTimestamp" : 1679060366901, - "username" : "core18.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core18.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "233" ] - }, - "credentials" : [ { - "id" : "55ca2bd7-6f60-4f04-be21-df6300ca9442", - "type" : "password", - "createdDate" : 1679060366954, - "secretData" : "{\"value\":\"hC/O8LJ8/y/nXLmRFgRazOX9PXMHkowYH1iHUB4Iw9jzc8IMMv8dFrxu7XBklfyz7CPc1bmgl0k29jygRZYHlg==\",\"salt\":\"4R17tmLrHWyFAMvrfLMETQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "3b81b45e-759b-4d7a-aa90-adf7b447208c", "createdTimestamp" : 1676302140358, @@ -1107,8 +900,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "5119e7f6-9b0f-4e04-824a-9c5ef87fdb42", - "createdTimestamp" : 1678126023934, + "id" : "8c6cf190-66e3-4c8d-aa06-1b9972ecd982", + "createdTimestamp" : 1680538438437, "username" : "core6.contributor", "enabled" : true, "totp" : false, @@ -1118,79 +911,10 @@ "spiffworkflow-employeeid" : [ "199" ] }, "credentials" : [ { - "id" : "f219e401-0fdb-4b73-be77-d01bb0caa448", + "id" : "1dadc9a8-6f7d-4795-bcc7-2b9d8aacb54a", "type" : "password", - "createdDate" : 1678126023967, - "secretData" : "{\"value\":\"zdr8Psnlti56oHo8f/wuuZb5p7ZRpDQKHGFsrkjtl0VaOn2uNOeUmCqXLQ4UGyGssK8Qn8s8R62yrFKUNeeSjA==\",\"salt\":\"9MlVZL9xo3OWvlsvyXt0UQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - 
"groups" : [ ] - }, { - "id" : "89d57569-1a90-412a-ba01-aa8ff19ed171", - "createdTimestamp" : 1678461819085, - "username" : "core7.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core7.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "222" ] - }, - "credentials" : [ { - "id" : "cfeb64ec-a38a-4f95-b0cd-28b5501524d8", - "type" : "password", - "createdDate" : 1678461819121, - "secretData" : "{\"value\":\"w4WKqWXTlin6MPQi0mO+Bvktb2zuMdIylqNNxYgBCnd5vwzq2widp7G9f3wz8Iy0wY8K2rqBjdSmmbZ7fJ8//Q==\",\"salt\":\"SRuRkx3572cDGoWhqAQGLQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "81efd609-b6ae-42ec-800e-d6fcca2f8282", - "createdTimestamp" : 1678461819150, - "username" : "core8.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core8.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "223" ] - }, - "credentials" : [ { - "id" : "0b476f6f-7aa4-4f75-bf5c-ac47521f3900", - "type" : "password", - "createdDate" : 1678461819185, - "secretData" : "{\"value\":\"ALWI40OEZUhMJ1CQTV9wSrwQUWfYNiYbN2JTmCUfbLUcUbY+rTrKOfAn9Mc/bCEFJomiTb9u/eqnkKX/lCGgew==\",\"salt\":\"wW2T8PkpCnnPfMNwpPVUVQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "a1233c9f-e59a-48dc-aaa7-1513f1aa5654", - "createdTimestamp" : 1678461819225, - "username" : "core9.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - 
"email" : "core9.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "224" ] - }, - "credentials" : [ { - "id" : "907b9d46-b8a3-4a14-ab89-b07d2c4d431a", - "type" : "password", - "createdDate" : 1678461819266, - "secretData" : "{\"value\":\"v9aFLHzLyiwWuAxNeVtRjtXzRtug6KU2f19SbS8dBdPC0mlHORoLYXy6VoAMdcTv8bfrW6e9iCgqWnXdXU6yMg==\",\"salt\":\"giVxblJWbFNNPiZZKxWYxg==\",\"additionalParameters\":{}}", + "createdDate" : 1680538438553, + "secretData" : "{\"value\":\"YbDgbKbiIjHB76RAJN7Q1AWYkdNvDMHUC1P3RJ6AV8ASEUr6fJ8U11WroIMmkiWs1TlewJi0mF4rWBsVkLzjlg==\",\"salt\":\"BbrA/rjtvxwrZAsS3BYARA==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -1528,29 +1252,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "d123d384-66a4-4db5-9dbb-d73c12047001", - "createdTimestamp" : 1678997616280, - "username" : "finance.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "finance.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "128" ] - }, - "credentials" : [ { - "id" : "b680f5c5-c2de-4255-9d23-7e18cff3ac4e", - "type" : "password", - "createdDate" : 1678997616336, - "secretData" : "{\"value\":\"4kasmb11Sv62rInh8eFUhS3rGYNymzsvxzfsEIWGYhnlisYuo1iTS2opv/kET/NyJlsYrfwc7yrIqSHvkUHkkA==\",\"salt\":\"q/ees3a4K+3K11olnfPzCQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "f6d2488a-446c-493b-bbe8-210ede6f3e42", "createdTimestamp" : 1674148694899, @@ -1661,8 +1362,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "4f3fadc8-f0a3-45fb-8710-c054385b866b", - 
"createdTimestamp" : 1676302141941, + "id" : "1a8cb2a3-09ec-4f24-9f5e-13bab170c4a9", + "createdTimestamp" : 1680210955180, "username" : "infra.project-lead", "enabled" : true, "totp" : false, @@ -1672,10 +1373,10 @@ "spiffworkflow-employeeid" : [ "130" ] }, "credentials" : [ { - "id" : "e422f671-1693-4469-8cdc-0ea7dcb27c66", + "id" : "1283acee-35b4-40cd-a1cb-9dd3c41dfd3c", "type" : "password", - "createdDate" : 1676302141975, - "secretData" : "{\"value\":\"gWFNRdQhmsN2IMyaZEHgTk8A0mna72VYfeWk7PX31MhBQjQIGsctuEKK3TNxiB046LM8ZiUntA59sTPBgouVeQ==\",\"salt\":\"AtU0bmAz1z4f7wh/Z/ru1Q==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955239, + "secretData" : "{\"value\":\"7wW+4snc/57IFEyCApWM7jwxJSLAlndSy/F3rSE0KOv/StS4HOByov02uDuTQ3h4CbW+zVp4+EqPFJiNWgf5WA==\",\"salt\":\"/BYeWVg0iy8Ou/YroWoeSw==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -1822,8 +1523,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "9a4d176c-e61e-4392-8c50-a04988606aa6", - "createdTimestamp" : 1678461818383, + "id" : "ec8a613d-de94-4696-910d-635ab0d90fc1", + "createdTimestamp" : 1680538439046, "username" : "infra6.sme", "enabled" : true, "totp" : false, @@ -1833,10 +1534,10 @@ "spiffworkflow-employeeid" : [ "212" ] }, "credentials" : [ { - "id" : "c381e58c-3e06-4e10-bd23-46f258c1c91f", + "id" : "59e02828-28cb-4555-9497-0b9f674ecd43", "type" : "password", - "createdDate" : 1678461818420, - "secretData" : "{\"value\":\"m17+awcU3Ezhfi/gBK0xyxvnGKHads95lhn7uxvEXaPCJF0ioN8C27tH1RwU1w9ptdWjWKWAM9dcimIegy7M7g==\",\"salt\":\"0kCljoos7qzCnVdv+3IMjQ==\",\"additionalParameters\":{}}", + "createdDate" : 1680538439110, + "secretData" : "{\"value\":\"DFa3Yz3ZRdFGmAFqiq6Sg+s673FFnjVGOzS/e4SnDAdv1JzavYka2QngSHDvZfi5bO7ecDE0+idwJP/vtcMjyQ==\",\"salt\":\"iSHEw6brz62W6RqGULCyug==\",\"additionalParameters\":{}}", "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -1884,6 +1585,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "992c7cfb-377f-4d80-b399-edf218ad640e", + "createdTimestamp" : 1679595782179, + "username" : "jamescheung", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "jamescheung@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "234" ] + }, + "credentials" : [ { + "id" : "3e62811d-d294-4c2b-a681-3a93ea0f8bc2", + "type" : "password", + "createdDate" : 1679595782238, + "secretData" : "{\"value\":\"oFDel18kGBSpCvfrni1SSY2Ti3eJmYxCuwcar5PoBHECXISIbuz0t5i97COiXCI52vxSkorwl3c8r2j+77B2kw==\",\"salt\":\"tVvRYyNH4ktBXNjmfP6JtQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "2df44301-506a-4053-9ece-830d2b3c295b", "createdTimestamp" : 1676302142640, @@ -2031,8 +1755,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "6e9129f9-34f8-43bb-953b-de4156d425ba", - "createdTimestamp" : 1676302142894, + "id" : "7596232c-47bd-40db-bc0d-fbe984ebb22a", + "createdTimestamp" : 1680210955394, "username" : "legal.project-lead", "enabled" : true, "totp" : false, @@ -2042,10 +1766,10 @@ "spiffworkflow-employeeid" : [ "133" ] }, "credentials" : [ { - "id" : "b17d488c-7665-40d4-b758-c392ecc9e793", + "id" : "e379cc51-564f-4950-92dd-7fa18cff5d3b", "type" : "password", - "createdDate" : 1676302142929, - "secretData" : "{\"value\":\"FiEmNY1c+4xOepA3lzOzzaaNgthk9rMz1xXiV+5F2DUwBtoEqFRrlGTdHVVz5XjrcFhgW15+R3rSEfHsCLJTiA==\",\"salt\":\"xYYuuodywbhxqXcj3XMqKw==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955428, + "secretData" : 
"{\"value\":\"k+No1LvsqQmYTOQzuXN9oeVKne+FTCNAe4lZ4qVZq2M4pSRqKeySJWdtLYjxzHRfLufVpir6gXRCvs7ZiUL9GQ==\",\"salt\":\"XQ469z9b2a8Jw1IeZc9NaQ==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -2192,8 +1916,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "a368625b-b905-4e0d-83f6-dfe707b6320a", - "createdTimestamp" : 1678461818455, + "id" : "a8f54828-b188-41e6-80a6-920cab95f7db", + "createdTimestamp" : 1680538439162, "username" : "legal6.sme", "enabled" : true, "totp" : false, @@ -2203,56 +1927,10 @@ "spiffworkflow-employeeid" : [ "213" ] }, "credentials" : [ { - "id" : "53a21d32-1da5-45f1-a7d9-e45304b213d1", + "id" : "8e70e379-7974-40b6-ba31-08a1632a1a08", "type" : "password", - "createdDate" : 1678461818490, - "secretData" : "{\"value\":\"9zEoc1uV0QXsMvAS8lA1xdh4bOqcPdSAItg7zBFr5i+In/xOBtpRM0277nMgDNLtar4s+HRhytWgJ7OidVmjsw==\",\"salt\":\"ahEvQYvH0bHbT/uHz1I9QA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "e02e085f-eb50-4fe3-844c-24e41479ab47", - "createdTimestamp" : 1678461818523, - "username" : "legal7.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "legal7.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "214" ] - }, - "credentials" : [ { - "id" : "f5377236-8b0b-4be4-8dab-afb2c4a6470f", - "type" : "password", - "createdDate" : 1678461818557, - "secretData" : "{\"value\":\"dyQhBsrNeYHkbJudEjiay3duLFO9B66l0d+2L26S+/HMGuKfuI4NT+gju1MfQPVJhyC01FH7EmDGGS8I45i2jw==\",\"salt\":\"kU4NM5QOWvGSX+kVyvwSoA==\",\"additionalParameters\":{}}", - "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "4de624bd-485f-49d5-817c-ba66c31be7a9", - "createdTimestamp" : 1678461818589, - "username" : "legal8.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "legal8.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "215" ] - }, - "credentials" : [ { - "id" : "5d71a02b-2f4b-484d-9125-a4454a17a800", - "type" : "password", - "createdDate" : 1678461818632, - "secretData" : "{\"value\":\"UH+hrjz9F+X0vQlbgzaFiZBA5uol9Lnjs1/5VpBnbWuISF6MAlxj2fmbnZbw4ILVSllaQvVSFaD4YUxbnRhUmw==\",\"salt\":\"MuAF2Rl7IOxOgZ7Xbqs3RQ==\",\"additionalParameters\":{}}", + "createdDate" : 1680538439219, + "secretData" : "{\"value\":\"Mwqt3FKuQ1q+OUpb8dIOOGwTKNmVuOCBnnJhSzFHUSa/9nrfWuL2GXCspHwPnMP4fF1eEXAg5B8SBC8cL/paEQ==\",\"salt\":\"o5Sj16r/DznxOzGJi6xJJg==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -2329,69 +2007,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "058b60f8-799e-48b0-a2b7-2e65e7a35724", - "createdTimestamp" : 1675718484672, - "username" : "mike", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "mike@sartography.com", - "credentials" : [ { - "id" : "669f5421-843d-411d-9f24-1be41e545e52", - "type" : "password", - "createdDate" : 1675718484715, - "secretData" : "{\"value\":\"YILRiRdrsy8CA716ZQazpQOf7mpiXGaYnR26ra3pSjmHkZS9tsePTRwU2OIGPwbN1LKJcIzrpfEP7cVW2Lm17w==\",\"salt\":\"7mfD1X7Hns/5pPgHb9uZ1Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - 
"disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "97843876-e1b6-469a-bab4-f9bce4aa5936", - "createdTimestamp" : 1678461819014, - "username" : "mobile.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "mobile.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "221" ] - }, - "credentials" : [ { - "id" : "96c00769-4348-4ad3-82c5-f34124602c17", - "type" : "password", - "createdDate" : 1678461819049, - "secretData" : "{\"value\":\"E7nVydRqQ+TZs54VmJcT4AjjtT1la7PmQbOnylqTPkkcOdLRmZbNTw/K429lOhqUHX7y1prC3OjGdY1VI8bjsg==\",\"salt\":\"D61yv2zS3Bi8epVKjRpWQw==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "9d23748e-23a7-4c48-956c-64da75871277", - "createdTimestamp" : 1675718484779, - "username" : "natalia", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "natalia@sartography.com", - "credentials" : [ { - "id" : "476024e5-62e4-48b6-afbb-cc2834fae4c7", - "type" : "password", - "createdDate" : 1675718484823, - "secretData" : "{\"value\":\"FfrpgES+XI2w4NRe1aBmolPFcERbEUDXZcFtUWucrbhBspQLYNaN2VLmeDRV0VcT47Bn8dqjU11ct64WDtffWA==\",\"salt\":\"7rZd3fqY54i1eoNyXCcZ1w==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "7f34beba-e1e1-458a-8d23-eb07d6e3800c", "createdTimestamp" : 1678126023154, @@ -2415,29 +2030,6 @@ "realmRoles" : [ 
"default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "e8e67210-5088-46bc-97db-09dbcaf9de97", - "createdTimestamp" : 1678461818939, - "username" : "nomos.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "nomos.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "220" ] - }, - "credentials" : [ { - "id" : "8139f9b8-bad9-41d2-b3c6-589a2c11bf45", - "type" : "password", - "createdDate" : 1678461818975, - "secretData" : "{\"value\":\"6g5XIaFghMzx8CFYO6VJLGpUqBRiAEwFklZSI+uzJ5vrMsDvrcGjDuWtY+lmRO4lKqy30lBvqhMFvPT6pCxF3g==\",\"salt\":\"dT+XvwD+hxUwRAJCZFFYiA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "df72b3d2-07fd-4cb0-a447-a1c433db49d5", "createdTimestamp" : 1676302143785, @@ -2623,8 +2215,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "07f7a010-7542-4c2f-adf8-04b39433181d", - "createdTimestamp" : 1678461818663, + "id" : "b5bd1dc1-308d-4912-b3e4-92bf5fc45ed5", + "createdTimestamp" : 1680538439258, "username" : "peopleops.partner6.sme", "enabled" : true, "totp" : false, @@ -2634,10 +2226,10 @@ "spiffworkflow-employeeid" : [ "216" ] }, "credentials" : [ { - "id" : "867e9236-3a15-4198-b085-d36a7fa859e9", + "id" : "c719418c-b203-4056-9e19-43c5e87d1d43", "type" : "password", - "createdDate" : 1678461818713, - "secretData" : "{\"value\":\"kmQkAD459XkLCGaWWTr1rrwZYQ2gQ4k2xTroJZAyHmWvBBnKg+a74cRaW2Y3dnzcGTlcprtuMvwYVfq7HIOkmg==\",\"salt\":\"uKORqhpJJnceOf/q56BiSA==\",\"additionalParameters\":{}}", + "createdDate" : 1680538439300, + "secretData" : 
"{\"value\":\"pzmtPn2OllnAYKIIS2M38n0UFrtbkX5zN44DpI/PrzmnxRgT2TvlJmjCtxp5HRUi3lngT6Jdr3IvqpO5o93Y5g==\",\"salt\":\"1WKPI8ktFMZoLCAv2ir5+A==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -2646,77 +2238,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "5d41b5b7-bc3c-42fe-b20b-56a7c6cd3801", - "createdTimestamp" : 1678461818743, - "username" : "peopleops.partner7.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner7.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "217" ] - }, - "credentials" : [ { - "id" : "745d419f-c6de-4504-9c8e-c3f7b1ac747e", - "type" : "password", - "createdDate" : 1678461818778, - "secretData" : "{\"value\":\"myjshlqPW/3DpwC5X4vsAaqcsisdKwqr+CQXP18mt3AQMzqipHJaVAEAJzkZS4j42VB/XAvh0olMxb8Vapyw3g==\",\"salt\":\"jNpX6DyT5Tt/5dPXYiQfpQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "73523c93-6104-4494-b1c8-2af6087bcdd9", - "createdTimestamp" : 1678461818810, - "username" : "peopleops.partner8.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner8.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "218" ] - }, - "credentials" : [ { - "id" : "e839763b-aba2-4b4f-b715-b2c061b7430f", - "type" : "password", - "createdDate" : 1678461818843, - "secretData" : "{\"value\":\"M0KfNRU/4qt1WL/cGiSm6sKfN9PTK+6JiV96Y55Zg5CYaXH0ihTyGo62wS4T4YuyMm6/yTKz7+w3gdU4Zg/3Uw==\",\"salt\":\"sd/JEXtWTW4PetXzEBCNQA==\",\"additionalParameters\":{}}", - "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "cdff7ae3-72eb-45b6-9424-6f56df9c3b1c", - "createdTimestamp" : 1678461818873, - "username" : "peopleops.partner9.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner9.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "219" ] - }, - "credentials" : [ { - "id" : "5ff8e042-a72e-4b46-9efa-e1910cd09d13", - "type" : "password", - "createdDate" : 1678461818908, - "secretData" : "{\"value\":\"q/hdvLKerMbnpe6yjC3VxDqCFi0ne7rD5A1K39EM+XgD6bFI62qKW5JIBB5BaGz/GrWYw7ipwMBaOvLBOubSkg==\",\"salt\":\"vfnCbi47kaYpILxbL0b3Tg==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "dbf941e7-0b45-4bc6-ae9e-d7153d32ce47", - "createdTimestamp" : 1676302143401, + "id" : "b57086d7-f301-4e11-ab02-60b02c79163a", + "createdTimestamp" : 1680210955550, "username" : "peopleops.project-lead", "enabled" : true, "totp" : false, @@ -2726,10 +2249,10 @@ "spiffworkflow-employeeid" : [ "147" ] }, "credentials" : [ { - "id" : "85fa4e0a-2f59-4c51-8e8b-20acb9813ab9", + "id" : "e17da85a-70ab-4f7d-8cff-6f4826f35bbc", "type" : "password", - "createdDate" : 1676302143434, - "secretData" : "{\"value\":\"FBi/INvDb50hA4QNRcSbd5gc10Dspq7QppiCvQ6ualnH/MlTyVq5CL9o1BWya0xxVdG/4jxFkUlgpN1w5liZ1Q==\",\"salt\":\"s2yJeI/k96iSy8zHAdTVSQ==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955585, + "secretData" : 
"{\"value\":\"Llqk65fjzqPK6koWNRBPY6S1/T3GXgc4PHJSw/qlH7qzEQALzkKqMG1/C0s2EkAonj8WpIzZyEZKzRgMGqgh1g==\",\"salt\":\"1PoYqx4FYOST9EUEqbf9mA==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -3013,6 +2536,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "62862d90-e996-48ac-a8ee-5af43356dca4", + "createdTimestamp" : 1680538439355, + "username" : "ppg.ba6.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba6.sme@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "236" ] + }, + "credentials" : [ { + "id" : "b242e740-4d6f-412a-9719-84da41c8d1ed", + "type" : "password", + "createdDate" : 1680538439405, + "secretData" : "{\"value\":\"oveDoHPfm0m+SkrY3rLyFfIOK1tH+Fc8y5KC+CGMccNIPqLN5p7ytXcMjjcIhRdxAW9CzCGFUKhVnGAXa/PGIQ==\",\"salt\":\"kQZeYzICjjs6DO2hEgEbDw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "f56fe387-d153-42c2-880a-6726bd624bae", "createdTimestamp" : 1676302144802, @@ -3221,8 +2767,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "c684e919-6ae0-4031-a160-8e90338567b3", - "createdTimestamp" : 1678461818310, + "id" : "3ac1954a-713a-47c7-bd41-d618063a1053", + "createdTimestamp" : 1680538438655, "username" : "security6.sme", "enabled" : true, "totp" : false, @@ -3232,10 +2778,10 @@ "spiffworkflow-employeeid" : [ "211" ] }, "credentials" : [ { - "id" : "aff2f083-f6aa-4f93-899f-aaa3119a9739", + "id" : "e3ceb7b3-617d-4e52-980c-e5edd9ba48fb", "type" : "password", - "createdDate" : 1678461818346, - "secretData" : 
"{\"value\":\"7XGMuiylxKmwDwJZtiPNLllERwN8KLoILLE/BjjXOkqN3c+C+KYgNxPhrDt8dG9PDYOq/59vh/4E2y82GLaoEw==\",\"salt\":\"ufzmAcoMLoi0jtRHwGDadg==\",\"additionalParameters\":{}}", + "createdDate" : 1680538438713, + "secretData" : "{\"value\":\"iD1TfnQecNf0giE/5Ji0JQL/z91X4QmeqtiJKp/Dsfc55vPVh7llJlVygL7x2Ctcl4/+X10XgtSUkdAvdi3Tvw==\",\"salt\":\"6c0hHyISU/BOwh8vntCIfg==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -3370,21 +2916,21 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "cb99a5c4-2c28-4b19-b8c7-635b757fc817", - "createdTimestamp" : 1678461818231, - "username" : "waku.research.project-lead", + "id" : "654d55c5-2380-456f-a99b-936aa8cce4ee", + "createdTimestamp" : 1680538439445, + "username" : "web.project-lead", "enabled" : true, "totp" : false, "emailVerified" : false, - "email" : "waku.research.project-lead@status.im", + "email" : "web.project-lead@status.im", "attributes" : { - "spiffworkflow-employeeid" : [ "164" ] + "spiffworkflow-employeeid" : [ "235" ] }, "credentials" : [ { - "id" : "ed5fc4a1-d574-4940-b5e4-3a1ad9d122ba", + "id" : "c28af9d4-37bb-445a-a8cc-12a87bd8dd2c", "type" : "password", - "createdDate" : 1678461818268, - "secretData" : "{\"value\":\"K7MRRw2gO4bXHJH8U4cZU2rcVQT/hxw7kMHqN1uDae9FVqFEKh014qiwePOHr5K1xjUw8uU5e/d3HCcwhuRUQw==\",\"salt\":\"R4FdsDK6NvelgQ8gH7Me0g==\",\"additionalParameters\":{}}", + "createdDate" : 1680538439501, + "secretData" : "{\"value\":\"1ug7sJNXy9qUby6hABKyLJ8R0xa1pVldXFltuO6Xtqe7qIt9+eUbhN2o9dZ8vk5/aPIFaaIcQPOFZdaKOE/XWw==\",\"salt\":\"F3utYf4viApmPmC6FSZ0vA==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -4624,7 +4170,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ 
"oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4642,7 +4188,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4732,7 +4278,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "38a6b336-b026-46be-a8be-e8ff7b9da407", + "id" : "62d7bb2a-5919-48b2-a9f9-511ecf5474c7", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4754,7 +4300,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eb9fe753-cd35-4e65-bb34-e83ba7059566", + "id" : "7675760b-666a-4b8c-a9b8-da1e01c207fe", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4783,7 +4329,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "aa9c74f7-0426-4440-907f-4aa0f999eb1e", + "id" : 
"34e18ea8-f515-46dc-9dbf-5b79f8154564", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4805,7 +4351,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eb2a0849-c316-46bc-8b06-fd0cc50e3f32", + "id" : "933e581c-56d8-4614-b2a3-d2db10397ea0", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4827,7 +4373,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8f064003-823b-4be1-aa66-7324bf38c741", + "id" : "0986dc8c-4bcf-477f-8ba2-3cac02ea656f", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4849,7 +4395,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eef22678-b09c-4ca8-bdcf-90ea44ff0120", + "id" : "534381e4-b0b9-43b2-9ac5-9f1e006b5920", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4871,7 +4417,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4367f263-ef2c-426e-b5cd-49fff868ea1a", + "id" : "922e84ab-85db-494a-8a8c-84d3b0c675f4", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4893,7 +4439,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b2e9c608-1779-4c03-b32a-03c77450abae", + "id" : "24b1b409-b6fc-44dc-9a97-93b2f4a78c89", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4916,7 +4462,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "a8c79324-1881-4bb0-a8a2-83dfd54cacd1", + "id" : "c015a916-a45b-4797-a466-2399164da6fe", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4938,7 +4484,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d1aa83c6-da36-4cb6-b6ed-f6ec556df614", + "id" : "fc7aec31-855b-4993-b770-57660ff0524f", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4974,7 +4520,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2afecfef-4bfb-4842-b338-7ed032a618d2", + "id" : "9769d765-42c8-4391-a7ec-aa24f0e84040", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -5010,7 +4556,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "34dc1854-4969-4065-90e6-fef38b0dea98", + "id" : "49a937cc-9d51-43d0-a379-67aaae38c51a", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -5039,7 +4585,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "40557323-dbbc-48ee-9ed1-748b11c9628d", + "id" : "1a766b69-7ead-442a-84a4-083cd84949cd", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -5054,7 +4600,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d18b5c50-39fa-4b11-a7d2-0e6768e275c1", + "id" : "e4ac0543-cfb6-4232-947d-52b8615e0629", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is 
not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -5077,7 +4623,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "976be80d-a88b-412c-8ad2-9ebe427793d4", + "id" : "86247ee8-b507-406b-9d32-3c68c80084a5", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -5099,7 +4645,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "83b3a411-ff7c-4cba-845a-9554c536d6b1", + "id" : "70ef5a26-e3bb-4ba7-a05a-d205b0a3836c", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -5121,7 +4667,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1cb835a6-b38c-4f29-a6d8-d04d0a84d05e", + "id" : "89abf09a-bfb4-4dea-b164-ca7c563b4009", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -5137,7 +4683,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7ec06c82-6802-4ff4-a3ab-9b6a0b8dbc4b", + "id" : "52d31bf0-dcb6-4b01-a252-b2ba705df036", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -5173,7 +4719,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f3bc2f7b-2074-4d93-9578-3abf648a6681", + "id" : "22041b6b-6d9e-43eb-8d2a-94a3052c49aa", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -5209,7 +4755,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e62e031b-9922-4682-b867-bc5c3a4a7e99", + "id" : "153aaf25-b6d9-42b4-9740-f63c94c16626", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -5225,13 +4771,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "c449f0aa-5f3c-4107-9f04-3222fa93a486", + "id" : "e0075b39-a2ad-47de-9ee6-e61073387e71", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" 
: "f7a6ed54-0ab8-4f29-9877-960bd65bf394", + "id" : "aa24bff3-bd25-4b2a-973f-63fea5c21dd1", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/admin b/spiffworkflow-backend/keycloak/test_user_lists/admin index aa676cd9..a764901c 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/admin +++ b/spiffworkflow-backend/keycloak/test_user_lists/admin @@ -1,2 +1,4 @@ email,spiffworkflow-employeeid admin@spiffworkflow.org +jason@sartography.com +kevin@sartography.com diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 1e280bae..9b587465 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -1,15 +1,9 @@ -email,spiffworkflow-employeeid -admin@spiffworkflow.org -alex@sartography.com,111 -dan@sartography.com,115 -daniel@sartography.com -elizabeth@sartography.com -j@sartography.com -jason@sartography.com -jon@sartography.com -kb@sartography.com -kevin@sartography.com -madhurya@sartography.com,160 -madhurya@ymail.com,161 -mike@sartography.com -natalia@sartography.com +email,pass,spiffworkflow-employeeid +alex@sartography.com,,111 +dan@sartography.com,,115 +daniel@sartography.com,, +elizabeth@sartography.com,, +j@sartography.com,, +jon@sartography.com,, +kb@sartography.com,, +madhurya@sartography.com,,160 diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 5af7736d..eb866ed7 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -1,115 +1,97 @@ -email,spiffworkflow-employeeid +email,pass,spiffworkflow-employeeid # admin@spiffworkflow.org amir@status.im -app.program-lead@status.im,121 -codex-a1.sme@status.im,209 -codex.project-lead@status.im,153 
-codex.sme@status.im,185 -codex1.sme@status.im,186 -codex2.sme@status.im,187 -codex3.sme@status.im,188 -codex4.sme@status.im,189 -codex5.sme@status.im,190 -core-a1.contributor@status.im,202 -core-a2.contributor@status.im,203 -core1.contributor@status.im,155 -core10.contributor@status.im,225 -core11.contributor@status.im,226 -core12.contributor@status.im,227 -core13.contributor@status.im,228 -core14.contributor@status.im,229 -core15.contributor@status.im,230 -core16.contributor@status.im,231 -core17.contributor@status.im,232 -core18.contributor@status.im,233 -core2.contributor@status.im,156 -core3.contributor@status.im,157 -core4.contributor@status.im,158 -core5.contributor@status.im,159 -core6.contributor@status.im,199 -core7.contributor@status.im,222 -core8.contributor@status.im,223 -core9.contributor@status.im,224 -core@status.im,113 +app.program-lead@status.im,,121 +codex-a1.sme@status.im,,209 +codex.project-lead@status.im,,153 +codex.sme@status.im,,185 +codex1.sme@status.im,,186 +codex2.sme@status.im,,187 +codex3.sme@status.im,,188 +codex4.sme@status.im,,189 +codex5.sme@status.im,,190 +core-a1.contributor@status.im,,202 +core-a2.contributor@status.im,,203 +core1.contributor@status.im,,155 +core2.contributor@status.im,,156 +core3.contributor@status.im,,157 +core4.contributor@status.im,,158 +core5.contributor@status.im,,159 +core6.contributor@status.im,core6.contributorx,199 +core@status.im,,113 dao.project.lead@status.im -desktop-a1.sme@status.im,210 +desktop-a1.sme@status.im,,210 desktop.program.lead@status.im -desktop.project-lead@status.im,192 +desktop.project-lead@status.im,,192 desktop.project.lead@status.im -desktop.sme@status.im,193 -desktop1.sme@status.im,194 -desktop2.sme@status.im,195 -desktop3.sme@status.im,196 -desktop4.sme@status.im,197 -desktop5.sme@status.im,198 -fin@status.im,118 -finance.project-lead@status.im,128 +desktop.sme@status.im,,193 +desktop1.sme@status.im,,194 +desktop2.sme@status.im,,195 +desktop3.sme@status.im,,196 
+desktop4.sme@status.im,,197 +desktop5.sme@status.im,,198 +fin@status.im,,118 finance_user1@status.im -fluffy.project-lead@status.im,162 -harmeet@status.im,109 -infra-a1.sme@status.im,204 -infra.project-lead@status.im,130 -infra.sme@status.im,119 -infra1.sme@status.im,131 -infra2.sme@status.im,132 -infra3.sme@status.im,167 -infra4.sme@status.im,175 -infra5.sme@status.im,176 -infra6.sme@status.im,212 +fluffy.project-lead@status.im,,162 +harmeet@status.im,,109 +infra-a1.sme@status.im,,204 +infra.project-lead@status.im,infra.project-leadx,130 +infra.sme@status.im,,119 +infra1.sme@status.im,,131 +infra2.sme@status.im,,132 +infra3.sme@status.im,,167 +infra4.sme@status.im,,175 +infra5.sme@status.im,,176 +infra6.sme@status.im,infra6.smex,212 jakub@status.im +jamescheung@status.im,,234 jarrad@status.im -lead@status.im,114 -legal-a1.sme@status.im,205 -legal.project-lead@status.im,133 -legal.sme@status.im,125 -legal1.sme@status.im,134 -legal2.sme@status.im,165 -legal3.sme@status.im,166 -legal4.sme@status.im,177 -legal5.sme@status.im,178 -legal6.sme@status.im,213 -legal7.sme@status.im,214 -legal8.sme@status.im,215 -logos.program-lead@status.im,160 -manuchehr@status.im,110 -mobile.project-lead@status.im,221 -nimbus.program-lead@status.im,161 -nomos.project-lead@status.im,220 -peopleops.partner-a1.sme@status.im,208 -peopleops.partner.sme@status.im,148 -peopleops.partner1.sme@status.im,149 -peopleops.partner2.sme@status.im,173 -peopleops.partner3.sme@status.im,174 -peopleops.partner4.sme@status.im,181 -peopleops.partner5.sme@status.im,182 -peopleops.partner6.sme@status.im,216 -peopleops.partner7.sme@status.im,217 -peopleops.partner8.sme@status.im,218 -peopleops.partner9.sme@status.im,219 -peopleops.partner@status.im,150 -peopleops.project-lead@status.im,147 -peopleops.talent.sme@status.im,143 -peopleops.talent1.sme@status.im,142 -peopleops.talent@status.im,141 -ppg.ba-a1.sme@status.im,207 -ppg.ba.project-lead@status.im,137 -ppg.ba.sme@status.im,138 -ppg.ba1.sme@status.im,170 
-ppg.ba2.sme@status.im,171 -ppg.ba3.sme@status.im,172 -ppg.ba4.sme@status.im,200 -ppg.ba5.sme@status.im,201 -ppg.ba@status.im,127 -sasha@status.im,112 -security-a1.sme@status.im,206 -security.project-lead@status.im,151 -security.sme@status.im,123 -security1.sme@status.im,135 -security2.sme@status.im,168 -security3.sme@status.im,169 -security4.sme@status.im,179 -security5.sme@status.im,180 -security6.sme@status.im,211 -services.lead@status.im,122 -vac.program-lead@status.im,163 -waku.research.project-lead@status.im,164 +lead@status.im,,114 +legal-a1.sme@status.im,,205 +legal.project-lead@status.im,legal.project-leadx,133 +legal.sme@status.im,,125 +legal1.sme@status.im,,134 +legal2.sme@status.im,,165 +legal3.sme@status.im,,166 +legal4.sme@status.im,,177 +legal5.sme@status.im,,178 +legal6.sme@status.im,legal6.smex,213 +logos.program-lead@status.im,,160 +manuchehr@status.im,,110 +nimbus.program-lead@status.im,,161 +peopleops.partner-a1.sme@status.im,,208 +peopleops.partner.sme@status.im,,148 +peopleops.partner1.sme@status.im,,149 +peopleops.partner2.sme@status.im,,173 +peopleops.partner3.sme@status.im,,174 +peopleops.partner4.sme@status.im,,181 +peopleops.partner5.sme@status.im,,182 +peopleops.partner6.sme@status.im,peopleops.partner6.smex,216 +peopleops.partner@status.im,,150 +peopleops.project-lead@status.im,peopleops.project-leadx,147 +peopleops.talent.sme@status.im,,143 +peopleops.talent1.sme@status.im,,142 +peopleops.talent@status.im,,141 +ppg.ba-a1.sme@status.im,,207 +ppg.ba.project-lead@status.im,,137 +ppg.ba.sme@status.im,,138 +ppg.ba1.sme@status.im,,170 +ppg.ba2.sme@status.im,,171 +ppg.ba3.sme@status.im,,172 +ppg.ba4.sme@status.im,,200 +ppg.ba5.sme@status.im,,201 +ppg.ba6.sme@status.im,ppg.ba6.smex,236 +ppg.ba@status.im,,127 +sasha@status.im,,112 +security-a1.sme@status.im,,206 +security.project-lead@status.im,,151 +security.sme@status.im,,123 +security1.sme@status.im,,135 +security2.sme@status.im,,168 +security3.sme@status.im,,169 
+security4.sme@status.im,,179 +security5.sme@status.im,,180 +security6.sme@status.im,security6.smex,211 +services.lead@status.im,,122 +vac.program-lead@status.im,,163 +web.project-lead@status.im,web.project-leadx,235 diff --git a/spiffworkflow-backend/migrations/versions/b652c232839f_.py b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py similarity index 95% rename from spiffworkflow-backend/migrations/versions/b652c232839f_.py rename to spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py index dbf5b276..d2ef7c10 100644 --- a/spiffworkflow-backend/migrations/versions/b652c232839f_.py +++ b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: b652c232839f +Revision ID: 0b5dd14bfbac Revises: -Create Date: 2023-03-17 16:50:32.774216 +Create Date: 2023-03-23 16:25:33.288500 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = 'b652c232839f' +revision = '0b5dd14bfbac' down_revision = None branch_labels = None depends_on = None @@ -115,19 +115,22 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=True), sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), - sa.Column('parent_process_id', sa.Integer(), nullable=True), + sa.Column('top_level_process_id', sa.Integer(), nullable=True), + sa.Column('direct_parent_process_id', sa.Integer(), nullable=True), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('json_data_hash', sa.String(length=255), nullable=False), sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), - sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ), + 
sa.ForeignKeyConstraint(['direct_parent_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['top_level_process_id'], ['bpmn_process.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('guid') ) op.create_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), 'bpmn_process', ['bpmn_process_definition_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_direct_parent_process_id'), 'bpmn_process', ['direct_parent_process_id'], unique=False) op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False) - op.create_index(op.f('ix_bpmn_process_parent_process_id'), 'bpmn_process', ['parent_process_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_top_level_process_id'), 'bpmn_process', ['top_level_process_id'], unique=False) op.create_table('bpmn_process_definition_relationship', sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False), @@ -248,7 +251,6 @@ def upgrade(): sa.Column('status', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), - sa.Column('spiff_step', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), @@ -344,22 +346,6 @@ def upgrade(): op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False) op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False) op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False) - op.create_table('spiff_step_details', - sa.Column('id', 
sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('spiff_step', sa.Integer(), nullable=False), - sa.Column('task_json', sa.JSON(), nullable=False), - sa.Column('task_id', sa.String(length=50), nullable=False), - sa.Column('task_state', sa.String(length=50), nullable=False), - sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False), - sa.Column('delta_json', sa.JSON(), nullable=True), - sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step') - ) - op.create_index(op.f('ix_spiff_step_details_process_instance_id'), 'spiff_step_details', ['process_instance_id'], unique=False) op.create_table('task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=False), @@ -465,8 +451,6 @@ def downgrade(): op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task') op.drop_table('task') - op.drop_index(op.f('ix_spiff_step_details_process_instance_id'), table_name='spiff_step_details') - op.drop_table('spiff_step_details') op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue') @@ -519,8 +503,9 @@ def downgrade(): op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), table_name='bpmn_process_definition_relationship') 
op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), table_name='bpmn_process_definition_relationship') op.drop_table('bpmn_process_definition_relationship') - op.drop_index(op.f('ix_bpmn_process_parent_process_id'), table_name='bpmn_process') + op.drop_index(op.f('ix_bpmn_process_top_level_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process') + op.drop_index(op.f('ix_bpmn_process_direct_parent_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), table_name='bpmn_process') op.drop_table('bpmn_process') op.drop_index(op.f('ix_user_service_id'), table_name='user') diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index c00513fb..0c439e11 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1,14 +1,14 @@ [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "alembic" -version = "1.8.1" +version = "1.10.2" description = "A database migration tool for SQLAlchemy." 
category = "main" optional = false @@ -17,6 +17,7 @@ python-versions = ">=3.7" [package.dependencies] Mako = "*" SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" [package.extras] tz = ["python-dateutil"] @@ -45,11 +46,11 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "apscheduler" -version = "3.9.1.post1" +version = "3.10.1" description = "In-process task scheduler with Cron-like capabilities" category = "main" optional = false -python-versions = "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.6" [package.dependencies] pytz = "*" @@ -58,21 +59,20 @@ six = ">=1.4.0" tzlocal = ">=2.0,<3.0.0 || >=4.0.0" [package.extras] -asyncio = ["trollius"] doc = ["sphinx", "sphinx-rtd-theme"] gevent = ["gevent"] mongodb = ["pymongo (>=3.0)"] redis = ["redis (>=3.0)"] rethinkdb = ["rethinkdb (>=2.4.0)"] -sqlalchemy = ["sqlalchemy (>=0.8)"] -testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] tornado = ["tornado (>=4.3)"] twisted = ["twisted"] zookeeper = ["kazoo"] [[package]] name = "astroid" -version = "2.13.3" +version = "2.15.1" description = "An abstract syntax tree for Python with inference support." 
category = "main" optional = false @@ -88,28 +88,26 @@ wrapt = [ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "Babel" -version = "2.10.3" -description = "Internationalization utilities" -category = "main" -optional = false python-versions = ">=3.6" -[package.dependencies] -pytz = ">=2015.7" +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +category = "main" +optional = false +python-versions = ">=3.7" [[package]] name = "bandit" @@ -144,7 +142,7 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.12.0" description = "Screen-scraping 
library" category = "dev" optional = false @@ -167,7 +165,7 @@ python-versions = "*" [[package]] name = "black" -version = "22.10.0" +version = "22.12.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -275,14 +273,11 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = ">=3.7.0" [[package]] name = "classify-imports" @@ -355,11 +350,11 @@ PyYAML = ">=3.11" [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" [[package]] name = "configparser" @@ -375,7 +370,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec [[package]] name = "connexion" -version = "2.14.1" +version = "2.14.2" description = "Connexion - API first applications with OpenAPI/Swagger and Flask" category = "main" optional = false @@ -383,7 +378,7 @@ python-versions = ">=3.6" [package.dependencies] clickclick = ">=1.2,<21" -flask = ">=1.0.4,<3" +flask = ">=1.0.4,<2.3" inflection = ">=0.3.1,<0.6" itsdangerous = ">=0.24" jsonschema = ">=2.5.1,<5" @@ -391,14 +386,14 @@ packaging = ">=20" PyYAML = ">=5.1,<7" requests = ">=2.9.1,<3" swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} -werkzeug = ">=1.0,<3" +werkzeug = ">=1.0,<2.3" [package.extras] aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"] docs = ["sphinx-autoapi (==1.8.1)"] -flask = ["flask (>=1.0.4,<3)", "itsdangerous 
(>=0.24)"] +flask = ["flask (>=1.0.4,<2.3)", "itsdangerous (>=0.24)"] swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] -tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] +tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<2.3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] [[package]] name = "coverage" @@ -445,20 +440,20 @@ python-versions = ">=3.6,<4.0" [[package]] name = "dateparser" -version = "1.1.2" +version = "1.1.8" description = "Date parsing library designed to parse dates from HTML pages" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] python-dateutil = "*" pytz = "*" -regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27,<2022.3.15" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" tzlocal = "*" [package.extras] -calendars = ["convertdate", "convertdate", "hijri-converter"] +calendars = ["convertdate", "hijri-converter"] fasttext = ["fasttext"] langdetect = ["langdetect"] @@ -493,7 +488,7 @@ python-versions = ">=3.7" name = "dparse" version = "0.6.2" description = "A parser for Python dependency files" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -507,7 +502,7 @@ pipenv = ["pipenv"] [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.1" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -518,15 +513,15 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.8.0" +version = "3.10.7" description = "A platform independent 
file lock." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -557,7 +552,7 @@ pycodestyle = "*" [[package]] name = "flake8-bugbear" -version = "22.10.25" +version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -572,11 +567,11 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "flake8-docstrings" -version = "1.6.0" +version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3" @@ -607,7 +602,7 @@ pygments = "*" restructuredtext-lint = "*" [[package]] -name = "Flask" +name = "flask" version = "2.2.2" description = "A simple framework for building complex web applications." category = "main" @@ -626,8 +621,8 @@ async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] [[package]] -name = "Flask-Admin" -version = "1.6.0" +name = "flask-admin" +version = "1.6.1" description = "Simple and extensible admin interface framework for Flask" category = "main" optional = false @@ -642,7 +637,7 @@ aws = ["boto"] azure = ["azure-storage-blob"] [[package]] -name = "Flask-Bcrypt" +name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." 
category = "main" @@ -685,7 +680,7 @@ reference = "main" resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" [[package]] -name = "Flask-Cors" +name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" category = "main" @@ -713,7 +708,7 @@ Werkzeug = ">=0.14" asymmetric-crypto = ["cryptography (>=3.3.1)"] [[package]] -name = "Flask-Mail" +name = "flask-mail" version = "0.9.1" description = "Flask extension for sending email" category = "main" @@ -745,20 +740,20 @@ sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmal tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] [[package]] -name = "Flask-Migrate" -version = "3.1.0" +name = "flask-migrate" +version = "4.0.4" description = "SQLAlchemy database migrations for Flask applications using Alembic." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -alembic = ">=0.7" +alembic = ">=1.9.0" Flask = ">=0.9" Flask-SQLAlchemy = ">=1.0" [[package]] -name = "Flask-RESTful" +name = "flask-restful" version = "0.3.9" description = "Simple framework for creating REST APIs" category = "main" @@ -788,7 +783,7 @@ pycryptodome = "*" [[package]] name = "flask-sqlalchemy" -version = "3.0.2" +version = "3.0.3" description = "Add SQLAlchemy support to your Flask application." category = "main" optional = false @@ -800,7 +795,7 @@ SQLAlchemy = ">=1.4.18" [[package]] name = "furo" -version = "2022.9.29" +version = "2023.3.27" description = "A clean customisable Sphinx documentation theme." 
category = "dev" optional = false @@ -809,24 +804,24 @@ python-versions = ">=3.7" [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" -sphinx = ">=4.0,<6.0" +sphinx = ">=5.0,<7.0" sphinx-basic-ng = "*" [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] smmap = ">=3.0.1,<6" [[package]] -name = "GitPython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" +name = "gitpython" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" @@ -836,7 +831,7 @@ gitdb = ">=4.0.1,<5" [[package]] name = "greenlet" -version = "2.0.1" +version = "2.0.2" description = "Lightweight in-process concurrent programming" category = "main" optional = false @@ -844,7 +839,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] +test = ["objgraph", "psutil"] [[package]] name = "gunicorn" @@ -865,7 +860,7 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "identify" -version = "2.5.6" +version = "2.5.22" description = "File identification library for Python" category = "dev" optional = false @@ -892,7 +887,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.13.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -902,7 +897,7 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf 
= ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] @@ -916,23 +911,23 @@ python-versions = ">=3.5" [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "isort" -version = "5.11.4" +version = "5.12.0" description = "A Python utility / library to sort Python imports." category = "main" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -945,7 +940,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -960,7 +955,7 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.16.0" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -1004,11 +999,11 @@ zookeeper = ["kazoo (>=1.3.1)"] [[package]] name = "lazy-object-proxy" -version = "1.7.1" +version = "1.9.0" description = "A fast and thorough lazy object proxy." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "livereload" @@ -1024,7 +1019,7 @@ tornado = {version = "*", markers = "python_version > \"2.7\""} [[package]] name = "lxml" -version = "4.9.1" +version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = false @@ -1037,8 +1032,8 @@ htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] [[package]] -name = "Mako" -version = "1.2.3" +name = "mako" +version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false @@ -1053,8 +1048,8 @@ lingua = ["lingua"] testing = ["pytest"] [[package]] -name = "MarkupSafe" -version = "2.1.1" +name = "markupsafe" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false @@ -1062,7 +1057,7 @@ python-versions = ">=3.7" [[package]] name = "marshmallow" -version = "3.18.0" +version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
category = "main" optional = false @@ -1072,9 +1067,9 @@ python-versions = ">=3.7" packaging = ">=17.0" [package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1090,7 +1085,7 @@ marshmallow = ">=2.0.0" [[package]] name = "marshmallow-sqlalchemy" -version = "0.28.1" +version = "0.29.0" description = "SQLAlchemy integration with the marshmallow (de)serialization library" category = "main" optional = false @@ -1099,12 +1094,12 @@ python-versions = ">=3.7" [package.dependencies] marshmallow = ">=3.0.0" packaging = ">=21.3" -SQLAlchemy = ">=1.3.0" +SQLAlchemy = ">=1.4.40,<3.0" [package.extras] -dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] +dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.13)", "sphinx (==6.1.3)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)"] tests = 
["pytest", "pytest-lazy-fixture (>=0.6.2)"] [[package]] @@ -1117,29 +1112,30 @@ python-versions = "*" [[package]] name = "mypy" -version = "0.982" +version = "1.1.1" description = "Optional static typing for Python" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.5" [[package]] name = "mysql-connector-python" @@ -1181,7 +1177,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false @@ -1189,7 +1185,7 @@ python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.10.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false @@ -1208,15 +1204,15 @@ flake8 = ">=3.9.1" [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -1232,7 +1228,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "2.20.0" +version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1243,12 +1239,11 @@ cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" +virtualenv = ">=20.10.0" [[package]] name = "pre-commit-hooks" -version = "4.3.0" +version = "4.4.0" description = "Some out-of-the-box hooks for pre-commit." 
category = "dev" optional = false @@ -1283,11 +1278,11 @@ prometheus-client = "*" [[package]] name = "prompt-toolkit" -version = "3.0.31" +version = "3.0.38" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" [package.dependencies] wcwidth = "*" @@ -1302,7 +1297,7 @@ python-versions = ">=3.7" [[package]] name = "psycopg2" -version = "2.9.4" +version = "2.9.5" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false @@ -1334,17 +1329,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1355,8 +1350,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "Pygments" -version = "2.13.0" +name = "pygments" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false @@ -1381,14 +1376,14 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.15.10" +version = "2.17.1" description = "python code static checker" category = "main" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" +astroid = ">=2.15.0,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1418,7 +1413,7 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false @@ -1426,7 +1421,7 @@ python-versions = ">=3.7" [[package]] name = "pytest" -version = "7.2.0" +version = "7.2.2" description = "pytest: simple powerful testing with Python" category = "main" optional = false @@ -1524,7 +1519,7 @@ tzdata = {version = "*", markers = "python_version >= \"3.6\""} [[package]] name = "pyupgrade" -version = "3.1.0" +version = "3.3.1" description = "A tool to automatically upgrade syntax for newer versions." category = "dev" optional = false @@ -1534,7 +1529,7 @@ python-versions = ">=3.7" tokenize-rt = ">=3.2.0" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1543,11 +1538,11 @@ python-versions = ">=3.6" [[package]] name = "regex" -version = "2022.3.2" +version = "2023.3.23" description = "Alternative regular expression module, to replace re." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [[package]] name = "reorder-python-imports" @@ -1562,7 +1557,7 @@ classify-imports = ">=4.1" [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -1570,7 +1565,7 @@ python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" @@ -1602,10 +1597,10 @@ python-versions = "*" docutils = ">=0.11,<1.0" [[package]] -name = "ruamel.yaml" +name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" +category = "main" optional = false python-versions = ">=3" @@ -1620,22 +1615,22 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" [[package]] name = "safety" -version = "2.3.1" +version = "2.3.5" description = "Checks installed dependencies for known vulnerabilities and licenses." 
-category = "dev" +category = "main" optional = false python-versions = "*" [package.dependencies] Click = ">=8.0.2" dparse = ">=0.6.2" -packaging = ">=21.0" +packaging = ">=21.0,<22.0" requests = "*" "ruamel.yaml" = ">=0.17.21" setuptools = ">=19.3" @@ -1646,7 +1641,7 @@ gitlab = ["python-gitlab (>=1.3.0)"] [[package]] name = "sentry-sdk" -version = "1.16.0" +version = "1.18.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -1683,20 +1678,20 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "65.5.0" +version = "65.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run 
(>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simplejson" -version = "3.17.6" +version = "3.18.4" description = "Simple, fast, extensible JSON encoder/decoder for Python" category = "main" optional = false @@ -1728,14 +1723,14 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] -name = "Sphinx" +name = "sphinx" version = "5.3.0" description = "Python documentation generator" category = "main" @@ -1768,7 +1763,7 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx-autoapi" -version = "2.0.0" +version = "2.1.0" description = "Sphinx API documentation generator" category = "main" optional = false @@ -1778,7 +1773,7 @@ python-versions = ">=3.7" astroid = ">=2.7" Jinja2 = "*" PyYAML = "*" -sphinx = ">=4.0" +sphinx = ">=5.2.0" unidecode = "*" [package.extras] @@ -1818,7 +1813,7 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta [[package]] name = "sphinx-click" -version = "4.3.0" +version = "4.4.0" description = "Sphinx extension that automatically documents click applications" category = "dev" optional = false @@ -1831,11 +1826,11 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.4" +description = 
"sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -1855,11 +1850,11 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -1918,44 +1913,47 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "3c3345c85dd7f3b7112ad04aaa6487abbd2e9414" +resolved_reference = "1c877dd768053b4cce4c4e14c92caa3216371751" [[package]] -name = "SQLAlchemy" -version = "1.4.42" +name = "sqlalchemy" +version = "2.0.7" description = "Database Abstraction Library" category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] 
asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "sqlalchemy-stubs" version = "0.4" -description = "SQLAlchemy stubs and mypy plugin" +description = "" category = "main" optional = false python-versions = "*" @@ -1973,7 +1971,7 @@ resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" [[package]] name = "stevedore" -version = "4.0.1" +version = "5.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1995,17 +1993,17 @@ Jinja2 = ">=2.0" [[package]] name = "tokenize-rt" -version = "4.2.1" +version = "5.0.0" description = "A wrapper around the stdlib `tokenize` which roundtrips." 
category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -2019,11 +2017,11 @@ python-versions = ">=3.7" [[package]] name = "tomlkit" -version = "0.11.6" +version = "0.11.7" description = "Style preserving TOML library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "tornado" @@ -2055,14 +2053,14 @@ python-versions = "*" [[package]] name = "types-dateparser" -version = "1.1.4.1" +version = "1.1.4.9" description = "Typing stubs for dateparser" category = "main" optional = false python-versions = "*" [[package]] -name = "types-Flask" +name = "types-flask" version = "1.1.6" description = "Typing stubs for Flask" category = "main" @@ -2075,7 +2073,7 @@ types-Jinja2 = "*" types-Werkzeug = "*" [[package]] -name = "types-Jinja2" +name = "types-jinja2" version = "2.11.9" description = "Typing stubs for Jinja2" category = "main" @@ -2086,7 +2084,7 @@ python-versions = "*" types-MarkupSafe = "*" [[package]] -name = "types-MarkupSafe" +name = "types-markupsafe" version = "1.1.10" description = "Typing stubs for MarkupSafe" category = "main" @@ -2095,15 +2093,15 @@ python-versions = "*" [[package]] name = "types-pytz" -version = "2022.5.0.0" +version = "2022.7.1.2" description = "Typing stubs for pytz" category = "main" optional = false python-versions = "*" [[package]] -name = "types-PyYAML" -version = "6.0.12" +name = "types-pyyaml" +version = "6.0.12.9" description = "Typing stubs for PyYAML" category = "main" optional = false @@ -2111,7 +2109,7 @@ python-versions = "*" [[package]] name = "types-requests" -version = "2.28.11.2" +version = "2.28.11.17" description = "Typing stubs for requests" category = "main" optional = false @@ -2122,14 +2120,14 @@ types-urllib3 
= "<1.27" [[package]] name = "types-urllib3" -version = "1.26.25.1" +version = "1.26.25.10" description = "Typing stubs for urllib3" category = "main" optional = false python-versions = "*" [[package]] -name = "types-Werkzeug" +name = "types-werkzeug" version = "1.0.9" description = "Typing stubs for Werkzeug" category = "main" @@ -2138,7 +2136,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false @@ -2146,7 +2144,7 @@ python-versions = ">=3.7" [[package]] name = "tzdata" -version = "2022.5" +version = "2023.2" description = "Provider of IANA time zone data" category = "main" optional = false @@ -2154,22 +2152,21 @@ python-versions = ">=2" [[package]] name = "tzlocal" -version = "4.2" +version = "4.3" description = "tzinfo object for the local timezone" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pytz-deprecation-shim = "*" tzdata = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] -test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] +devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] [[package]] -name = "Unidecode" +name = "unidecode" version = "1.3.6" description = "ASCII transliterations of Unicode text" category = "main" @@ -2178,11 +2175,11 @@ python-versions = ">=3.5" [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -2199,32 +2196,32 @@ python-versions = ">=3.6" [[package]] name = "virtualenv" -version = "20.16.5" +version = "20.21.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -distlib = ">=0.3.5,<1" +distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false python-versions = "*" [[package]] -name = "Werkzeug" -version = "2.2.2" +name = "werkzeug" +version = "2.2.3" description = "The comprehensive WSGI web application library." 
category = "main" optional = false @@ -2238,14 +2235,14 @@ watchdog = ["watchdog"] [[package]] name = "wrapt" -version = "1.14.1" +version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] -name = "WTForms" +name = "wtforms" version = "3.0.1" description = "Form validation and rendering for Python web development." category = "main" @@ -2260,7 +2257,7 @@ email = ["email-validator"] [[package]] name = "xdoctest" -version = "1.1.0" +version = "1.1.1" description = "A rewrite of the builtin doctest module" category = "dev" optional = false @@ -2272,41 +2269,43 @@ Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0 six = "*" [package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov 
(==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] +all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] colors = ["Pygments", "Pygments", "colorama"] jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi 
(==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] runtime-strict = ["six (==1.11.0)"] -tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] -tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] +tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"] +tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"] +tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"] +tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] [[package]] name = "zipp" -version = "3.10.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "d851a6df0cc4fbb77d658455deb8bc3cb9c82fa7789ea581f20b373a96ae6078" +content-hash = "9fea44386fbab29102a051a254058909568c4ee3dbd6a402fb91aacbcf1f7fd2" [metadata.files] alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] alembic = [ - {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, - {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, + {file = "alembic-1.10.2-py3-none-any.whl", hash = "sha256:8b48368f6533c064b39c024e1daba15ae7f947eac84185c28c06bbe1301a5497"}, + {file = "alembic-1.10.2.tar.gz", hash = "sha256:457eafbdc0769d855c2c92cbafe6b7f319f916c80cf4ed02b8f394f38b51b89d"}, ] amqp = [ {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, @@ -2317,20 +2316,20 @@ aniso8601 = [ {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, ] apscheduler = [ - {file = "APScheduler-3.9.1.post1-py2.py3-none-any.whl", hash = "sha256:c8c618241dbb2785ed5a687504b14cb1851d6f7b5a4edf3a51e39cc6a069967a"}, - {file = 
"APScheduler-3.9.1.post1.tar.gz", hash = "sha256:b2bea0309569da53a7261bfa0ce19c67ddbfe151bda776a6a907579fdbd3eb2a"}, + {file = "APScheduler-3.10.1-py3-none-any.whl", hash = "sha256:e813ad5ada7aff36fb08cdda746b520531eaac7757832abc204868ba78e0c8f6"}, + {file = "APScheduler-3.10.1.tar.gz", hash = "sha256:0293937d8f6051a0f493359440c1a1b93e882c57daf0197afeff0e727777b96e"}, ] astroid = [ - {file = "astroid-2.13.3-py3-none-any.whl", hash = "sha256:14c1603c41cc61aae731cad1884a073c4645e26f126d13ac8346113c95577f3b"}, - {file = "astroid-2.13.3.tar.gz", hash = "sha256:6afc22718a48a689ca24a97981ad377ba7fb78c133f40335dfd16772f29bcfb1"}, + {file = "astroid-2.15.1-py3-none-any.whl", hash = "sha256:89860bda98fe2bbd1f5d262229be7629d778ce280de68d95d4a73d1f592ad268"}, + {file = "astroid-2.15.1.tar.gz", hash = "sha256:af4e0aff46e2868218502789898269ed95b663fba49e65d91c1e09c966266c34"}, ] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] -Babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +babel = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] bandit = [ {file = "bandit-1.7.2-py3-none-any.whl", hash = 
"sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, @@ -2360,35 +2359,26 @@ bcrypt = [ {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, ] beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, + {file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"}, + {file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"}, ] billiard = [ {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, ] black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = 
"black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = 
"sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, ] blinker = [ {file = 
"blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, @@ -2473,8 +2463,81 @@ cfgv = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] classify-imports = [ {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, @@ -2501,16 +2564,16 @@ clickclick = [ {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, ] colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] configparser = [ {file = 
"configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, ] connexion = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, + {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"}, + {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"}, ] coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, @@ -2594,8 +2657,8 @@ darglint = [ {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] dateparser = [ - {file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"}, - {file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"}, + {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, + {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, ] dill = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, @@ -2614,12 +2677,12 @@ dparse = [ {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = 
"sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, ] filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"}, + {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -2629,12 +2692,12 @@ flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.10.25.tar.gz", hash = "sha256:89e51284eb929fbb7f23fbd428491e7427f7cdc8b45a77248daffe86a039d696"}, - {file = "flake8_bugbear-22.10.25-py3-none-any.whl", hash = "sha256:584631b608dc0d7d3f9201046d5840a45502da4732d5e8df6c7ac1694a91cb9e"}, + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, ] flake8-docstrings = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = 
"flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, @@ -2644,19 +2707,20 @@ flake8-rst-docstrings = [ {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, ] -Flask = [ +flask = [ {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, ] -Flask-Admin = [ - {file = "Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, +flask-admin = [ + {file = "Flask-Admin-1.6.1.tar.gz", hash = "sha256:24cae2af832b6a611a01d7dc35f42d266c1d6c75a426b869d8cb241b78233369"}, + {file = "Flask_Admin-1.6.1-py3-none-any.whl", hash = "sha256:fd8190f1ec3355913a22739c46ed3623f1d82b8112cde324c60a6fc9b21c9406"}, ] -Flask-Bcrypt = [ +flask-bcrypt = [ {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, ] flask-bpmn = [] -Flask-Cors = [ +flask-cors = [ {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, {file = 
"Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, ] @@ -2664,18 +2728,18 @@ flask-jwt-extended = [ {file = "Flask-JWT-Extended-4.4.4.tar.gz", hash = "sha256:62b521d75494c290a646ae8acc77123721e4364790f1e64af0038d823961fbf0"}, {file = "Flask_JWT_Extended-4.4.4-py2.py3-none-any.whl", hash = "sha256:a85eebfa17c339a7260c4643475af444784ba6de5588adda67406f0a75599553"}, ] -Flask-Mail = [ +flask-mail = [ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, ] flask-marshmallow = [ {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, ] -Flask-Migrate = [ - {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, - {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, +flask-migrate = [ + {file = "Flask-Migrate-4.0.4.tar.gz", hash = "sha256:73293d40b10ac17736e715b377e7b7bde474cb8105165d77474df4c3619b10b3"}, + {file = "Flask_Migrate-4.0.4-py3-none-any.whl", hash = "sha256:77580f27ab39bc68be4906a43c56d7674b45075bc4f883b1d0b985db5164d58f"}, ] -Flask-RESTful = [ +flask-restful = [ {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, ] @@ -2684,90 +2748,90 @@ flask-simple-crypt = [ {file = "Flask_Simple_Crypt-0.3.3-py3-none-any.whl", hash = "sha256:08c3fcad955ac148bb885b1de4798c1cfce8512452072beee414bacf1552e8ef"}, ] flask-sqlalchemy = [ - {file = "Flask-SQLAlchemy-3.0.2.tar.gz", hash = 
"sha256:16199f5b3ddfb69e0df2f52ae4c76aedbfec823462349dabb21a1b2e0a2b65e9"}, - {file = "Flask_SQLAlchemy-3.0.2-py3-none-any.whl", hash = "sha256:7d0cd9cf73e64a996bb881a1ebd01633fc5a6d11c36ea27f7b5e251dc45476e7"}, + {file = "Flask-SQLAlchemy-3.0.3.tar.gz", hash = "sha256:2764335f3c9d7ebdc9ed6044afaf98aae9fa50d7a074cef55dde307ec95903ec"}, + {file = "Flask_SQLAlchemy-3.0.3-py3-none-any.whl", hash = "sha256:add5750b2f9cd10512995261ee2aa23fab85bd5626061aa3c564b33bb4aa780a"}, ] furo = [ - {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, - {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, + {file = "furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521"}, + {file = "furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5"}, ] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] -GitPython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, +gitpython = [ + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = 
"sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] greenlet = [ - {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, - {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, - {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, - {file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, - {file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, - {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, - {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, - {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, - {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, - {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, - {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, - {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, - {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, - {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, - {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, - {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, - {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash 
= "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, - {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, - {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, - {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, - {file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, - 
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, - {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, - {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, - {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = 
"sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = 
"greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = 
"sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = 
"sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] identify = [ - {file = "identify-2.5.6-py2.py3-none-any.whl", hash = "sha256:b276db7ec52d7e89f5bc4653380e33054ddc803d25875952ad90b0f012cbcdaa"}, - {file = "identify-2.5.6.tar.gz", hash = "sha256:6c32dbd747aa4ceee1df33f25fed0b0f6e0d65721b15bd151307ff7056d50245"}, + {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"}, + {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -2778,243 +2842,261 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, ] inflection = [ {file = 
"inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] isort = [ - {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, - {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] itsdangerous = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] -Jinja2 = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jsonschema = [ - {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, - {file = "jsonschema-4.16.0.tar.gz", hash = 
"sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] kombu = [ {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash 
= "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, - {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] livereload = [ {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = 
"lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = 
"sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = 
"lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", 
hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = 
"lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = 
"lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = 
"lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = 
"lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, ] -Mako = [ - {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, - {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, +mako = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, ] -MarkupSafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file 
= "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = 
"MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +markupsafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = 
"MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = 
"MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, ] marshmallow = [ - {file = "marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, - {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, + {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, + {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, ] marshmallow-enum = [ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, ] marshmallow-sqlalchemy = [ - {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, - 
{file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, + {file = "marshmallow-sqlalchemy-0.29.0.tar.gz", hash = "sha256:3523a774390ef0c1c0f7c708a7519809c5396cf608720f14f55c36f74ff5bbec"}, + {file = "marshmallow_sqlalchemy-0.29.0-py2.py3-none-any.whl", hash = "sha256:3cee0bf61ed10687c0a41448e1916649b28222334a02f7b937c39d1c69c18bee"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = 
"sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, + {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, + {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, + {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, + {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, + {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, + {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, + {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, + {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, + {file = 
"mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, + {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, + {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, + {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, + {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, + {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, + {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, + {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, + {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, + {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, ] mypy-extensions = [ - {file = 
"mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] mysql-connector-python = [ {file = "mysql-connector-python-8.0.32.tar.gz", hash = "sha256:c2d20b29fd096a0633f9360c275bd2434d4bcf597281991c4b7f1c820cd07b84"}, @@ -3052,32 +3134,32 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] pbr = [ - {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, - {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] pep8-naming = [ {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, {file = 
"pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, ] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] pre-commit-hooks = [ - {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, - {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, + {file = "pre_commit_hooks-4.4.0-py2.py3-none-any.whl", hash = "sha256:fc8837335476221ccccda3d176ed6ae29fe58753ce7e8b7863f5d0f987328fc6"}, + {file = "pre_commit_hooks-4.4.0.tar.gz", hash = "sha256:7011eed8e1a25cde94693da009cba76392194cecc2f3f06c51a44ea6ad6c2af9"}, ] 
prometheus-client = [ {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, @@ -3088,8 +3170,8 @@ prometheus-flask-exporter = [ {file = "prometheus_flask_exporter-0.22.3.tar.gz", hash = "sha256:32b152aeb7970cbf04616627fc5bf20d82b0918e54c54f80dc8aaef3349fd333"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, - {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, ] protobuf = [ {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, @@ -3116,17 +3198,19 @@ protobuf = [ {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] psycopg2 = [ - {file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"}, - {file = "psycopg2-2.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c"}, - {file = "psycopg2-2.9.4-cp36-cp36m-win32.whl", hash = "sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a"}, - {file = "psycopg2-2.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb"}, - {file = "psycopg2-2.9.4-cp37-cp37m-win32.whl", hash = "sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb"}, - {file = "psycopg2-2.9.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61"}, - {file = "psycopg2-2.9.4-cp38-cp38-win32.whl", hash = "sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c"}, - {file = "psycopg2-2.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184"}, - {file = "psycopg2-2.9.4-cp39-cp39-win32.whl", hash = "sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426"}, - {file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"}, - {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, + {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, + {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, + {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, + {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, + {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, + {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, + {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, + {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, + {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, + {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = 
"sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, + {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, + {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, + {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, @@ -3172,55 +3256,61 @@ pycryptodome = [ {file = "pycryptodome-3.17.tar.gz", hash = "sha256:bce2e2d8e82fcf972005652371a3e8731956a0c1fbb719cc897943b3695ad91b"}, ] pydocstyle = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = 
"sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pyjwt = [ {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, ] pylint = [ - {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, - {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, + {file = "pylint-2.17.1-py3-none-any.whl", hash = "sha256:8660a54e3f696243d644fca98f79013a959c03f979992c1ab59c24d3f4ec2700"}, + {file = "pylint-2.17.1.tar.gz", hash = "sha256:d4d009b0116e16845533bc2163493d6681846ac725eab8ca8014afb520178ddd"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = 
"pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, ] pytest-flask = [ {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, @@ -3247,10 +3337,10 @@ pytz-deprecation-shim = [ {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, ] pyupgrade = [ - {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"}, - {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"}, + {file = "pyupgrade-3.3.1-py2.py3-none-any.whl", hash = 
"sha256:3b93641963df022d605c78aeae4b5956a5296ea24701eafaef9c487527b77e60"}, + {file = "pyupgrade-3.3.1.tar.gz", hash = "sha256:f88bce38b0ba92c2a9a5063c8629e456e8d919b67d2d42c7ecab82ff196f9813"}, ] -PyYAML = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -3293,88 +3383,74 @@ PyYAML = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] regex = [ - {file = "regex-2022.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab69b4fe09e296261377d209068d52402fb85ef89dc78a9ac4a29a895f4e24a7"}, - {file = "regex-2022.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5bc5f921be39ccb65fdda741e04b2555917a4bced24b4df14eddc7569be3b493"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43eba5c46208deedec833663201752e865feddc840433285fbadee07b84b464d"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c68d2c04f7701a418ec2e5631b7f3552efc32f6bcc1739369c6eeb1af55f62e0"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:caa2734ada16a44ae57b229d45091f06e30a9a52ace76d7574546ab23008c635"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef806f684f17dbd6263d72a54ad4073af42b42effa3eb42b877e750c24c76f86"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be319f4eb400ee567b722e9ea63d5b2bb31464e3cf1b016502e3ee2de4f86f5c"}, 
- {file = "regex-2022.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:42bb37e2b2d25d958c25903f6125a41aaaa1ed49ca62c103331f24b8a459142f"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fbc88d3ba402b5d041d204ec2449c4078898f89c4a6e6f0ed1c1a510ef1e221d"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:91e0f7e7be77250b808a5f46d90bf0032527d3c032b2131b63dee54753a4d729"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3652bbe6720786b9137862205986f3ae54a09dec8499a995ed58292bdf77c2"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:878c626cbca3b649e14e972c14539a01191d79e58934e3f3ef4a9e17f90277f8"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6df070a986fc064d865c381aecf0aaff914178fdf6874da2f2387e82d93cc5bd"}, - {file = "regex-2022.3.2-cp310-cp310-win32.whl", hash = "sha256:b549d851f91a4efb3e65498bd4249b1447ab6035a9972f7fc215eb1f59328834"}, - {file = "regex-2022.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:8babb2b5751105dc0aef2a2e539f4ba391e738c62038d8cb331c710f6b0f3da7"}, - {file = "regex-2022.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1977bb64264815d3ef016625adc9df90e6d0e27e76260280c63eca993e3f455f"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e73652057473ad3e6934944af090852a02590c349357b79182c1b681da2c772"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b22ff939a8856a44f4822da38ef4868bd3a9ade22bb6d9062b36957c850e404f"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:878f5d649ba1db9f52cc4ef491f7dba2d061cdc48dd444c54260eebc0b1729b9"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0008650041531d0eadecc96a73d37c2dc4821cf51b0766e374cb4f1ddc4e1c14"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06b1df01cf2aef3a9790858af524ae2588762c8a90e784ba00d003f045306204"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57484d39447f94967e83e56db1b1108c68918c44ab519b8ecfc34b790ca52bf7"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74d86e8924835f863c34e646392ef39039405f6ce52956d8af16497af4064a30"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:ae17fc8103f3b63345709d3e9654a274eee1c6072592aec32b026efd401931d0"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f92a7cdc6a0ae2abd184e8dfd6ef2279989d24c85d2c85d0423206284103ede"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:5dcc4168536c8f68654f014a3db49b6b4a26b226f735708be2054314ed4964f4"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1e30762ddddb22f7f14c4f59c34d3addabc789216d813b0f3e2788d7bcf0cf29"}, - {file = "regex-2022.3.2-cp36-cp36m-win32.whl", hash = "sha256:286ff9ec2709d56ae7517040be0d6c502642517ce9937ab6d89b1e7d0904f863"}, - {file = "regex-2022.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d326ff80ed531bf2507cba93011c30fff2dd51454c85f55df0f59f2030b1687b"}, - {file = "regex-2022.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9d828c5987d543d052b53c579a01a52d96b86f937b1777bbfe11ef2728929357"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87ac58b9baaf50b6c1b81a18d20eda7e2883aa9a4fb4f1ca70f2e443bfcdc57"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6c2441538e4fadd4291c8420853431a229fcbefc1bf521810fbc2629d8ae8c2"}, - {file = 
"regex-2022.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3356afbb301ec34a500b8ba8b47cba0b44ed4641c306e1dd981a08b416170b5"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d96eec8550fd2fd26f8e675f6d8b61b159482ad8ffa26991b894ed5ee19038b"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf668f26604e9f7aee9f8eaae4ca07a948168af90b96be97a4b7fa902a6d2ac1"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb0e2845e81bdea92b8281a3969632686502565abf4a0b9e4ab1471c863d8f3"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:87bc01226cd288f0bd9a4f9f07bf6827134dc97a96c22e2d28628e824c8de231"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:09b4b6ccc61d4119342b26246ddd5a04accdeebe36bdfe865ad87a0784efd77f"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:9557545c10d52c845f270b665b52a6a972884725aa5cf12777374e18f2ea8960"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:0be0c34a39e5d04a62fd5342f0886d0e57592a4f4993b3f9d257c1f688b19737"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7b103dffb9f6a47ed7ffdf352b78cfe058b1777617371226c1894e1be443afec"}, - {file = "regex-2022.3.2-cp37-cp37m-win32.whl", hash = "sha256:f8169ec628880bdbca67082a9196e2106060a4a5cbd486ac51881a4df805a36f"}, - {file = "regex-2022.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:4b9c16a807b17b17c4fa3a1d8c242467237be67ba92ad24ff51425329e7ae3d0"}, - {file = "regex-2022.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:67250b36edfa714ba62dc62d3f238e86db1065fccb538278804790f578253640"}, - {file = "regex-2022.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:5510932596a0f33399b7fff1bd61c59c977f2b8ee987b36539ba97eb3513584a"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f7ee2289176cb1d2c59a24f50900f8b9580259fa9f1a739432242e7d254f93"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d7a68fa53688e1f612c3246044157117403c7ce19ebab7d02daf45bd63913e"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf5317c961d93c1a200b9370fb1c6b6836cc7144fef3e5a951326912bf1f5a3"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad397bc7d51d69cb07ef89e44243f971a04ce1dca9bf24c992c362406c0c6573"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:297c42ede2c81f0cb6f34ea60b5cf6dc965d97fa6936c11fc3286019231f0d66"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:af4d8cc28e4c7a2f6a9fed544228c567340f8258b6d7ea815b62a72817bbd178"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:452519bc4c973e961b1620c815ea6dd8944a12d68e71002be5a7aff0a8361571"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cb34c2d66355fb70ae47b5595aafd7218e59bb9c00ad8cc3abd1406ca5874f07"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d146e5591cb67c5e836229a04723a30af795ef9b70a0bbd913572e14b7b940f"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:03299b0bcaa7824eb7c0ebd7ef1e3663302d1b533653bfe9dc7e595d453e2ae9"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ccb0a4ab926016867260c24c192d9df9586e834f5db83dfa2c8fffb3a6e5056"}, - {file = "regex-2022.3.2-cp38-cp38-win32.whl", hash = 
"sha256:f7e8f1ee28e0a05831c92dc1c0c1c94af5289963b7cf09eca5b5e3ce4f8c91b0"}, - {file = "regex-2022.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35ed2f3c918a00b109157428abfc4e8d1ffabc37c8f9abc5939ebd1e95dabc47"}, - {file = "regex-2022.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:55820bc631684172b9b56a991d217ec7c2e580d956591dc2144985113980f5a3"}, - {file = "regex-2022.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:83f03f0bd88c12e63ca2d024adeee75234d69808b341e88343b0232329e1f1a1"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42d6007722d46bd2c95cce700181570b56edc0dcbadbfe7855ec26c3f2d7e008"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:320c2f4106962ecea0f33d8d31b985d3c185757c49c1fb735501515f963715ed"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd3fe37353c62fd0eb19fb76f78aa693716262bcd5f9c14bb9e5aca4b3f0dc4"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e51ad1e6131c496b58d317bc9abec71f44eb1957d32629d06013a21bc99cac"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72bc3a5effa5974be6d965ed8301ac1e869bc18425c8a8fac179fbe7876e3aee"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e5602a9b5074dcacc113bba4d2f011d2748f50e3201c8139ac5b68cf2a76bd8b"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:729aa8ca624c42f309397c5fc9e21db90bf7e2fdd872461aabdbada33de9063c"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d6ecfd1970b3380a569d7b3ecc5dd70dba295897418ed9e31ec3c16a5ab099a5"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:13bbf0c9453c6d16e5867bda7f6c0c7cff1decf96c5498318bb87f8136d2abd4"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:58ba41e462653eaf68fc4a84ec4d350b26a98d030be1ab24aba1adcc78ffe447"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c0446b2871335d5a5e9fcf1462f954586b09a845832263db95059dcd01442015"}, - {file = "regex-2022.3.2-cp39-cp39-win32.whl", hash = "sha256:20e6a27959f162f979165e496add0d7d56d7038237092d1aba20b46de79158f1"}, - {file = "regex-2022.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9efa41d1527b366c88f265a227b20bcec65bda879962e3fc8a2aee11e81266d7"}, - {file = "regex-2022.3.2.tar.gz", hash = "sha256:79e5af1ff258bc0fe0bdd6f69bc4ae33935a898e3cbefbbccf22e88a27fa053b"}, + {file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"}, + {file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"}, + {file = 
"regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"}, + {file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"}, + {file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"}, + {file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"}, + {file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"}, + {file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"}, + {file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"}, + {file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"}, + {file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"}, + {file = 
"regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"}, + {file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"}, + {file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"}, + {file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"}, + {file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"}, + {file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"}, + {file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"}, + {file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"}, ] reorder-python-imports = [ {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, ] requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] restrictedpython = [ {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"}, @@ -3383,7 +3459,7 @@ restrictedpython = [ restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] -"ruamel.yaml" = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] @@ -3424,79 +3500,103 @@ ruamel-yaml-clib = [ {file = 
"ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] safety = [ - {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, - {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, + {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"}, + {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.16.0.tar.gz", hash = "sha256:a900845bd78c263d49695d48ce78a4bce1030bbd917e0b6cc021fc000c901113"}, - {file = "sentry_sdk-1.16.0-py2.py3-none-any.whl", hash = "sha256:633edefead34d976ff22e7edc367cdf57768e24bc714615ccae746d9d91795ae"}, + {file = "sentry-sdk-1.18.0.tar.gz", hash = "sha256:d07b9569a151033b462f7a7113ada94cc41ecf49daa83d35f5f852a0b9cf3b44"}, + {file = "sentry_sdk-1.18.0-py2.py3-none-any.whl", hash = "sha256:714203a9adcac4a4a35e348dc9d3e294ad0200a66cdca26c068967d728f34fcb"}, ] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, + {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, + {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, ] simplejson = [ - {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, - {file = 
"simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, - {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, - {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, - {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, - {file = 
"simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, - {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, - {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, - {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, - {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, - {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, - {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, - {file = 
"simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, - {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, - {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, - {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, + {file = "simplejson-3.18.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:8f381747c2edebe3c750a571e55103bfcc33b2707a9b91ae033ab9ba718d976a"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:094275b1b8f003afce1167c8a674cd1ee2fd48c566632dac5d149901d5012ff8"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:676e8c182f8079851f12ae1cee2fcebe04def2da2a5703a9d747ab125af47732"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:4b5df4ee48403885046c6f4fd8adc84c4ac0adec69482f22a17bd4ba52876341"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:edb334cab35dcd90eb563fdacb085f10e5dd0b1acb57fa43f8933308b42a8f88"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:b6c6cfc492710d8f0303705fa1ff7bb3d6a145f523384e45a6f3b13ada37021f"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ced906b172bfad62736a27cfafcb6e24bc9938533b0529ff8150f7926fe35b54"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7701a289d45fdfeb37f1d15cf638801cea439df667a613379443772a86e82936"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e2f87a483c4ab0bb2a9adc9ca09173e7f7cf3696e4fa67bd45a6b33181e57921"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c0444423129df448788edc66a129bc7560ad7d6a661d74f0900959c0b44349a1"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a86bc9c8a913a4e0ffab85c563a7505cdf4bd13fba05342f8314facc0b7586"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2fa1ee5ca34ab2ecfbe3f7a7e952a1ecaebb5b4818f002b5b146324912ac3d5"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b17026f3f349a6e87818cd3531e3bbb5cc78a6f4b2b6718f574a8e0512d71e08"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a255d30cda6334ba780eb40a56e8134efd3453948b995d3966e45212e34bf018"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9f0dfde448611f4f818da05f9b544a78f29355dc39151b0dad8e7c65c513e4f"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f1085cadec0f7e76377951d7a87744628c90ac6cc634fc97eecce0c4d41ec563"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f15f56b3119fb71fa57eb4613bcd87eb7df6c2f3547de7d341853d3e50cef97e"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:695da62e494e4689ab78fae173a78390a175b6a5ccc4292277ce0f8dba3945d5"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:097e48686e49026836ef384c7c10ca670acc023cb16a976a689c2eb6c1852df4"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a56005332d70b8d02d476d4a85818b27b01e51dac1a21d5c1a1d8a5df2efb4a6"}, + {file = "simplejson-3.18.4-cp310-cp310-win32.whl", hash = "sha256:3d549efc7e8f9a180c59462b124991b690ff25c235d5cf495c3246c66a7679cd"}, + {file = "simplejson-3.18.4-cp310-cp310-win_amd64.whl", hash = "sha256:bd694c465cc61fa8e599355e535b6eb561279834d9883aeef08d0e86c44c300c"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad37f25fd8dfbed80815c3281b82a165be2a74e663856b9a50099d18789987bc"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2362c66d2c633925d90f2f177f05e0570d320d986130d34dff9ad6edbf7be8ac"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30e381471158290ccb79bd31e7bbda4c8f2cf7e1a5f6b557c1b97d6036ccd05b"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d45ed9452a42064805143480397b586ea2ea322f4b8b69034c51181e7f38342"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0dcc54e7cfbd9674ec4ca181e26eaa5b038446601faeaa6c83d146ddef2f2652"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a668d4a93816fb8a644e90e7987aa3beeb9d2112ca50a474d41e6acb5bb88a"}, + {file = 
"simplejson-3.18.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da6dc0cb00ef1e1a8daf285074ca8b2bb89591170c42ceab0c37bcdb9adc802c"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f31e126204ec38f92dee119af87cf881044ef7dea6f7477ef774ed3d84199c24"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fb0f8b35c11fd8e4b924f974d331b20fa54555282451db7f2a3b24bd2d33cc11"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:2d1b47f768e1f4c1c8a9457effabed735939401e85c0ddcdf68444c88a9242e6"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d65ea4582b47d77e9094c22eb0aeded0ebd96c1df86e988870b40c6514c6e21"}, + {file = "simplejson-3.18.4-cp311-cp311-win32.whl", hash = "sha256:32de1672f91a789cc9e1c36c406b2d75457a242d64e9e73a70b9b814ef00095e"}, + {file = "simplejson-3.18.4-cp311-cp311-win_amd64.whl", hash = "sha256:c37b092d29741096c4723f48924a80b1d3de62ca1de254ce88178fa083dd520c"}, + {file = "simplejson-3.18.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:706a7fc81ceeb321a1040d008b134056012188f95a5c31ad94fb03153b35cc84"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab64f087c5863ac621b42e227e5a43bd9b28de581afe7be12ad96562b9be8203"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f27a079cb009ba569983061a50a9270b7e1d35f81e4eeaf0e26f8924027e550"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ba80fbf959b5852554f23201a5f4b30885930c303546ffa883859a435ea3cf"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdb5069870f7d26a34e5adc30672d0a7b26e652720530a023bb3a8d8a42e37f"}, + {file 
= "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:340b7d085b4a5063aacb8664b1250e4a7426c16e1cc80705c548a229153af147"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b9893852c559998f667e6434d2c2474518d4cdfd1b9cec8e57b3c9d577ba55c1"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:efae49d0148ec68b6e012f1b9e19bd530f4dced378ba919e3e906ae2b829cc31"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a89d7fe994b115f0a792e6673f387af3db812a1760d594abad51e0ea11d3e470"}, + {file = "simplejson-3.18.4-cp36-cp36m-win32.whl", hash = "sha256:44058bea97429cfa0d6fb1d8eb0736a77022f34a326d5bc64fd6fed8d9304571"}, + {file = "simplejson-3.18.4-cp36-cp36m-win_amd64.whl", hash = "sha256:f85d87986ca375b8305b5c4f166783b8db383a6469e8b99b8dba22878388f234"}, + {file = "simplejson-3.18.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a3bba99178f1b25878752a8bc6da2f93fbae754ebd4914d2ac4b869b9fb24102"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5f67bffa6fc68e391b2250e1feb43d534ded64a7b918eb89cf7e3e679759d94"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8ac155e3fd3b54a63040df024e57e62c130b15a2fc66eff3c2a946f42beed52"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:682b202f56d9d9e1bb22eaca3e37321002223fd5ddef7189b9233e3c14079917"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dbfaa79b1c0efdb768392a19110f1aff793f3e8d43f57e292f46734b8affb45"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7339bd6203351555c1e728acd601ba95ebce0f6041ebdb386e025f00af3f1769"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:544e5607142d66a469ecf78a3154ec0f915834dc3b8cfdb2677a78ca58319ad6"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:56d36f47bc7c7684504f0f18feb161a0b1162546b3622e45aa6155f8285180ac"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b482d1fdd8f860e743c7de8cd6dfe54fb9fe8cd6ccba29e2966912ac89e17b2f"}, + {file = "simplejson-3.18.4-cp37-cp37m-win32.whl", hash = "sha256:313dfd911723dc3022fed7050a7b315d5d0681cd56eee08e44e2cbd39fd9ad81"}, + {file = "simplejson-3.18.4-cp37-cp37m-win_amd64.whl", hash = "sha256:f5e0a03e533313eee9437ccc6c4eab47369f17bc919b57df4a20ccd8bc85d8fd"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c4f59dd358c3a99efa46d62dc1583be3a1c37171f5240c4cbdc2d5838870902"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:041dd69026284d10f035cefb4a75026d2cfcef31f31e62585eeb2b7776e7e047"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47509775a5c41ec2a6cd17c9c00fc14965cad8e6670059663872ba5e39332f57"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b425a857ce52e651739314e4118fc68bd702ef983148b8fd5cb6f68bb6a020"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deb71e6166e4f1264174d78b5b88abd52b14c6649e6eabaf9cf93cb1c7362850"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827ddc3b3603f7d0421b054388da6face7871d800c4b3bbedeedc8778e4085ea"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc74a9ef4d61e18ee6f1886b6ef1fe285b1f432885288afacfb7402f7d469448"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:16fbebfc38ad4285c256d2430797fd669b0437d090e985c6d443521d4303b133"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7d3f7cd57ce0c6a5bb8133f8ed5c3d1be0473a88b7d91a300626298f12d0999"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b43d3c2e204d709af955bdb904ae127fe137363ace87fbf7dc8fe6017f7f8449"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab5941e1fd509fc151258477ef4b663fe14c94f8faf3581827bf4b02080fd4ba"}, + {file = "simplejson-3.18.4-cp38-cp38-win32.whl", hash = "sha256:a1163bfe5d043c20adeb5c4c8e89dd1dd39b375c8ca6f1c1e35ec537ad7a12e7"}, + {file = "simplejson-3.18.4-cp38-cp38-win_amd64.whl", hash = "sha256:8ccc982197982cdda19e3e5ba4ef7f6ad6bed3eb39bb423bfbf7fa2cd29488ab"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01f426ee9e3a2d205aa4c22c3da996b51f2de75c4199ef703258a28b304dea8c"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46b8cc86204b51eddcf157cbaf3c44a20f24393030442af0909eeb961186cb67"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:65de5876e34780b43f92d9d2539de16ecc56d16f56e56e59b34adfa1cebe064f"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa6fe8fa94a831886ee164ac03514f361e1387a62a1b9da32fde5c0c1f27fa8d"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a50a9da1cf93e35f26c4ddee162abf3184a340339ec2d4001c34607b87e71b4"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2285609b4edbf9957440642493788ebef6583042b3fb96217c2e71f29bc6d80"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b217201efc007166e24e9a282007cc208a2d059350a7c5bd0b0303460ad3019"}, + 
{file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cc9a47bf8cde85c99db5f4a919bb756e62427ade0f2e875a6ec89ae8492d486"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e042ae053e05fe193514d51d6b0f0243729961901e9a75f8b596bfaf69522c52"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d0d3b9f7cee233368d92c89746dde74313abafaa3ec1f0c06a3f4f164dc27bcc"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1844d7782652f859d9648531778582d4842d80cfff8d334eb23bb8da0d22a1b0"}, + {file = "simplejson-3.18.4-cp39-cp39-win32.whl", hash = "sha256:2a6e5c0e0817fb20dbb880c83caebbd4ef39f1901f6f8e53b73a3c74de4e5172"}, + {file = "simplejson-3.18.4-cp39-cp39-win_amd64.whl", hash = "sha256:34d95ad8e27754f0d91917600d6ea273e05c82a71021f168c45be48637d9502f"}, + {file = "simplejson-3.18.4-py3-none-any.whl", hash = "sha256:03de1ec4ad734f28ca49b0a758b997d752be0d089ed30360157c4e8811999c8f"}, + {file = "simplejson-3.18.4.tar.gz", hash = "sha256:6197cfebe659ac802a686b5408494115a7062b45cdf37679c4d6a9d4f39649b7"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -3511,16 +3611,16 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-5.3.0.tar.gz", 
hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] sphinx-autoapi = [ - {file = "sphinx-autoapi-2.0.0.tar.gz", hash = "sha256:97dcf1b5b54cd0d8efef867594e4a4f3e2d3a2c0ec1e5a891e0a61bc77046006"}, - {file = "sphinx_autoapi-2.0.0-py2.py3-none-any.whl", hash = "sha256:dab2753a38cad907bf4e61473c0da365a26bfbe69fbf5aa6e4f7d48e1cf8a148"}, + {file = "sphinx-autoapi-2.1.0.tar.gz", hash = "sha256:5b5c58064214d5a846c9c81d23f00990a64654b9bca10213231db54a241bc50f"}, + {file = "sphinx_autoapi-2.1.0-py2.py3-none-any.whl", hash = "sha256:b25c7b2cda379447b8c36b6a0e3bdf76e02fd64f7ca99d41c6cbdf130a01768f"}, ] sphinx-autobuild = [ {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, @@ -3531,20 +3631,20 @@ sphinx-basic-ng = [ {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, ] sphinx-click = [ - {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"}, - {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"}, + {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, + {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, ] sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = 
"sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] sphinxcontrib-devhelp = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] sphinxcontrib-jsmath = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, @@ -3559,61 +3659,61 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] SpiffWorkflow = [] -SQLAlchemy = [ - {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, - {file = 
"SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, 
- {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = 
"sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, - {file = 
"SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, - {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, +sqlalchemy = [ + {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7917632606fc5d4be661dcde45cc415df835e594e2c50cc999a44f24b6bf6d92"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32f508fef9c5a7d19411d94ef64cf5405e42c4689e51ddbb81ac9a7be045cce8"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0995b92612979d208189245bf87349ad9243b97b49652347a28ddee0803225a"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cebd161f964af58290596523c65e41a5a161a99f7212b1ae675e288a4b5e0a7c"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c38641f5c3714505d65dbbd8fb1350408b9ad8461769ec8e440e1177f9c92d1d"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:921485d1f69ed016e1f756de67d02ad4f143eb6b92b9776bfff78786d8978ab5"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-win32.whl", hash = "sha256:a65a8fd09bdffd63fa23b39cd902e6a4ca23d86ecfe129513e43767a1f3e91fb"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:d2e7411d5ea164c6f4d003f5d4f5e72e202956aaa7496b95bb4a4c39669e001c"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:432cfd77642771ee7ea0dd0f3fb664f18506a3625eab6e6d5d1d771569171270"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce076e25f1170000b4ecdc57a1ff8a70dbe4a5648ec3da0563ef3064e8db4f15"}, + {file = 
"SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14854bdb2a35af536d14f77dfa8dbc20e1bb1972996d64c4147e0d3165c9aaf5"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9020125e3be677c64d4dda7048e247343f1663089cf268a4cc98c957adb7dbe0"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fb649c5473f79c9a7b6133f53a31f4d87de14755c79224007eb7ec76e628551e"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33f73cc45ffa050f5c3b60ff4490e0ae9e02701461c1600d5ede1b008076b1b9"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-win32.whl", hash = "sha256:0789e199fbce8cb1775337afc631ed12bcc5463dd77d7a06b8dafd758cde51f8"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:013f4f330001e84a2b0ef1f2c9bd73169c79d582e54e1a144be1be1dbc911711"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4339110be209fea37a2bb4f35f1127c7562a0393e9e6df5d9a65cc4f5c167cb6"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e61e2e4dfe175dc3510889e44eda1c32f55870d6950ef40519640cb266704d"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d44ff7573016fc26311b5a5c54d5656fb9e0c39e138bc8b81cb7c8667485203"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:57b80e877eb6ec63295835f8a3b86ca3a44829f80c4748e1b019e03adea550fc"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e90f0be674e0845c5c1ccfa5e31c9ee28fd406546a61afc734355cc7ea1f8f8b"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-win32.whl", hash = "sha256:e735a635126b2338dfd3a0863b675437cb53d85885a7602b8cffb24345df33ed"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-win_amd64.whl", hash = "sha256:ea1c63e61b5c13161c8468305f0a5837c80aae2070e33654c68dd12572b638eb"}, + {file = 
"SQLAlchemy-2.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cc337b96ec59ef29907eeadc2ac11188739281568f14c719e61550ca6d201a41"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0eac488be90dd3f7a655d2e34fa59e1305fccabc4abfbd002e3a72ae10bd2f89"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ab8f90f4a13c979e6c41c9f011b655c1b9ae2df6cffa8fa2c7c4d740f3512e"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc370d53fee7408330099c4bcc2573a107757b203bc61f114467dfe586a0c7bd"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:494db0026918e3f707466a1200a5dedbf254a4bce01a3115fd95f04ba8258f09"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:486015a58c9a67f65a15b4f19468b35b97cee074ae55386a9c240f1da308fbfe"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-win32.whl", hash = "sha256:5f7c40ec2e3b31293184020daba95850832bea523a08496ac89b27a5276ec804"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:3da3dff8d9833a7d7f66a3c45a79a3955f775c79f47bb7eea266d0b4c267b17a"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:774965c41b71c8ebe3c5728bf5b9a948231fc3a0422d9fdace0686f5bb689ad6"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94556a2a7fc3de094ea056b62845e2e6e271e26d1e1b2540a1cd2d2506257a10"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f15c54713a8dd57a01c974c9f96476688f6f6374d348819ed7e459535844b614"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea9461f6955f3cf9eff6eeec271686caed7792c76f5b966886a36a42ea46e6b2"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18795e87601b4244fd08b542cd6bff9ef674b17bcd34e4a3c9935398e2cc762c"}, + {file = 
"SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0b698440c477c00bdedff87348b19a79630a235864a8f4378098d61079c16ce9"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-win32.whl", hash = "sha256:38e26cf6b9b4c6c37846f7e31b42e4d664b35f055691265f07e06aeb6167c494"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:a6f7d1debb233f1567d700ebcdde0781a0b63db0ef266246dfbf75ae41bfdf85"}, + {file = "SQLAlchemy-2.0.7-py3-none-any.whl", hash = "sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"}, + {file = "SQLAlchemy-2.0.7.tar.gz", hash = "sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649"}, ] sqlalchemy-stubs = [] stevedore = [ - {file = "stevedore-4.0.1-py3-none-any.whl", hash = "sha256:01645addb67beff04c7cfcbb0a6af8327d2efc3380b0f034aa316d4576c4d470"}, - {file = "stevedore-4.0.1.tar.gz", hash = "sha256:9a23111a6e612270c591fd31ff3321c6b5f3d5f3dabb1427317a5ab608fc261a"}, + {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, + {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] swagger-ui-bundle = [ {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, ] tokenize-rt = [ - {file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"}, - {file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"}, + {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, + {file = "tokenize_rt-5.0.0.tar.gz", hash = 
"sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, @@ -3624,8 +3724,8 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tomlkit = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, + {file = "tomlkit-0.11.7-py3-none-any.whl", hash = "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c"}, + {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"}, ] tornado = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, @@ -3649,152 +3749,163 @@ types-click = [ {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, ] types-dateparser = [ - {file = "types-dateparser-1.1.4.1.tar.gz", hash = "sha256:0f76578bbae15c8b8701b5efd94db98a97ce0a27aedfe6f14a531170de6db97d"}, - {file = "types_dateparser-1.1.4.1-py3-none-any.whl", hash = "sha256:dd7b2343bb06225c0e358533609b66a8edfb95e5426d8f658664e7d0f27dea68"}, + {file = "types-dateparser-1.1.4.9.tar.gz", hash = "sha256:506668f024c2136a44e9046ee18dd4279a55df1be5dc55e5c29ab07643a2e18a"}, + {file = "types_dateparser-1.1.4.9-py3-none-any.whl", hash = "sha256:6539e49032151a8445092109f93e61f51b2082a9f295691df13e073c6abf9137"}, ] -types-Flask = [ +types-flask = [ {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"}, {file = "types_Flask-1.1.6-py3-none-any.whl", hash = 
"sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"}, ] -types-Jinja2 = [ +types-jinja2 = [ {file = "types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81"}, {file = "types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2"}, ] -types-MarkupSafe = [ +types-markupsafe = [ {file = "types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1"}, {file = "types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5"}, ] types-pytz = [ - {file = "types-pytz-2022.5.0.0.tar.gz", hash = "sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, - {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = "sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, + {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, + {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, ] -types-PyYAML = [ - {file = "types-PyYAML-6.0.12.tar.gz", hash = "sha256:f6f350418125872f3f0409d96a62a5a5ceb45231af5cc07ee0034ec48a3c82fa"}, - {file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"}, +types-pyyaml = [ + {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, + {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, ] types-requests = [ - {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, - {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = 
"sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, + {file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"}, + {file = "types_requests-2.28.11.17-py3-none-any.whl", hash = "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, - {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, + {file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"}, + {file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"}, ] -types-Werkzeug = [ +types-werkzeug = [ {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"}, {file = "types_Werkzeug-1.0.9-py3-none-any.whl", hash = "sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec"}, ] typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] tzdata = [ - {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, - {file = "tzdata-2022.5.tar.gz", hash = 
"sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, + {file = "tzdata-2023.2-py2.py3-none-any.whl", hash = "sha256:905ae9e6744dd9ef5ce94d2aaa2dd00282fee38b670b2133407f23c388f110a1"}, + {file = "tzdata-2023.2.tar.gz", hash = "sha256:c3b51b235b07f9f1889089c2264bcbeaaba260a63f89bea09e350ea4205eb95f"}, ] tzlocal = [ - {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, - {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, + {file = "tzlocal-4.3-py3-none-any.whl", hash = "sha256:b44c4388f3d34f25862cfbb387578a4d70fec417649da694a132f628a23367e2"}, + {file = "tzlocal-4.3.tar.gz", hash = "sha256:3f21d09e1b2aa9f2dacca12da240ca37de3ba5237a93addfd6d593afe9073355"}, ] -Unidecode = [ +unidecode = [ {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] vine = [ {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] virtualenv = [ - {file = "virtualenv-20.16.5-py3-none-any.whl", hash = 
"sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"}, - {file = "virtualenv-20.16.5.tar.gz", hash = "sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"}, + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, ] wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] -Werkzeug = [ - {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, - {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, +werkzeug = [ + {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, ] wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = 
"wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = 
"wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = 
"wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = 
"wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", 
hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = 
"wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = 
"wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = 
"wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = 
"wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -WTForms = [ +wtforms = [ {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, ] xdoctest = [ - {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, - {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, + {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"}, + {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"}, ] zipp = [ - {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, - {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = 
"sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index eb991a5c..2104664e 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -29,7 +29,7 @@ flask-restful = "*" werkzeug = "*" SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"} -# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } +# SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow" } sentry-sdk = "^1.10" sphinx-autoapi = "^2.0" flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"} @@ -39,10 +39,16 @@ pytest-flask = "^1.2.0" pytest-flask-sqlalchemy = "^1.1.0" psycopg2 = "^2.9.3" typing-extensions = "^4.4.0" + +# pinned to higher than 65.5.0 because of a vulnerability +# and to lower than 67 because i didn't feel like addressing +# new deprecation warnings. we don't need this library explicitly, +# but at one time it was pulled in by various libs we depend on. 
+setuptools = "^65.5.1" + connexion = {extras = [ "swagger-ui",], version = "^2"} lxml = "^4.9.1" marshmallow-enum = "^1.5.1" -marshmallow-sqlalchemy = "^0.28.0" PyJWT = "^2.6.0" gunicorn = "^20.1.0" APScheduler = "*" @@ -77,6 +83,9 @@ flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" prometheus-flask-exporter = "^0.22.3" +safety = "^2.3.5" +sqlalchemy = "^2.0.7" +marshmallow-sqlalchemy = "^0.29.0" [tool.poetry.dev-dependencies] pytest = "^7.1.2" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 591ed9fc..3619e38f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -46,8 +46,9 @@ class MyJSONEncoder(DefaultJSONProvider): return obj.serialized elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore return_dict = {} - for row_key in obj.keys(): - row_value = obj[row_key] + row_mapping = obj._mapping + for row_key in row_mapping.keys(): + row_value = row_mapping[row_key] if hasattr(row_value, "serialized"): return_dict.update(row_value.serialized) elif hasattr(row_value, "__dict__"): diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index b71bed93..7b97781e 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -901,24 +901,24 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. 
- schema: - type: integer - name: most_recent_tasks_only in: query required: false description: If true, this wil return only the most recent tasks. schema: type: boolean + - name: bpmn_process_guid + in: query + required: false + description: The guid of the bpmn process to get the tasks for. + schema: + type: string + - name: to_task_guid + in: query + required: false + description: Get the tasks only up to the given guid. + schema: + type: string get: tags: - Process Instances @@ -954,24 +954,24 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. - schema: - type: integer - name: most_recent_tasks_only in: query required: false description: If true, this wil return only the most recent tasks. schema: type: boolean + - name: bpmn_process_guid + in: query + required: false + description: The guid of the bpmn process to get the tasks for. + schema: + type: string + - name: to_task_guid + in: query + required: false + description: Get the tasks only up to the given guid. + schema: + type: string get: tags: - Process Instances @@ -1176,7 +1176,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1190,12 +1190,12 @@ paths: description: The unique id of an existing process instance. 
schema: type: integer - - name: spiff_step - in: query - required: false - description: Reset the process to this state + - name: to_task_guid + in: path + required: true + description: Get the tasks only up to the given guid. schema: - type: integer + type: string post: operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset summary: Reset a process instance to an earlier step @@ -1251,9 +1251,16 @@ paths: $ref: "#/components/schemas/OkTrue" /process-instances/reports/columns: + parameters: + - name: process_model_identifier + in: query + required: false + description: The process model identifier to filter by + schema: + type: string get: operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list - summary: Returns all available columns for a process instance report. + summary: Returns all available columns for a process instance report, including custom metadata tags: - Process Instances responses: @@ -1573,7 +1580,7 @@ paths: items: $ref: "#/components/schemas/Task" - /task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1587,15 +1594,15 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: spiff_step + - name: task_guid in: path required: true - description: If set will return the tasks as they were during a specific step of execution. + description: The unique id of the task. schema: - type: integer + type: string get: operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show - summary: Get task data for a single task in a spiff step. + summary: Get task data for a single task. 
tags: - Process Instances responses: @@ -1605,35 +1612,8 @@ paths: application/json: schema: $ref: "#/components/schemas/Task" - - /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}: - parameters: - - name: modified_process_model_identifier - in: path - required: true - description: The modified id of an existing process model - schema: - type: string - - name: process_instance_id - in: path - required: true - description: The unique id of an existing process instance. - schema: - type: integer - - name: task_id - in: path - required: true - description: The unique id of the task. - schema: - type: string - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. - schema: - type: integer put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update + operationId: spiffworkflow_backend.routes.tasks_controller.task_data_update summary: Update the task data for requested instance and task tags: - Process Instances @@ -1738,7 +1718,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}: + /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1752,14 +1732,14 @@ paths: description: The unique id of the process instance schema: type: string - - name: task_id + - name: task_guid in: path required: true description: The unique id of the task. 
schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task + operationId: spiffworkflow_backend.routes.tasks_controller.manual_complete_task summary: Mark a task complete without executing it tags: - Process Instances @@ -1838,9 +1818,9 @@ paths: schema: $ref: "#/components/schemas/ServiceTask" - /tasks/{process_instance_id}/{task_id}: + /tasks/{process_instance_id}/{task_guid}: parameters: - - name: task_id + - name: task_guid in: path required: true description: The unique id of an existing process group. @@ -1852,10 +1832,10 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: terminate_loop + - name: save_as_draft in: query required: false - description: Terminate the loop on a looping task + description: Save the data to task but do not complete it. schema: type: boolean get: @@ -2109,6 +2089,37 @@ paths: schema: $ref: "#/components/schemas/Secret" + /connector-proxy/type-ahead/{category}: + parameters: + - name: category + in: path + required: true + description: The category for the type-ahead search + schema: + type: string + - name: prefix + in: query + required: true + description: The prefix to search for + schema: + type: string + - name: limit + in: query + required: true + description: The maximum number of search results + schema: + type: integer + get: + operationId: spiffworkflow_backend.routes.connector_proxy_controller.type_ahead + summary: Return type ahead search results + tags: + - Type Ahead + responses: + "200": + description: We return type ahead search results + #content: + # - application/json + components: securitySchemes: jwt: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 5c51e294..dec4c444 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -41,6 +41,10 @@ SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="ht SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get( "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004" ) +SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL = environ.get( + "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL", + default="https://emehvlxpwodjawtgi7ctkbvpse0vmaow.lambda-url.us-east-1.on.aws", +) # Open ID server # use "http://localhost:7000/openid" for running with simple openid @@ -139,13 +143,5 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get( "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy" ) -SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES = int( - environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES", default="3") -) - -SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS = int( - environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS", default="1") -) - # this is only used in CI. 
use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py deleted file mode 100644 index c9694489..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Demo environment.""" -from os import environ - -SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True -SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "demo" -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = "demo@example.com" -SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( - "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", - default="terraform_deployed_environment.yml", -) - -SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( - environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" -) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py deleted file mode 100644 index ef025060..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Dev.""" -from os import environ - -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="staging" -) -SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_USERNAME", default="sartography-automated-committer" -) -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL", - default="sartography-automated-committer@users.noreply.github.com", -) -SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml deleted file mode 100644 
index a556c013..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml +++ /dev/null @@ -1,151 +0,0 @@ -default_group: everybody - -groups: - admin: - users: - [ - admin@spiffworkflow.org, - jakub@status.im, - jarrad@status.im, - kb@sartography.com, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - Finance Team: - users: - [ - jakub@status.im, - amir@status.im, - jarrad@status.im, - sasha@status.im, - fin@status.im, - fin1@status.im, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - demo: - users: - [ - harmeet@status.im, - sasha@status.im, - manuchehr@status.im, - core@status.im, - fin@status.im, - fin1@status.im, - lead@status.im, - lead1@status.im, - ] - - test: - users: - [ - natalia@sartography.com, - ] - -permissions: - admin: - groups: [admin] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /* - - # open system defaults for everybody - read-all-process-groups: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-groups/* - read-all-process-models: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-models/* - - # basic perms for everybody - read-all-process-instances-for-me: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/* - read-process-instance-reports: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-instances/reports/* - processes-read: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /processes - service-tasks: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /service-tasks - tasks-crud: - groups: [everybody] - users: [] - allowed_permissions: 
[create, read, update, delete] - uri: /tasks/* - user-groups-for-current-user: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /user-groups/for-current-user - - - finance-admin: - groups: ["Finance Team"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-groups/manage-procurement:procurement:* - - manage-revenue-streams-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:vendor-lifecycle-management:* - - manage-revenue-streams-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:* - - create-test-instances: - groups: ["test"] - users: [] - allowed_permissions: [create, read] - uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/local_development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/local_development.yml index 558b9eaf..049c991e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/local_development.yml +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/local_development.yml @@ -1,70 +1,8 @@ default_group: everybody -users: - admin: - email: admin@spiffworkflow.org - password: admin - preferred_username: Admin - groups: admin: - users: - [ - admin@spiffworkflow.org, - jakub@status.im, - jarrad@status.im, - kb@sartography.com, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - Finance Team: - users: - [ - jakub@status.im, - amir@status.im, - jarrad@status.im, - sasha@status.im, - fin@status.im, - fin1@status.im, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - demo: - users: - [ - harmeet@status.im, - sasha@status.im, - manuchehr@status.im, - core@status.im, - fin@status.im, - fin1@status.im, - lead@status.im, - lead1@status.im, - ] - - test: - users: - [ - natalia@sartography.com, - ] - - admin-ro: - users: - [ - j@sartography.com, - ] + users: [admin@spiffworkflow.org] permissions: admin: @@ -72,107 +10,3 @@ permissions: users: [] allowed_permissions: [create, read, update, delete] uri: /* - admin-readonly: - groups: [admin-ro] - users: [] - allowed_permissions: [read] - uri: /* - admin-process-instances-for-readonly: - groups: [admin-ro] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-instances/* - - # open system defaults for everybody - read-all-process-groups: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-groups/* - read-all-process-models: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-models/* - - # basic perms for everybody - read-all-process-instances-for-me: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/* - read-process-instance-reports: 
- groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-instances/reports/* - read-process-instances-find-by-id: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-instances/find-by-id/* - processes-read: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /processes - service-tasks: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /service-tasks - tasks-crud: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /tasks/* - user-groups-for-current-user: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /user-groups/for-current-user - - - finance-admin: - groups: ["Finance Team"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-groups/manage-procurement:procurement:* - - manage-revenue-streams-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:vendor-lifecycle-management:* - - manage-revenue-streams-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: 
/process-instances/for-me/manage-procurement:vendor-lifecycle-management:* - - create-test-instances: - groups: ["test"] - users: [] - allowed_permissions: [create, read] - uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml deleted file mode 100644 index 049c991e..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml +++ /dev/null @@ -1,12 +0,0 @@ -default_group: everybody - -groups: - admin: - users: [admin@spiffworkflow.org] - -permissions: - admin: - groups: [admin] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml deleted file mode 100644 index 9816ca93..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml +++ /dev/null @@ -1,148 +0,0 @@ -default_group: everybody - -groups: - admin: - users: - [ - admin@spiffworkflow.org, - jakub@status.im, - jarrad@status.im, - kb@sartography.com, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - Finance Team: - users: - [ - jakub@status.im, - amir@status.im, - jarrad@status.im, - sasha@status.im, - fin@status.im, - fin1@status.im, - alex@sartography.com, - dan@sartography.com, - mike@sartography.com, - jason@sartography.com, - j@sartography.com, - elizabeth@sartography.com, - jon@sartography.com, - ] - - demo: - users: - [ - harmeet@status.im, - sasha@status.im, - manuchehr@status.im, - core@status.im, - fin@status.im, - fin1@status.im, - lead@status.im, - lead1@status.im, - ] - test: - users: - [ - natalia@sartography.com, - ] - -permissions: - admin: - groups: [admin] - users: [] - 
allowed_permissions: [read] - uri: /* - admin-process-instances: - groups: [admin] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-instances/* - - # open system defaults for everybody - read-all-process-groups: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-groups/* - read-all-process-models: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-models/* - - # basic perms for everybody - read-all-process-instances-for-me: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/* - read-process-instance-reports: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /process-instances/reports/* - processes-read: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /processes - service-tasks: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /service-tasks - tasks-crud: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /tasks/* - user-groups-for-current-user: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /user-groups/for-current-user - - manage-revenue-streams-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances: - groups: ["demo"] - users: [] - allowed_permissions: [create] - uri: /process-instances/manage-procurement:vendor-lifecycle-management:* - - manage-revenue-streams-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: 
/process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instances-for-me: - groups: ["demo"] - users: [] - allowed_permissions: [read] - uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:* - - create-test-instances: - groups: ["test"] - users: [] - allowed_permissions: [create, read] - uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py deleted file mode 100644 index ac56d84f..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Qa1.""" -from os import environ - -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="qa2" -) -SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_USERNAME", default="sartography-automated-committer" -) -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL", - default=f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com", -) -SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( - "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml" -) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py index b5ac6cee..250dba7f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py @@ -1,4 +1,4 @@ -"""Qa2.""" +"""qa2 just here as an example of path based routing for apps.""" from os import environ 
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py deleted file mode 100644 index 8dd2e1a0..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Default.""" -from os import environ - -environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"] -SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = ( - f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/sartography" -) -SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main") -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL", - default="https://github.com/sartography/sartography-process-models.git", -) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py deleted file mode 100644 index 55df0c16..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Staging.""" -from os import environ - -SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging") -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="main" -) -SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False -SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py index 1585b577..985047b7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py @@ -5,8 +5,13 @@ from os import environ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"] SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True -SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer" -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com" +SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get( + "SPIFFWORKFLOW_BACKEND_GIT_USERNAME", default="sartography-automated-committer" +) +SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get( + "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL", + default=f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com", +) SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="terraform_deployed_environment.yml", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index f6b2d391..923ff1f5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -128,7 +128,7 @@ class ApiError(Exception): instance = cls(code, message, status_code=status_code) instance.task_id = task_spec.name or "" instance.task_name = task_spec.description or "" - if task_spec._wf_spec: + if hasattr(task_spec, "_wf_spec") and task_spec._wf_spec: instance.file_name = task_spec._wf_spec.file return instance @@ -158,7 +158,8 @@ class ApiError(Exception): task_trace=exp.task_trace, ) elif isinstance(exp, WorkflowException) and exp.task_spec: - return ApiError.from_task_spec(error_code, message, exp.task_spec) + msg = message + ". 
" + str(exp) + return ApiError.from_task_spec(error_code, msg, exp.task_spec) else: return ApiError("workflow_error", str(exp)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py index 57108b6c..091dfaff 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py @@ -2,6 +2,7 @@ import time import sqlalchemy +from sqlalchemy.sql import text from spiffworkflow_backend.models.db import db @@ -9,7 +10,7 @@ from spiffworkflow_backend.models.db import db def try_to_connect(start_time: float) -> None: """Try to connect.""" try: - db.first_or_404("select 1") # type: ignore + db.first_or_404(text("select 1")) # type: ignore except sqlalchemy.exc.DatabaseError as exception: if time.time() - start_time > 15: raise exception diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 52e0c573..5e78b4d3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -41,9 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( ) # noqa: F401 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401 from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401 -from spiffworkflow_backend.models.spiff_step_details import ( - SpiffStepDetailsModel, -) # noqa: F401 from spiffworkflow_backend.models.user import UserModel # noqa: F401 from spiffworkflow_backend.models.group import GroupModel # noqa: F401 from spiffworkflow_backend.models.process_instance_metadata import ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 22bdfa70..d5ba53df 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship @@ -8,12 +10,17 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +class BpmnProcessNotFoundError(Exception): + pass + + # properties_json attributes: # "last_task", # guid generated by spiff # "root", # guid generated by spiff # "success", # boolean # "bpmn_messages", # if top-level process # "correlations", # if top-level process +@dataclass class BpmnProcessModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process" id: int = db.Column(db.Integer, primary_key=True) @@ -24,7 +31,8 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) - parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) + top_level_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) + direct_parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py index 7f60d751..90206235 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from 
dataclasses import dataclass + from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel # # each subprocess will have its own row in this table. # there is a join table to link them together: bpmn_process_definition_relationship +@dataclass class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py index 096570d8..51126503 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition_relationship" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index f9824f02..a67b7d5a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -53,6 +53,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): """ProcessInstanceModel.""" __tablename__ = "process_instance" + __allow_unmapped__ = True id: int = db.Column(db.Integer, 
primary_key=True) process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True) process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True) @@ -87,6 +88,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "ProcessInstanceMetadataModel", cascade="delete", ) # type: ignore + process_instance_queue = relationship( + "ProcessInstanceQueueModel", + cascade="delete", + ) # type: ignore start_in_seconds: int | None = db.Column(db.Integer, index=True) end_in_seconds: int | None = db.Column(db.Integer, index=True) @@ -96,7 +101,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): bpmn_version_control_type: str = db.Column(db.String(50)) bpmn_version_control_identifier: str = db.Column(db.String(255)) - spiff_step: int = db.Column(db.Integer) bpmn_xml_file_contents: str | None = None process_model_with_diagram_identifier: str | None = None @@ -117,7 +121,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "bpmn_xml_file_contents": self.bpmn_xml_file_contents, "bpmn_version_control_identifier": self.bpmn_version_control_identifier, "bpmn_version_control_type": self.bpmn_version_control_type, - "spiff_step": self.spiff_step, "process_initiator_username": self.process_initiator.username, } diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index ade1f60d..ad2041cb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -8,7 +8,6 @@ from typing import Optional from typing import TypedDict from sqlalchemy import ForeignKey -from sqlalchemy.orm import deferred from sqlalchemy.orm import relationship from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( @@ -69,7 +68,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): id: 
int = db.Column(db.Integer, primary_key=True) identifier: str = db.Column(db.String(50), nullable=False, index=True) - report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore + report_metadata: dict = db.Column(db.JSON) created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore created_by = relationship("UserModel") created_at_in_seconds = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py deleted file mode 100644 index 58d34095..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Spiff_step_details.""" -from dataclasses import dataclass -from typing import Union - -from sqlalchemy import ForeignKey -from sqlalchemy import UniqueConstraint -from sqlalchemy.orm import deferred - -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel - - -@dataclass -class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): - """SpiffStepDetailsModel.""" - - __tablename__ = "spiff_step_details" - __table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),) - - id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # type: ignore - ) - spiff_step: int = db.Column(db.Integer, nullable=False) - task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore - task_id: str = db.Column(db.String(50), nullable=False) - task_state: str = db.Column(db.String(50), nullable=False) - bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) - delta_json: list = deferred(db.Column(db.JSON)) # type: ignore - - 
start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False) - - # to fix mypy in 3.9 - not sure why syntax like: - # float | None - # works in other dataclass db models - end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index a7812ba0..bc2fcff3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -47,6 +47,7 @@ class MultiInstanceType(enum.Enum): @dataclass class TaskModel(SpiffworkflowBaseDBModel): __tablename__ = "task" + __allow_unmapped__ = True id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True) bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False, index=True) # type: ignore @@ -63,9 +64,11 @@ class TaskModel(SpiffworkflowBaseDBModel): json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True) - start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) + start_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) + data: Optional[dict] = None + def python_env_data(self) -> dict: return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash) @@ -106,7 +109,6 @@ class Task: event_definition: Union[dict[str, Any], None] = None, call_activity_process_identifier: Optional[str] = None, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ): """__init__.""" self.id = id @@ -121,7 +123,6 @@ class Task: self.event_definition = event_definition self.call_activity_process_identifier = call_activity_process_identifier self.calling_subprocess_task_id = calling_subprocess_task_id - self.task_spiff_step = 
task_spiff_step self.data = data if self.data is None: @@ -179,7 +180,6 @@ class Task: "event_definition": self.event_definition, "call_activity_process_identifier": self.call_activity_process_identifier, "calling_subprocess_task_id": self.calling_subprocess_task_id, - "task_spiff_step": self.task_spiff_step, } @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 791e1dea..ec243649 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint from sqlalchemy.orm import relationship @@ -11,6 +13,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class TaskDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "task_definition" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py new file mode 100644 index 00000000..45c0bd28 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py @@ -0,0 +1,25 @@ +from typing import Any + +import flask.wrappers +import requests +from flask import current_app +from flask.wrappers import Response + + +def connector_proxy_type_ahead_url() -> Any: + """Returns the connector proxy type ahead url.""" + return current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL"] + + +def type_ahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response: + url = f"{connector_proxy_type_ahead_url()}/v1/type-ahead/{category}?prefix={prefix}&limit={limit}" + 
+ proxy_response = requests.get(url) + status = proxy_response.status_code + if status // 100 == 2: + response = proxy_response.text + else: + # supress pop up errors on the client + status = 200 + response = "[]" + return Response(response, status=status, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py index 7cd65a37..f7db74db 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py @@ -59,6 +59,12 @@ def message_instance_list( # payload: dict, # process_instance_id: Optional[int], # } +# +# For example: +# curl 'http://localhost:7000/v1.0/messages/gogo' \ +# -H 'authorization: Bearer [FIXME]' \ +# -H 'content-type: application/json' \ +# --data-raw '{"payload":{"sure": "yes", "food": "spicy"}}' def message_send( message_name: str, body: Dict[str, Any], diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css index 15b093f6..94d1e057 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css @@ -2,7 +2,7 @@ margin: 0; padding: 0; background-color:white; - font-family: 'Arial'; + font-family: 'Arial, sans-serif'; } header { width: 100%; diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html index 858355c3..815275d2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html @@ -1,12 +1,12 @@ - + Login Form
- + Small SpiffWorkflow logo

Login

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index a07f5f49..ac38eff0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -16,15 +16,9 @@ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ( - ProcessInstanceTaskDataCannotBeUpdatedError, -) -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_file_data import ( ProcessInstanceFileDataModel, ) @@ -38,7 +32,6 @@ from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService -from spiffworkflow_backend.services.task_service import TaskService process_api_blueprint = Blueprint("process_api", __name__) @@ -169,60 +162,6 @@ def github_webhook_receive(body: Dict) -> Response: return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json") -def task_data_update( - process_instance_id: str, - modified_process_model_identifier: str, - task_id: str, - body: Dict, -) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if 
process_instance: - if process_instance.status != "suspended": - raise ProcessInstanceTaskDataCannotBeUpdatedError( - "The process instance needs to be suspended to update the task-data." - f" It is currently: {process_instance.status}" - ) - - task_model = TaskModel.query.filter_by(guid=task_id).first() - if task_model is None: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - json_data_dict = TaskService.update_task_data_on_task_model( - task_model, new_task_data_dict, "json_data_hash" - ) - if json_data_dict is not None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) - ProcessInstanceProcessor.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id - ) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: """Get_required_parameter_or_raise.""" return_value = None @@ -263,30 +202,6 @@ def send_bpmn_event( ) -def manual_complete_task( - modified_process_model_identifier: str, - process_instance_id: str, - task_id: str, - body: Dict, -) -> Response: - """Mark a task complete without executing it.""" - execute = body.get("execute", True) - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if process_instance: - processor = ProcessInstanceProcessor(process_instance) - processor.manual_complete_task(task_id, execute) - else: - raise ApiError( - error_code="complete_task", - message=f"Could not complete Task {task_id} in Instance {process_instance_id}", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _commit_and_push_to_git(message: str) -> None: """Commit_and_push_to_git.""" if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 93e78389..c6a8ddcd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -4,7 +4,6 @@ import json from typing import Any from typing import Dict from typing import Optional -from uuid import UUID import flask.wrappers 
from flask import current_app @@ -13,12 +12,12 @@ from flask import jsonify from flask import make_response from flask import request from flask.wrappers import Response -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState from sqlalchemy import and_ from sqlalchemy import or_ +from sqlalchemy.orm import aliased from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel @@ -42,8 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel @@ -59,9 +56,6 @@ from spiffworkflow_backend.services.error_handling_service import ErrorHandlingS from spiffworkflow_backend.services.git_service import GitCommandError from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.message_service import MessageService -from spiffworkflow_backend.services.process_instance_lock_service import ( - ProcessInstanceLockService, -) from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) @@ -85,6 +79,7 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from 
spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService def process_instance_create( @@ -107,7 +102,6 @@ def process_instance_create( process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_identifier, g.user ) - ProcessInstanceQueueService.enqueue(process_instance) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), status=201, @@ -133,7 +127,6 @@ def process_instance_run( if do_engine_steps: try: - processor.lock_process_instance("Web") processor.do_engine_steps(save=True) except ( ApiError, @@ -144,7 +137,8 @@ def process_instance_run( raise e except Exception as e: ErrorHandlingService().handle_error(processor, e) - # fixme: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes + # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes. + # we need to recurse through all last tasks if the last task is a call activity or subprocess. 
task = processor.bpmn_process_instance.last_task raise ApiError.from_task( error_code="unknown_exception", @@ -152,9 +146,6 @@ def process_instance_run( status_code=400, task=task, ) from e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() @@ -175,14 +166,11 @@ def process_instance_terminate( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.terminate() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.terminate() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -196,14 +184,11 @@ def process_instance_suspend( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.suspend() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.suspend() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -217,14 +202,11 @@ def process_instance_resume( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.resume() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.resume() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: 
ErrorHandlingService().handle_error(processor, e) raise e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -392,15 +374,21 @@ def process_instance_list( return make_response(jsonify(response_json), 200) -def process_instance_report_column_list() -> flask.wrappers.Response: +def process_instance_report_column_list(process_model_identifier: Optional[str] = None) -> flask.wrappers.Response: """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() - columns_for_metadata = ( + columns_for_metadata_query = ( db.session.query(ProcessInstanceMetadataModel.key) .order_by(ProcessInstanceMetadataModel.key) .distinct() # type: ignore - .all() ) + if process_model_identifier: + columns_for_metadata_query = columns_for_metadata_query.join(ProcessInstanceModel) + columns_for_metadata_query = columns_for_metadata_query.filter( + ProcessInstanceModel.process_model_identifier == process_model_identifier + ) + + columns_for_metadata = columns_for_metadata_query.all() columns_for_metadata_strings = [ {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata ] @@ -449,7 +437,6 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete() db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() db.session.delete(process_instance) db.session.commit() @@ -556,142 +543,169 @@ def process_instance_report_show( def process_instance_task_list_without_task_data_for_me( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, + bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> 
flask.wrappers.Response: """Process_instance_task_list_without_task_data_for_me.""" process_instance = _find_process_instance_for_me_or_raise(process_instance_id) return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - most_recent_tasks_only, + _modified_process_model_identifier=modified_process_model_identifier, + process_instance=process_instance, + most_recent_tasks_only=most_recent_tasks_only, + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) def process_instance_task_list_without_task_data( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, + bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - most_recent_tasks_only, + _modified_process_model_identifier=modified_process_model_identifier, + process_instance=process_instance, + most_recent_tasks_only=most_recent_tasks_only, + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) def process_instance_task_list( _modified_process_model_identifier: str, process_instance: ProcessInstanceModel, - all_tasks: bool = False, - spiff_step: int = 0, + bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - step_detail_query = db.session.query(SpiffStepDetailsModel).filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, + bpmn_process_ids = [] + if bpmn_process_guid: + bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first() + bpmn_processes = 
TaskService.bpmn_process_and_descendants([bpmn_process]) + bpmn_process_ids = [p.id for p in bpmn_processes] + + task_model_query = db.session.query(TaskModel).filter( + TaskModel.process_instance_id == process_instance.id, ) - if spiff_step > 0: - step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) + to_task_model: Optional[TaskModel] = None + task_models_of_parent_bpmn_processes_guids: list[str] = [] + if to_task_guid is not None: + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise ApiError( + error_code="task_not_found", + message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'", + status_code=400, + ) - step_details = step_detail_query.all() + if to_task_model.state != "COMPLETED": + # TODO: find a better term for viewing at task state + raise ApiError( + error_code="task_cannot_be_viewed_at", + message=( + f"Desired task with guid '{to_task_guid}' for process instance '{process_instance.id}' was never" + " completed and therefore cannot be viewed at." 
+ ), + status_code=400, + ) - processor = ProcessInstanceProcessor(process_instance) - full_bpmn_process_dict = processor.full_bpmn_process_dict - tasks = full_bpmn_process_dict["tasks"] - subprocesses = full_bpmn_process_dict["subprocesses"] - - steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} - - def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - if spiff_task["last_state_change"] > step_ended: - spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - spiff_task["data"] = {} - - if spiff_step > 0: - last_change = step_details[-1].end_in_seconds or 0 - for spiff_task in tasks.values(): - restore_task(spiff_task, last_change) - for subprocess in subprocesses.values(): - for spiff_task in subprocess["tasks"].values(): - restore_task(spiff_task, last_change) - - bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - if spiff_step > 0: - bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - if not subprocess.is_completed(): - task = bpmn_process_instance.get_task(subprocess_id) - task._set_state(TaskState.WAITING) - - spiff_tasks = None - if all_tasks: - spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - ( - subprocesses_by_child_task_ids, - task_typename_by_task_id, - ) = processor.get_subprocesses_by_child_task_ids() - processor.get_highest_level_calling_subprocesses_by_child_task_ids( - subprocesses_by_child_task_ids, task_typename_by_task_id - ) - - spiff_tasks_to_process = spiff_tasks - if most_recent_tasks_only: - spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {} - current_tasks = {} - for spiff_task in spiff_tasks_to_process: - row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}" - if spiff_task.state in [TaskState.READY, 
TaskState.WAITING]: - current_tasks[row_id] = spiff_task - if ( - row_id not in spiff_tasks_by_process_id_and_task_name - or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state - ): - spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task - spiff_tasks_by_process_id_and_task_name.update(current_tasks) - spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values() - - response = [] - for spiff_task in spiff_tasks_to_process: - task_spiff_step: Optional[int] = None - if str(spiff_task.id) in steps_by_id: - task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step - calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None) - task = ProcessInstanceService.spiff_task_to_api_task( - processor, - spiff_task, - calling_subprocess_task_id=calling_subprocess_task_id, - task_spiff_step=task_spiff_step, + ( + _parent_bpmn_processes, + task_models_of_parent_bpmn_processes, + ) = TaskService.task_models_of_parent_bpmn_processes(to_task_model) + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + task_model_query = task_model_query.filter( + or_( + TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore + TaskModel.guid.in_(task_models_of_parent_bpmn_processes_guids), # type: ignore + ) ) - if task.state in ["MAYBE", "LIKELY"]: - task.state = "FUTURE" - response.append(task) - return make_response(jsonify(response), 200) + bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel) + + task_model_query = ( + task_model_query.order_by(TaskModel.id.desc()) # type: ignore + .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) + .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) + .outerjoin( + direct_parent_bpmn_process_alias, + 
direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id, + ) + .outerjoin( + direct_parent_bpmn_process_definition_alias, + direct_parent_bpmn_process_definition_alias.id + == direct_parent_bpmn_process_alias.bpmn_process_definition_id, + ) + .join( + BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id + ) + .add_columns( + BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore + BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore + bpmn_process_alias.guid.label("bpmn_process_guid"), + # not sure why we needed these + # direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), + # direct_parent_bpmn_process_definition_alias.bpmn_identifier.label( + # "bpmn_process_direct_parent_bpmn_identifier" + # ), + TaskDefinitionModel.bpmn_identifier, + TaskDefinitionModel.bpmn_name, + TaskDefinitionModel.typename, + TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore + TaskModel.guid, + TaskModel.state, + TaskModel.end_in_seconds, + TaskModel.start_in_seconds, + ) + ) + + if len(bpmn_process_ids) > 0: + task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) + + task_models = task_model_query.all() + task_model_list = {} + if most_recent_tasks_only: + for task_model in task_models: + bpmn_process_guid = task_model.bpmn_process_guid or "TOP" + row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}" + if row_key not in task_model_list: + task_model_list[row_key] = task_model + task_models = list(task_model_list.values()) + + if to_task_model is not None: + task_models_dict = json.loads(current_app.json.dumps(task_models)) + for task_model in task_models_dict: + end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None + if to_task_model.guid == 
task_model["guid"] and task_model["state"] == "COMPLETED": + TaskService.reset_task_model_dict(task_model, state="READY") + elif ( + end_in_seconds is None + or to_task_model.end_in_seconds is None + or to_task_model.end_in_seconds < end_in_seconds + ) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids: + TaskService.reset_task_model_dict(task_model, state="WAITING") + return make_response(jsonify(task_models_dict), 200) + + return make_response(jsonify(task_models), 200) def process_instance_reset( process_instance_id: int, modified_process_model_identifier: str, - spiff_step: int = 0, + to_task_guid: str, ) -> flask.wrappers.Response: """Reset a process instance to a particular step.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - processor = ProcessInstanceProcessor(process_instance) - processor.reset_process(spiff_step) + ProcessInstanceProcessor.reset_process(process_instance, to_task_guid) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py index 303dd94a..3d7ab5af 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py @@ -102,7 +102,6 @@ def script_unit_test_run( """Script_unit_test_run.""" # FIXME: We should probably clear this somewhere else but this works current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None python_script = _get_required_parameter_or_raise("python_script", body) input_json = _get_required_parameter_or_raise("input_json", body) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index ad9868e6..9baffd25 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -34,10 +34,15 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceTaskDataCannotBeUpdatedError, +) +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.routes.process_api_blueprint import ( _find_principal_or_raise, @@ -51,11 +56,15 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceQueueService, +) from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService 
class TaskDataSelectOption(TypedDict): @@ -169,58 +178,91 @@ def task_list_for_my_groups( def task_data_show( modified_process_model_identifier: str, process_instance_id: int, - spiff_step: int = 0, + task_guid: str, ) -> flask.wrappers.Response: - process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) + task_model.data = task_model.json_data() + return make_response(jsonify(task_model), 200) - if step_detail is None: + +def task_data_update( + process_instance_id: str, + modified_process_model_identifier: str, + task_guid: str, + body: Dict, +) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + if process_instance.status != "suspended": + raise ProcessInstanceTaskDataCannotBeUpdatedError( + "The process instance needs to be suspended to update the task-data." 
+ f" It is currently: {process_instance.status}" + ) + + task_model = TaskModel.query.filter_by(guid=task_guid).first() + if task_model is None: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_guid} in Instance: {process_instance_id}.", + ) + + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, new_task_data_dict, "json_data_hash" + ) + if json_data_dict is not None: + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) + ProcessInstanceProcessor.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid + ) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: raise ApiError( - error_code="spiff_step_for_proces_instance_not_found", - message="The given spiff step for the given process instance could not be found.", - status_code=400, + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_guid}.", ) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = processor.__class__.get_task_by_bpmn_identifier( - step_detail.bpmn_task_identifier, processor.bpmn_process_instance + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", ) - task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"] - task = ProcessInstanceService.spiff_task_to_api_task( - processor, - spiff_task, - task_spiff_step=spiff_step, + + +def manual_complete_task( + modified_process_model_identifier: str, + process_instance_id: str, + task_guid: str, + body: Dict, +) -> Response: + """Mark a task complete without executing it.""" + execute = body.get("execute", True) + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.manual_complete_task(task_guid, execute) + else: + raise ApiError( + error_code="complete_task", + message=f"Could not complete Task {task_guid} in Instance {process_instance_id}", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", ) - task.data = task_data - - return make_response(jsonify(task), 200) -def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: - if task.form_ui_schema is None: - task.form_ui_schema = {} - - if task.data and "form_ui_hidden_fields" in task.data: - hidden_fields = task.data["form_ui_hidden_fields"] - for hidden_field in 
hidden_fields: - hidden_field_parts = hidden_field.split(".") - relevant_depth_of_ui_schema = task.form_ui_schema - for ii, hidden_field_part in enumerate(hidden_field_parts): - if hidden_field_part not in relevant_depth_of_ui_schema: - relevant_depth_of_ui_schema[hidden_field_part] = {} - relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] - if len(hidden_field_parts) == ii + 1: - relevant_depth_of_ui_schema["ui:widget"] = "hidden" - - -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: +def task_show(process_instance_id: int, task_guid: str) -> flask.wrappers.Response: """Task_show.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -235,12 +277,12 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response process_instance.process_model_identifier, ) - _find_human_task_or_raise(process_instance_id, task_id) + _find_human_task_or_raise(process_instance_id, task_guid) form_schema_file_name = "" form_ui_schema_file_name = "" processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) extensions = spiff_task.task_spec.extensions if "properties" in extensions: @@ -273,7 +315,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ApiError( error_code="missing_form_file", message=( - f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}" + f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:" + f" {task_guid}" ), status_code=400, ) @@ -338,11 +381,11 @@ def process_data_show( ) -def task_submit_shared( +def _task_submit_shared( process_instance_id: int, - task_id: str, + task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = 
False, ) -> flask.wrappers.Response: principal = _find_principal_or_raise() process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -357,7 +400,7 @@ def task_submit_shared( ) processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user) if spiff_task.state != TaskState.READY: @@ -369,31 +412,10 @@ def task_submit_shared( ) ) - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - human_task = _find_human_task_or_raise( - process_instance_id=process_instance_id, - task_id=task_id, - only_tasks_that_can_be_completed=True, - ) - - retry_times = current_app.config["SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES"] - retry_interval_in_seconds = current_app.config[ - "SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS" - ] - - with sentry_sdk.start_span(op="task", description="complete_form_task"): - processor.lock_process_instance("Web", retry_times, retry_interval_in_seconds) - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - processor.unlock_process_instance("Web") - + # multi-instance code from crconnect - we may need it or may not + # if terminate_loop and spiff_task.is_looping(): + # spiff_task.terminate_loop() + # # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well. 
# if update_all: @@ -404,28 +426,55 @@ def task_submit_shared( # last_index = next_task.task_info()["mi_index"] # next_task = processor.next_task() - next_human_task_assigned_to_me = ( - HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) - .order_by(asc(HumanTaskModel.id)) # type: ignore - .join(HumanTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_human_task_assigned_to_me: - return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) + if save_as_draft: + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) + ProcessInstanceService.update_form_task_data(processor, spiff_task, body, g.user) + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, spiff_task.data, "json_data_hash" + ) + if json_data_dict is not None: + TaskService.insert_or_update_json_data_dict(json_data_dict) + db.session.add(task_model) + db.session.commit() + else: + human_task = _find_human_task_or_raise( + process_instance_id=process_instance_id, + task_guid=task_guid, + only_tasks_that_can_be_completed=True, + ) + + with sentry_sdk.start_span(op="task", description="complete_form_task"): + with ProcessInstanceQueueService.dequeued(process_instance): + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + + next_human_task_assigned_to_me = ( + HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) + .order_by(asc(HumanTaskModel.id)) # type: ignore + .join(HumanTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_human_task_assigned_to_me: + return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") def task_submit( process_instance_id: int, - task_id: 
str, + task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"): - return task_submit_shared(process_instance_id, task_id, body, terminate_loop) + return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft) def _get_tasks( @@ -580,15 +629,15 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> def _get_spiff_task_from_process_instance( - task_id: str, + task_guid: str, process_instance: ProcessInstanceModel, processor: Union[ProcessInstanceProcessor, None] = None, ) -> SpiffTask: """Get_spiff_task_from_process_instance.""" if processor is None: processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + task_uuid = uuid.UUID(task_guid) + spiff_task = processor.bpmn_process_instance.get_task_from_id(task_uuid) if spiff_task is None: raise ( @@ -679,15 +728,15 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any: def _find_human_task_or_raise( process_instance_id: int, - task_id: str, + task_guid: str, only_tasks_that_can_be_completed: bool = False, ) -> HumanTaskModel: if only_tasks_that_can_be_completed: human_task_query = HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id, completed=False + process_instance_id=process_instance_id, task_id=task_guid, completed=False ) else: - human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id) + human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_guid) human_task: HumanTaskModel = human_task_query.first() if human_task is None: @@ -695,10 +744,40 @@ def _find_human_task_or_raise( ApiError( error_code="no_human_task", message=( 
- f"Cannot find a task to complete for task id '{task_id}' and" + f"Cannot find a task to complete for task id '{task_guid}' and" f" process instance {process_instance_id}." ), status_code=500, ) ) return human_task + + +def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: + if task.form_ui_schema is None: + task.form_ui_schema = {} + + if task.data and "form_ui_hidden_fields" in task.data: + hidden_fields = task.data["form_ui_hidden_fields"] + for hidden_field in hidden_fields: + hidden_field_parts = hidden_field.split(".") + relevant_depth_of_ui_schema = task.form_ui_schema + for ii, hidden_field_part in enumerate(hidden_field_parts): + if hidden_field_part not in relevant_depth_of_ui_schema: + relevant_depth_of_ui_schema[hidden_field_part] = {} + relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] + if len(hidden_field_parts) == ii + 1: + relevant_depth_of_ui_schema["ui:widget"] = "hidden" + + +def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel: + task_model: Optional[TaskModel] = TaskModel.query.filter_by( + guid=task_guid, process_instance_id=process_instance_id + ).first() + if task_model is None: + raise ApiError( + error_code="task_not_found", + message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", + status_code=400, + ) + return task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 14ce1027..e1fc02eb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -264,6 +264,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Optional[Res ) else: + current_app.logger.error(f"id_token not found in payload from provider: {auth_token_object}") raise ApiError( error_code="invalid_token", message="Login failed. 
Please try again", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py index a650cb48..f599d799 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py @@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.scripts.script import Script @@ -43,14 +42,6 @@ class DeleteProcessInstancesWithCriteria(Script): rows_affected = len(results) if rows_affected > 0: - ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore - - step_details = SpiffStepDetailsModel.query.filter( - SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore - ).all() - - for deletion in step_details: - db.session.delete(deletion) for deletion in results: db.session.delete(deletion) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py index b9f7c61b..e8d534b8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py @@ -14,5 +14,5 @@ def safe_assertion(condition: bool) -> Generator[bool, None, None]: if not condition: sentry_sdk.capture_exception(e) current_app.logger.exception(e) - if current_app.config["ENV_IDENTIFIER"] == "local_development": + if current_app.config["ENV_IDENTIFIER"] in ["local_development", "unit_testing"]: raise e diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 94f3a67f..b96f98e5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -6,7 +6,6 @@ import sys from typing import Any from typing import Optional -from flask import g from flask.app import Flask @@ -88,28 +87,6 @@ class JsonFormatter(logging.Formatter): return json.dumps(message_dict, default=str) -class SpiffFilter(logging.Filter): - """SpiffFilter.""" - - def __init__(self, app: Flask): - """__init__.""" - self.app = app - super().__init__() - - def filter(self, record: logging.LogRecord) -> bool: - """Filter.""" - tld = self.app.config["THREAD_LOCAL_DATA"] - process_instance_id = "" - if hasattr(tld, "process_instance_id"): - process_instance_id = tld.process_instance_id - setattr(record, "process_instance_id", process_instance_id) # noqa: B010 - if hasattr(tld, "spiff_step"): - setattr(record, "spiff_step", tld.spiff_step) # noqa: 8010 - if hasattr(g, "user") and g.user: - setattr(record, "current_user_id", g.user.id) # noqa: B010 - return True - - def setup_logger(app: Flask) -> None: """Setup_logger.""" upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ea59c414..f4467613 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,5 +1,6 @@ """Process_instance_processor.""" import _strptime # type: ignore +import copy import decimal import json import logging @@ -39,19 +40,19 @@ from SpiffWorkflow.bpmn.serializer.task_spec 
import ( # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore -from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore -from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore -from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter # type: ignore from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore +from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -84,29 +85,23 @@ from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task import TaskModel +from spiffworkflow_backend.models.task import TaskNotFoundError from 
spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script from spiffworkflow_backend.services.custom_parser import MyCustomParser from spiffworkflow_backend.services.file_system_service import FileSystemService -from spiffworkflow_backend.services.process_instance_lock_service import ( - ProcessInstanceLockService, -) -from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( execution_strategy_named, ) -from spiffworkflow_backend.services.workflow_execution_service import ( - StepDetailLoggingDelegate, -) from spiffworkflow_backend.services.workflow_execution_service import ( TaskModelSavingDelegate, ) @@ -114,8 +109,6 @@ from spiffworkflow_backend.services.workflow_execution_service import ( WorkflowExecutionService, ) -SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter) - # Sorry about all this crap. I wanted to move this thing to another file, but # importing a bunch of types causes circular imports. 
@@ -150,14 +143,6 @@ class MissingProcessInfoError(Exception): """MissingProcessInfoError.""" -class SpiffStepDetailIsMissingError(Exception): - pass - - -class TaskNotFoundError(Exception): - pass - - class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore def __init__(self, environment_globals: Dict[str, Any]): """BoxedTaskDataBasedScriptEngineEnvironment.""" @@ -434,9 +419,22 @@ class ProcessInstanceProcessor: # * __get_bpmn_process_instance, which takes spec and subprocesses and instantiates and returns a BpmnWorkflow def __init__(self, process_instance_model: ProcessInstanceModel, validate_only: bool = False) -> None: """Create a Workflow Processor based on the serialized information available in the process_instance model.""" + with ProcessInstanceQueueService.dequeued(process_instance_model): + try: + self.setup_processor_with_process_instance( + process_instance_model=process_instance_model, validate_only=validate_only + ) + except Exception as ex: + process_instance_model.status = ProcessInstanceStatus.error.value + db.session.add(process_instance_model) + db.session.commit() + raise ex + + def setup_processor_with_process_instance( + self, process_instance_model: ProcessInstanceModel, validate_only: bool = False + ) -> None: tld = current_app.config["THREAD_LOCAL_DATA"] tld.process_instance_id = process_instance_model.id - tld.spiff_step = process_instance_model.spiff_step # we want this to be the fully qualified path to the process model including all group subcomponents current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( @@ -687,7 +685,7 @@ class ProcessInstanceProcessor: single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True) spiff_bpmn_process_dict.update(single_bpmn_process_dict) - bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all() + bpmn_subprocesses = BpmnProcessModel.query.filter_by(top_level_process_id=bpmn_process.id).all() 
bpmn_subprocess_id_to_guid_mappings = {} for bpmn_subprocess in bpmn_subprocesses: bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid @@ -760,6 +758,9 @@ class ProcessInstanceProcessor: spec, subprocesses ) bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only + + # run _predict to ensure tasks are predicted to add back in LIKELY and MAYBE tasks + bpmn_process_instance._predict() return ( bpmn_process_instance, full_bpmn_process_dict, @@ -817,37 +818,6 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping( - self, - spiff_task: Optional[SpiffTask] = None, - start_in_seconds: Optional[float] = None, - end_in_seconds: Optional[float] = None, - ) -> dict: - """SaveSpiffStepDetails.""" - if spiff_task is None: - # TODO: safer to pass in task vs use last task? - spiff_task = self.bpmn_process_instance.last_task - - if spiff_task is None: - return {} - - # it's only None when we're starting a human task (it's not complete yet) - if start_in_seconds is None: - start_in_seconds = time.time() - - task_json = self.get_task_dict_from_spiff_task(spiff_task) - - return { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": task_json, - "task_id": str(spiff_task.id), - "task_state": spiff_task.get_state_name(), - "bpmn_task_identifier": spiff_task.task_spec.name, - "start_in_seconds": start_in_seconds, - "end_in_seconds": end_in_seconds, - } - def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: """Extract_metadata.""" metadata_extraction_paths = process_model_info.metadata_extraction_paths @@ -879,7 +849,7 @@ class ProcessInstanceProcessor: process_instance_id=self.process_instance_model.id, key=key, ) - pim.value = data_for_key + pim.value = str(data_for_key)[0:255] db.session.add(pim) db.session.commit() @@ -1185,14 +1155,7 @@ class 
ProcessInstanceProcessor: human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task) db.session.add(human_task_user) - self.increment_spiff_step() - spiff_step_detail_mapping = self.spiff_step_details_mapping( - spiff_task=ready_or_waiting_task, start_in_seconds=time.time() - ) - spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping) - db.session.add(spiff_step_detail) db.session.commit() - # self.log_spiff_step_details(spiff_step_detail_mapping) if len(human_tasks) > 0: for at in human_tasks: @@ -1223,19 +1186,11 @@ class ProcessInstanceProcessor: # TODO: do_engine_steps without a lock self.do_engine_steps(save=True) - def add_step(self, step: Union[dict, None] = None) -> None: - """Add a spiff step.""" - if step is None: - step = self.spiff_step_details_mapping() - spiff_step_detail = SpiffStepDetailsModel(**step) - db.session.add(spiff_step_detail) - db.session.commit() - # self.log_spiff_step_details(step) - def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} - spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) + start_in_seconds = time.time() + spiff_task = self.bpmn_process_instance.get_task_from_id(UUID(task_id)) event_type = ProcessInstanceEventType.task_skipped.value if execute: current_app.logger.info( @@ -1267,6 +1222,8 @@ class ProcessInstanceProcessor: spiff_task.workflow.last_task = spiff_task spiff_tasks_updated[spiff_task.id] = spiff_task + end_in_seconds = time.time() + if isinstance(spiff_task.task_spec, EndEvent): for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow): task.complete() @@ -1279,17 +1236,17 @@ class ProcessInstanceProcessor: task.complete() spiff_tasks_updated[task.id] = task - self.increment_spiff_step() - self.add_step() - for updated_spiff_task in spiff_tasks_updated.values(): - bpmn_process, task_model, new_task_models, 
new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - updated_spiff_task, - self.process_instance_model, - self._serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) + ( + bpmn_process, + task_model, + new_task_models, + new_json_data_dicts, + ) = TaskService.find_or_create_task_model_from_spiff_task( + updated_spiff_task, + self.process_instance_model, + self._serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) bpmn_process_to_use = bpmn_process or task_model.bpmn_process bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( @@ -1303,6 +1260,11 @@ class ProcessInstanceProcessor: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + # spiff_task should be the main task we are completing and only it should get the timestamps + if task_model.guid == str(spiff_task.id): + task_model.start_in_seconds = start_in_seconds + task_model.end_in_seconds = end_in_seconds + new_task_models[task_model.guid] = task_model db.session.bulk_save_objects(new_task_models.values()) TaskService.insert_or_update_json_data_records(new_json_data_dicts) @@ -1312,48 +1274,129 @@ class ProcessInstanceProcessor: # Saving the workflow seems to reset the status self.suspend() - def reset_process(self, spiff_step: int) -> None: + # FIXME: this currently cannot work for multi-instance tasks and loopback. It can somewhat for not those + # if we can properly handling resetting children tasks. Right now if we set them all to FUTURE then + # they never get picked up by spiff and processed. The process instance just stops after the to_task_guid + # and marks itself complete without processing any of the children. 
+ @classmethod + def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None: """Reset a process to an earlier state.""" - spiff_logger = logging.getLogger("spiff") - spiff_logger.info( - f"Process reset from step {spiff_step}", - extra=self.bpmn_process_instance.log_info(), + # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") + cls.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid ) - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id, - SpiffStepDetailsModel.spiff_step == spiff_step, + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" ) - .first() + + # NOTE: run ALL queries before making changes to ensure we get everything before anything changes + parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + to_task_model ) - if step_detail is not None: - self.increment_spiff_step() - self.add_step( - { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": step_detail.task_json, - "timestamp": round(time.time()), - } + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] + + tasks_to_update_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + 
TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_update = tasks_to_update_query.all() + tasks_to_update_guids = [t.guid for t in tasks_to_update] + + tasks_to_delete_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_delete = tasks_to_delete_query.all() + tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + tasks_to_delete_ids = [t.id for t in tasks_to_delete] + + # delete bpmn processes that are also tasks that we either deleted or will update. + # this is to force spiff to recreate those bpmn processes with the correct associated task guids. + bpmn_processes_to_delete_query = db.session.query(BpmnProcessModel).filter( + or_( + BpmnProcessModel.guid.in_(tasks_to_delete_guids), # type: ignore + and_( + BpmnProcessModel.guid.in_(tasks_to_update_guids), # type: ignore + BpmnProcessModel.id.not_in(parent_bpmn_processes_ids), # type: ignore + ), + ) + ) + bpmn_processes_to_delete = bpmn_processes_to_delete_query.order_by( + BpmnProcessModel.id.desc() # type: ignore + ).all() + + # delete any human task that was for a task that we deleted since they will get recreated later. 
+ human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + ).all() + + # ensure the correct order for foreign keys + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + for task_to_delete in tasks_to_delete: + db.session.delete(task_to_delete) + for bpmn_process_to_delete in bpmn_processes_to_delete: + db.session.delete(bpmn_process_to_delete) + + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) + + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + + for task_to_update in tasks_to_update: + TaskService.reset_task_model(task_to_update, state="FUTURE") + db.session.bulk_save_objects(tasks_to_update) + + parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + if parent_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" ) - dct = self._serializer.workflow_to_dict(self.bpmn_process_instance) - dct["tasks"] = step_detail.task_json["tasks"] - dct["subprocesses"] = step_detail.task_json["subprocesses"] - self.bpmn_process_instance = self._serializer.workflow_from_dict(dct) + TaskService.reset_task_model( + to_task_model, + state="READY", + json_data_hash=parent_task_model.json_data_hash, + python_env_data_hash=parent_task_model.python_env_data_hash, + ) + db.session.add(to_task_model) + for task_model in task_models_of_parent_bpmn_processes: + TaskService.reset_task_model(task_model, state="WAITING") + db.session.bulk_save_objects(task_models_of_parent_bpmn_processes) - # 
Cascade does not seems to work on filters, only directly through the session - tasks = self.bpmn_process_instance.get_tasks(TaskState.NOT_FINISHED_MASK) - rows = HumanTaskModel.query.filter( - HumanTaskModel.task_id.in_(str(t.id) for t in tasks) # type: ignore - ).all() - for row in rows: - db.session.delete(row) + bpmn_process = to_task_model.bpmn_process + properties_json = copy.copy(bpmn_process.properties_json) + properties_json["last_task"] = parent_task_model.guid + bpmn_process.properties_json = properties_json + db.session.add(bpmn_process) + db.session.commit() - self.save() - self.suspend() + processor = ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: @@ -1411,7 +1454,7 @@ class ProcessInstanceProcessor: @staticmethod def update_spiff_parser_with_all_process_dependency_files( - parser: BpmnDmnParser, + parser: SpiffBpmnParser, processed_identifiers: Optional[set[str]] = None, ) -> None: """Update_spiff_parser_with_all_process_dependency_files.""" @@ -1518,29 +1561,6 @@ class ProcessInstanceProcessor: # current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}") return the_status - # TODO: replace with implicit/more granular locking in workflow execution service - # TODO: remove the retry logic once all user_input_required's don't need to be locked to check timers - def lock_process_instance( - self, lock_prefix: str, retry_count: int = 0, retry_interval_in_seconds: int = 0 - ) -> None: - try: - ProcessInstanceQueueService.dequeue(self.process_instance_model) - except ProcessInstanceIsAlreadyLockedError as e: - if retry_count > 0: - current_app.logger.info( - f"process_instance_id {self.process_instance_model.id} is locked. " - f"will retry {retry_count} times with delay of {retry_interval_in_seconds}." 
- ) - if retry_interval_in_seconds > 0: - time.sleep(retry_interval_in_seconds) - self.lock_process_instance(lock_prefix, retry_count - 1, retry_interval_in_seconds) - else: - raise e - - # TODO: replace with implicit/more granular locking in workflow execution service - def unlock_process_instance(self, lock_prefix: str) -> None: - ProcessInstanceQueueService.enqueue(self.process_instance_model) - def process_bpmn_messages(self) -> None: """Process_bpmn_messages.""" bpmn_messages = self.bpmn_process_instance.get_bpmn_messages() @@ -1591,31 +1611,27 @@ class ProcessInstanceProcessor: db.session.add(message_instance) db.session.commit() - def increment_spiff_step(self) -> None: - """Spiff_step++.""" - spiff_step = self.process_instance_model.spiff_step or 0 - spiff_step += 1 - self.process_instance_model.spiff_step = spiff_step - current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step - db.session.add(self.process_instance_model) - def do_engine_steps( self, exit_at: None = None, save: bool = False, execution_strategy_name: Optional[str] = None, ) -> None: - # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and - # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None. 
- def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict: - self._script_engine.environment.revise_state_with_task_data(task) - return self.spiff_step_details_mapping(task, start, end) + with ProcessInstanceQueueService.dequeued(self.process_instance_model): + # TODO: ideally we just lock in the execution service, but not sure + # about _add_bpmn_process_definitions and if that needs to happen in + # the same lock like it does on main + self._do_engine_steps(exit_at, save, execution_strategy_name) + def _do_engine_steps( + self, + exit_at: None = None, + save: bool = False, + execution_strategy_name: Optional[str] = None, + ) -> None: self._add_bpmn_process_definitions() - step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( - secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, @@ -1636,38 +1652,13 @@ class ProcessInstanceProcessor: execution_service.do_engine_steps(exit_at, save) finally: # clear out failling spiff tasks here since the ProcessInstanceProcessor creates an instance of the - # script engine on a class variable. + # script engine on a class variable. if ( hasattr(self._script_engine, "failing_spiff_task") and self._script_engine.failing_spiff_task is not None ): self._script_engine.failing_spiff_task = None - # log the spiff step details so we know what is processing the process - # instance when a human task has a timer event. 
- def log_spiff_step_details(self, step_details: Any) -> None: - if ProcessInstanceLockService.has_lock(self.process_instance_model.id): - locked_by = ProcessInstanceLockService.locked_by() - message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}" - current_app.logger.debug(message) - - def cancel_notify(self) -> None: - """Cancel_notify.""" - self.__cancel_notify(self.bpmn_process_instance) - - @staticmethod - def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None: - """__cancel_notify.""" - try: - # A little hackly, but make the bpmn_process_instance catch a cancel event. - bpmn_process_instance.signal("cancel") # generate a cancel signal. - bpmn_process_instance.catch(CancelEventDefinition()) - # Due to this being static, can't save granular step details in this case - # TODO: do_engine_steps without a lock - bpmn_process_instance.do_engine_steps() - except WorkflowTaskException as we: - raise ApiError.from_workflow_exception("task_error", str(we), we) from we - @classmethod def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]: return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0] @@ -1808,7 +1799,7 @@ class ProcessInstanceProcessor: ) task_model.start_in_seconds = time.time() - self.bpmn_process_instance.complete_task_from_id(spiff_task.id) + self.bpmn_process_instance.run_task_from_id(spiff_task.id) task_model.end_in_seconds = time.time() human_task.completed_by_user_id = user.id @@ -1816,35 +1807,10 @@ class ProcessInstanceProcessor: human_task.task_status = spiff_task.get_state_name() db.session.add(human_task) - # FIXME: remove when we switch over to using tasks only - details_model = ( - SpiffStepDetailsModel.query.filter_by( - process_instance_id=self.process_instance_model.id, - task_id=str(spiff_task.id), - task_state="READY", - ) - .order_by(SpiffStepDetailsModel.id.desc()) # type: ignore - .first() - ) - if details_model is None: - raise 
SpiffStepDetailIsMissingError( - "Cannot find a ready spiff_step_detail entry for process instance" - f" {self.process_instance_model.id} and task_id is {spiff_task.id}" - ) - - details_model.task_state = spiff_task.get_state_name() - details_model.end_in_seconds = time.time() - details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task) - db.session.add(details_model) - # ####### - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) - for json_data_dict in json_data_dict_list: - if json_data_dict is not None: - json_data = db.session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first() - if json_data is None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) + json_data_dict_mapping: dict[str, JsonDataDict] = {} + TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping) + TaskService.insert_or_update_json_data_records(json_data_dict_mapping) self.add_event_to_process_instance( self.process_instance_model, @@ -1853,6 +1819,13 @@ class ProcessInstanceProcessor: user_id=user.id, ) + task_service = TaskService( + process_instance=self.process_instance_model, + serializer=self._serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + task_service.process_parents_and_children_and_save_to_database(spiff_task) + # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) self.save() @@ -1924,6 +1897,9 @@ class ProcessInstanceProcessor: all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]] + def get_task_by_guid(self, task_guid: str) -> Optional[SpiffTask]: + return self.bpmn_process_instance.get_task_from_id(UUID(task_guid)) + @classmethod def get_task_by_bpmn_identifier( cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow diff 
--git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index 2d2bc4df..9021ab4d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,9 +1,9 @@ +import contextlib import time +from typing import Generator from typing import List from typing import Optional -from flask import current_app - from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -26,28 +26,32 @@ class ProcessInstanceIsAlreadyLockedError(Exception): class ProcessInstanceQueueService: """TODO: comment.""" - @staticmethod - def enqueue(process_instance: ProcessInstanceModel) -> None: - queue_item = ProcessInstanceLockService.try_unlock(process_instance.id) - - if queue_item is None: - queue_item = ProcessInstanceQueueModel(process_instance_id=process_instance.id) - + @classmethod + def _configure_and_save_queue_entry( + cls, process_instance: ProcessInstanceModel, queue_entry: ProcessInstanceQueueModel + ) -> None: # TODO: configurable params (priority/run_at) - queue_item.run_at_in_seconds = round(time.time()) - queue_item.priority = 2 - queue_item.status = process_instance.status - queue_item.locked_by = None - queue_item.locked_at_in_seconds = None + queue_entry.run_at_in_seconds = round(time.time()) + queue_entry.priority = 2 + queue_entry.status = process_instance.status + queue_entry.locked_by = None + queue_entry.locked_at_in_seconds = None - db.session.add(queue_item) + db.session.add(queue_entry) db.session.commit() - @staticmethod - def dequeue(process_instance: ProcessInstanceModel) -> None: - if ProcessInstanceLockService.has_lock(process_instance.id): - 
return + @classmethod + def enqueue_new_process_instance(cls, process_instance: ProcessInstanceModel) -> None: + queue_entry = ProcessInstanceQueueModel(process_instance_id=process_instance.id) + cls._configure_and_save_queue_entry(process_instance, queue_entry) + @classmethod + def _enqueue(cls, process_instance: ProcessInstanceModel) -> None: + queue_entry = ProcessInstanceLockService.unlock(process_instance.id) + cls._configure_and_save_queue_entry(process_instance, queue_entry) + + @classmethod + def _dequeue(cls, process_instance: ProcessInstanceModel) -> None: locked_by = ProcessInstanceLockService.locked_by() db.session.query(ProcessInstanceQueueModel).filter( @@ -82,6 +86,18 @@ class ProcessInstanceQueueService: ProcessInstanceLockService.lock(process_instance.id, queue_entry) + @classmethod + @contextlib.contextmanager + def dequeued(cls, process_instance: ProcessInstanceModel) -> Generator[None, None, None]: + reentering_lock = ProcessInstanceLockService.has_lock(process_instance.id) + try: + if not reentering_lock: + cls._dequeue(process_instance) + yield + finally: + if not reentering_lock: + cls._enqueue(process_instance) + @classmethod def entries_with_status( cls, @@ -105,31 +121,3 @@ class ProcessInstanceQueueService: queue_entries = cls.entries_with_status(status_value, None) ids_with_status = [entry.process_instance_id for entry in queue_entries] return ids_with_status - - @classmethod - def dequeue_many( - cls, - status_value: str = ProcessInstanceStatus.waiting.value, - ) -> List[int]: - locked_by = ProcessInstanceLockService.locked_by() - - # TODO: configurable params (priority/run_at/limit) - db.session.query(ProcessInstanceQueueModel).filter( - ProcessInstanceQueueModel.status == status_value, - ProcessInstanceQueueModel.locked_by.is_(None), # type: ignore - ).update( - { - "locked_by": locked_by, - } - ) - - db.session.commit() - - queue_entries = cls.entries_with_status(status_value, locked_by) - - locked_ids = 
ProcessInstanceLockService.lock_many(queue_entries) - - if len(locked_ids) > 0: - current_app.logger.info(f"{locked_by} dequeued_many: {locked_ids}") - - return locked_ids diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 0f62a738..cbf25bb6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -309,11 +309,14 @@ class ProcessInstanceReportService: ) -> list[dict]: """Add_metadata_columns_to_process_instance.""" results = [] - for process_instance in process_instance_sqlalchemy_rows: - process_instance_dict = process_instance["ProcessInstanceModel"].serialized + for process_instance_row in process_instance_sqlalchemy_rows: + process_instance_mapping = process_instance_row._mapping + process_instance_dict = process_instance_row[0].serialized for metadata_column in metadata_columns: if metadata_column["accessor"] not in process_instance_dict: - process_instance_dict[metadata_column["accessor"]] = process_instance[metadata_column["accessor"]] + process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[ + metadata_column["accessor"] + ] results.append(process_instance_dict) return results diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 4daabd58..3ec3ab4d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -70,6 +70,7 @@ class ProcessInstanceService: ) db.session.add(process_instance_model) db.session.commit() + 
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model) return process_instance_model @classmethod @@ -111,9 +112,7 @@ class ProcessInstanceService: .filter(ProcessInstanceModel.id.in_(process_instance_ids_to_check)) # type: ignore .all() ) - process_instance_lock_prefix = "Background" for process_instance in records: - locked = False processor = None try: current_app.logger.info(f"Processing process_instance {process_instance.id}") @@ -122,8 +121,6 @@ class ProcessInstanceService: current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}") continue - processor.lock_process_instance(process_instance_lock_prefix) - locked = True db.session.refresh(process_instance) if process_instance.status == status_value: execution_strategy_name = current_app.config[ @@ -134,17 +131,11 @@ class ProcessInstanceService: continue except Exception as e: db.session.rollback() # in case the above left the database with a bad transaction - process_instance.status = ProcessInstanceStatus.error.value - db.session.add(process_instance) - db.session.commit() error_message = ( f"Error running waiting task for process_instance {process_instance.id}" + f"({process_instance.process_model_identifier}). 
{str(e)}" ) current_app.logger.error(error_message) - finally: - if locked and processor: - processor.unlock_process_instance(process_instance_lock_prefix) @staticmethod def processor_to_process_instance_api( @@ -157,8 +148,7 @@ class ProcessInstanceService: # navigation = processor.bpmn_process_instance.get_deep_nav_list() # ProcessInstanceService.update_navigation(navigation, processor) process_model_service = ProcessModelService() - process_model = process_model_service.get_process_model(processor.process_model_identifier) - process_model.display_name if process_model else "" + process_model_service.get_process_model(processor.process_model_identifier) process_instance_api = ProcessInstanceApi( id=processor.get_process_instance_id(), status=processor.get_status(), @@ -278,6 +268,10 @@ class ProcessInstanceService: for list_index, list_value in enumerate(value): if isinstance(list_value, str): yield (identifier, list_value, list_index) + if isinstance(list_value, dict) and len(list_value) == 1: + for v in list_value.values(): + if isinstance(v, str): + yield (identifier, v, list_index) @classmethod def file_data_models_for_data( @@ -308,7 +302,11 @@ class ProcessInstanceService: if model.list_index is None: data[model.identifier] = digest_reference else: - data[model.identifier][model.list_index] = digest_reference + old_value = data[model.identifier][model.list_index] + new_value: Any = digest_reference + if isinstance(old_value, dict) and len(old_value) == 1: + new_value = {k: digest_reference for k in old_value.keys()} + data[model.identifier][model.list_index] = new_value @classmethod def save_file_data_and_replace_with_digest_references( @@ -324,6 +322,21 @@ class ProcessInstanceService: cls.replace_file_data_with_digest_references(data, models) + @staticmethod + def update_form_task_data( + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + data: dict[str, Any], + user: UserModel, + ) -> None: + 
AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) + ProcessInstanceService.save_file_data_and_replace_with_digest_references( + data, + processor.process_instance_model.id, + ) + dot_dct = ProcessInstanceService.create_dot_dict(data) + spiff_task.update_data(dot_dct) + @staticmethod def complete_form_task( processor: ProcessInstanceProcessor, @@ -337,15 +350,7 @@ class ProcessInstanceService: Abstracted here because we need to do it multiple times when completing all tasks in a multi-instance task. """ - AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) - - ProcessInstanceService.save_file_data_and_replace_with_digest_references( - data, - processor.process_instance_model.id, - ) - - dot_dct = ProcessInstanceService.create_dot_dict(data) - spiff_task.update_data(dot_dct) + ProcessInstanceService.update_form_task_data(processor, spiff_task, data, user) # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. 
processor.complete_task(spiff_task, human_task, user=user) @@ -404,7 +409,6 @@ class ProcessInstanceService: spiff_task: SpiffTask, add_docs_and_forms: bool = False, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ) -> Task: """Spiff_task_to_api_task.""" task_type = spiff_task.task_spec.spec_type @@ -443,7 +447,6 @@ class ProcessInstanceService: event_definition=serialized_task_spec.get("event_definition"), call_activity_process_identifier=call_activity_process_identifier, calling_subprocess_task_id=calling_subprocess_task_id, - task_spiff_step=task_spiff_step, ) return task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 5a03f387..2e904a07 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,4 +1,6 @@ +import copy import json +import time from hashlib import sha256 from typing import Optional from typing import Tuple @@ -9,17 +11,26 @@ from flask import current_app from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from 
spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +class StartAndEndTimes(TypedDict): + start_in_seconds: Optional[float] + end_in_seconds: Optional[float] + + class JsonDataDict(TypedDict): hash: str data: dict @@ -28,6 +39,161 @@ class JsonDataDict(TypedDict): class TaskService: PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state" + def __init__( + self, + process_instance: ProcessInstanceModel, + serializer: BpmnWorkflowSerializer, + bpmn_definition_to_task_definitions_mappings: dict, + ) -> None: + self.process_instance = process_instance + self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings + self.serializer = serializer + + self.bpmn_processes: dict[str, BpmnProcessModel] = {} + self.task_models: dict[str, TaskModel] = {} + self.json_data_dicts: dict[str, JsonDataDict] = {} + self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} + + def save_objects_to_database(self) -> None: + db.session.bulk_save_objects(self.bpmn_processes.values()) + db.session.bulk_save_objects(self.task_models.values()) + db.session.bulk_save_objects(self.process_instance_events.values()) + self.__class__.insert_or_update_json_data_records(self.json_data_dicts) + + def process_parents_and_children_and_save_to_database( + self, + spiff_task: SpiffTask, + ) -> None: + self.process_spiff_task_children(spiff_task) + self.process_spiff_task_parent_subprocess_tasks(spiff_task) + self.save_objects_to_database() + + def process_spiff_task_children( + self, + spiff_task: SpiffTask, + ) -> None: + for child_spiff_task in spiff_task.children: + if child_spiff_task._has_state(TaskState.PREDICTED_MASK): + self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) + continue + self.update_task_model_with_spiff_task( + 
spiff_task=child_spiff_task, + ) + self.process_spiff_task_children( + spiff_task=child_spiff_task, + ) + + def process_spiff_task_parent_subprocess_tasks( + self, + spiff_task: SpiffTask, + ) -> None: + """Find the parent subprocess of a given spiff_task and update its data. + + This will also process that subprocess task's children and will recurse upwards + to process its parent subprocesses as well. + """ + (parent_subprocess_guid, _parent_subprocess) = self.__class__.task_subprocess(spiff_task) + if parent_subprocess_guid is not None: + spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id( + UUID(parent_subprocess_guid) + ) + + if spiff_task_of_parent_subprocess is not None: + self.update_task_model_with_spiff_task( + spiff_task=spiff_task_of_parent_subprocess, + ) + self.process_spiff_task_children( + spiff_task=spiff_task_of_parent_subprocess, + ) + self.process_spiff_task_parent_subprocess_tasks( + spiff_task=spiff_task_of_parent_subprocess, + ) + + def update_task_model_with_spiff_task( + self, + spiff_task: SpiffTask, + task_failed: bool = False, + start_and_end_times: Optional[StartAndEndTimes] = None, + ) -> TaskModel: + new_bpmn_process = None + if str(spiff_task.id) in self.task_models: + task_model = self.task_models[str(spiff_task.id)] + else: + ( + new_bpmn_process, + task_model, + new_task_models, + new_json_data_dicts, + ) = self.__class__.find_or_create_task_model_from_spiff_task( + spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + self.task_models.update(new_task_models) + self.json_data_dicts.update(new_json_data_dicts) + + # we are not sure why task_model.bpmn_process can be None while task_model.bpmn_process_id actually has a valid value + bpmn_process = ( + new_bpmn_process + or task_model.bpmn_process + or BpmnProcessModel.query.filter_by(id=task_model.bpmn_process_id).first() + ) + + 
bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process( + bpmn_process, spiff_task.workflow.data + ) + json_data_dict_list = self.__class__.update_task_model(task_model, spiff_task, self.serializer) + self.task_models[task_model.guid] = task_model + if bpmn_process_json_data is not None: + json_data_dict_list.append(bpmn_process_json_data) + self.update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) + + if start_and_end_times: + task_model.start_in_seconds = start_and_end_times["start_in_seconds"] + task_model.end_in_seconds = start_and_end_times["end_in_seconds"] + + if task_model.state == "COMPLETED" or task_failed: + event_type = ProcessInstanceEventType.task_completed.value + if task_failed: + event_type = ProcessInstanceEventType.task_failed.value + + # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete + # which script tasks execute when READY. + timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() + process_instance_event = ProcessInstanceEventModel( + task_guid=task_model.guid, + process_instance_id=self.process_instance.id, + event_type=event_type, + timestamp=timestamp, + ) + self.process_instance_events[task_model.guid] = process_instance_event + + self.update_bpmn_process(spiff_task.workflow, bpmn_process) + return task_model + + def update_bpmn_process( + self, + spiff_workflow: BpmnWorkflow, + bpmn_process: BpmnProcessModel, + ) -> None: + new_properties_json = copy.copy(bpmn_process.properties_json) + new_properties_json["last_task"] = str(spiff_workflow.last_task.id) if spiff_workflow.last_task else None + new_properties_json["success"] = spiff_workflow.success + bpmn_process.properties_json = new_properties_json + + bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process(bpmn_process, spiff_workflow.data) + if bpmn_process_json_data is not None: + 
self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + + self.bpmn_processes[bpmn_process.guid or "top_level"] = bpmn_process + + if spiff_workflow.outer_workflow != spiff_workflow: + direct_parent_bpmn_process = BpmnProcessModel.query.filter_by( + id=bpmn_process.direct_parent_process_id + ).first() + self.update_bpmn_process(spiff_workflow.outer_workflow, direct_parent_bpmn_process) + @classmethod def insert_or_update_json_data_records( cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict] @@ -56,12 +222,18 @@ class TaskService: It also returns the relating json_data object so they can be imported later. """ new_properties_json = serializer.task_to_dict(spiff_task) + if new_properties_json["task_spec"] == "Start": + new_properties_json["parent"] = None spiff_task_data = new_properties_json.pop("data") python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] - json_data_dict = cls.update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") - python_env_dict = cls.update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") + json_data_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, spiff_task_data, "json_data_hash" + ) + python_env_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, python_env_data_dict, "python_env_data_hash" + ) return [json_data_dict, python_env_dict] @classmethod @@ -108,9 +280,9 @@ class TaskService: for sp_id, sp in top_level_workflow.subprocesses.items(): if sp == my_wf: my_sp = sp - my_sp_id = sp_id + my_sp_id = str(sp_id) break - return (str(my_sp_id), my_sp) + return (my_sp_id, my_sp) @classmethod def task_bpmn_process( @@ -144,7 +316,7 @@ class TaskService: bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process( 
bpmn_process_dict=serializer.workflow_to_dict(subprocess), process_instance=process_instance, - bpmn_process_parent=process_instance.bpmn_process, + top_level_process=process_instance.bpmn_process, bpmn_process_guid=subprocess_guid, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, @@ -160,7 +332,7 @@ class TaskService: bpmn_definition_to_task_definitions_mappings: dict, spiff_workflow: BpmnWorkflow, serializer: BpmnWorkflowSerializer, - bpmn_process_parent: Optional[BpmnProcessModel] = None, + top_level_process: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: """This creates and adds a bpmn_process to the Db session. @@ -178,13 +350,13 @@ class TaskService: if "subprocess_specs" in bpmn_process_dict: bpmn_process_dict.pop("subprocess_specs") - new_task_models = {} + new_task_models: dict[str, TaskModel] = {} new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None - if bpmn_process_parent is not None: + if top_level_process is not None: bpmn_process = BpmnProcessModel.query.filter_by( - parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid + top_level_process_id=top_level_process.id, guid=bpmn_process_guid ).first() elif process_instance.bpmn_process_id is not None: bpmn_process = process_instance.bpmn_process @@ -194,6 +366,32 @@ class TaskService: bpmn_process_is_new = True bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) + bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ + "bpmn_process_definition" + ] + bpmn_process.bpmn_process_definition = bpmn_process_definition + + if top_level_process is not None: + subprocesses = spiff_workflow._get_outermost_workflow().subprocesses + direct_bpmn_process_parent = top_level_process + for subprocess_guid, subprocess in subprocesses.items(): + if subprocess == 
spiff_workflow.outer_workflow: + direct_bpmn_process_parent = BpmnProcessModel.query.filter_by( + guid=str(subprocess_guid) + ).first() + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError( + f"Could not find bpmn process with guid: {str(subprocess_guid)} " + f"while searching for direct parent process of {bpmn_process_guid}." + ) + + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError( + f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}" + ) + + bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id + # Point the root id to the Start task instead of the Root task # since we are ignoring the Root task. for task_id, task_properties in tasks.items(): @@ -206,15 +404,10 @@ class TaskService: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data - if bpmn_process_parent is None: + if top_level_process is None: process_instance.bpmn_process = bpmn_process - elif bpmn_process.parent_process_id is None: - bpmn_process.parent_process_id = bpmn_process_parent.id - - bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ - "bpmn_process_definition" - ] - bpmn_process.bpmn_process_definition = bpmn_process_definition + elif bpmn_process.top_level_process_id is None: + bpmn_process.top_level_process_id = top_level_process.id # Since we bulk insert tasks later we need to add the bpmn_process to the session # to ensure we have an id. @@ -227,12 +420,13 @@ class TaskService: # bpmn process defintion so let's avoid using it. if task_properties["task_spec"] == "Root": continue - if task_properties["task_spec"] == "Start": - task_properties["parent"] = None - task_data_dict = task_properties.pop("data") - state_int = task_properties["state"] - spiff_task = spiff_workflow.get_task(UUID(task_id)) + # we are going to avoid saving likely and maybe tasks to the db. 
+ # that means we need to remove them from their parents' lists of children as well. + spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) + if spiff_task._has_state(TaskState.PREDICTED_MASK): + cls.remove_spiff_task_from_parent(spiff_task, new_task_models) + continue task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: @@ -242,25 +436,29 @@ class TaskService: spiff_task, bpmn_definition_to_task_definitions_mappings, ) - task_model.state = TaskStateNames[state_int] - task_model.properties_json = task_properties - new_task_models[task_model.guid] = task_model - json_data_dict = TaskService.update_task_data_on_task_model( - task_model, task_data_dict, "json_data_hash" - ) + json_data_dict, python_env_dict = cls.update_task_model(task_model, spiff_task, serializer) + + new_task_models[task_model.guid] = task_model if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict - - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) - python_env_dict = TaskService.update_task_data_on_task_model( - task_model, python_env_data_dict, "python_env_data_hash" - ) if python_env_dict is not None: new_json_data_dicts[python_env_dict["hash"]] = python_env_dict - return (bpmn_process, new_task_models, new_json_data_dicts) + @classmethod + def remove_spiff_task_from_parent(cls, spiff_task: SpiffTask, task_models: dict[str, TaskModel]) -> None: + """Removes the given spiff task from its parent and then updates the task_models dict with the changes.""" + spiff_task_parent_guid = str(spiff_task.parent.id) + spiff_task_guid = str(spiff_task.id) + if spiff_task_parent_guid in task_models: + parent_task_model = task_models[spiff_task_parent_guid] + if spiff_task_guid in parent_task_model.properties_json["children"]: + new_parent_properties_json = copy.copy(parent_task_model.properties_json) + new_parent_properties_json["children"].remove(spiff_task_guid) + 
parent_task_model.properties_json = new_parent_properties_json + task_models[spiff_task_parent_guid] = parent_task_model + @classmethod def update_task_data_on_bpmn_process( cls, bpmn_process: BpmnProcessModel, bpmn_process_data_dict: dict @@ -274,7 +472,11 @@ class TaskService: return json_data_dict @classmethod - def update_task_data_on_task_model( + def insert_or_update_json_data_dict(cls, json_data_dict: JsonDataDict) -> None: + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) + + @classmethod + def update_task_data_on_task_model_and_return_dict_if_updated( cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) @@ -285,6 +487,65 @@ class TaskService: setattr(task_model, task_model_data_column, task_data_hash) return json_data_dict + @classmethod + def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: + bpmn_process_ids = [p.id for p in bpmn_processes] + direct_children = BpmnProcessModel.query.filter( + BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids) # type: ignore + ).all() + if len(direct_children) > 0: + return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) + return bpmn_processes + + @classmethod + def task_models_of_parent_bpmn_processes( + cls, task_model: TaskModel + ) -> Tuple[list[BpmnProcessModel], list[TaskModel]]: + bpmn_process = task_model.bpmn_process + task_models: list[TaskModel] = [] + bpmn_processes: list[BpmnProcessModel] = [bpmn_process] + if bpmn_process.guid is not None: + parent_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first() + if parent_task_model is not None: + b, t = cls.task_models_of_parent_bpmn_processes(parent_task_model) + return (bpmn_processes + b, [parent_task_model] + t) + return (bpmn_processes, task_models) + + @classmethod + def reset_task_model_dict( + cls, + 
task_model: dict, + state: str, + ) -> None: + task_model["state"] = state + task_model["start_in_seconds"] = None + task_model["end_in_seconds"] = None + + @classmethod + def reset_task_model( + cls, + task_model: TaskModel, + state: str, + json_data_hash: Optional[str] = None, + python_env_data_hash: Optional[str] = None, + ) -> None: + if json_data_hash is None: + cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "json_data_hash") + else: + task_model.json_data_hash = json_data_hash + if python_env_data_hash is None: + cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "python_env_data") + else: + task_model.python_env_data_hash = python_env_data_hash + + task_model.state = state + task_model.start_in_seconds = None + task_model.end_in_seconds = None + + new_properties_json = copy.copy(task_model.properties_json) + new_properties_json["state"] = getattr(TaskState, state) + task_model.properties_json = new_properties_json + @classmethod def _create_task( cls, @@ -312,3 +573,11 @@ class TaskService: # this helps to convert items like datetime objects to be json serializable converted_data: dict = serializer.data_converter.convert(user_defined_state) return converted_data + + @classmethod + def update_json_data_dicts_using_list( + cls, json_data_dict_list: list[Optional[JsonDataDict]], json_data_dicts: dict[str, JsonDataDict] + ) -> None: + for json_data_dict in json_data_dict_list: + if json_data_dict is not None: + json_data_dicts[json_data_dict["hash"]] = json_data_dict diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 4d44308b..e578cc13 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,8 +1,8 @@ -import logging import 
time from typing import Callable -from typing import List from typing import Optional +from typing import Set +from uuid import UUID from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore @@ -17,15 +17,12 @@ from spiffworkflow_backend.models.message_instance_correlation import ( MessageInstanceCorrelationRuleModel, ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, ) -from spiffworkflow_backend.services.task_service import JsonDataDict +from spiffworkflow_backend.services.task_service import StartAndEndTimes from spiffworkflow_backend.services.task_service import TaskService @@ -45,10 +42,6 @@ class EngineStepDelegate: pass -SpiffStepIncrementer = Callable[[], None] -SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict] - - class TaskModelSavingDelegate(EngineStepDelegate): """Engine step delegate that takes care of saving a task model to the database. 
@@ -67,26 +60,44 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings self.serializer = serializer - self.current_task_model: Optional[TaskModel] = None self.current_task_start_in_seconds: Optional[float] = None - self.task_models: dict[str, TaskModel] = {} - self.json_data_dicts: dict[str, JsonDataDict] = {} - self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} + self.last_completed_spiff_task: Optional[SpiffTask] = None + self.spiff_tasks_to_process: Set[UUID] = set() + self.spiff_task_timestamps: dict[UUID, StartAndEndTimes] = {} + + self.task_service = TaskService( + process_instance=self.process_instance, + serializer=self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): + self.spiff_task_timestamps[spiff_task.id] = {"start_in_seconds": time.time(), "end_in_seconds": None} + spiff_task.task_spec._predict(spiff_task, mask=TaskState.NOT_FINISHED_MASK) + self.current_task_start_in_seconds = time.time() + if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.will_complete_task(spiff_task) def did_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): - task_model = self._update_task_model_with_spiff_task(spiff_task) + # NOTE: used with process-all-tasks and process-children-of-last-task + task_model = self.task_service.update_task_model_with_spiff_task(spiff_task) if self.current_task_start_in_seconds is None: raise Exception("Could not find cached current_task_start_in_seconds. 
This should never have happend") task_model.start_in_seconds = self.current_task_start_in_seconds task_model.end_in_seconds = time.time() + + # # NOTE: used with process-spiff-tasks-list + # self.spiff_task_timestamps[spiff_task.id]['end_in_seconds'] = time.time() + # self.spiff_tasks_to_process.add(spiff_task.id) + # self._add_children(spiff_task) + # # self._add_parents(spiff_task) + + self.last_completed_spiff_task = spiff_task if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.did_complete_task(spiff_task) @@ -94,24 +105,76 @@ class TaskModelSavingDelegate(EngineStepDelegate): script_engine = bpmn_process_instance.script_engine if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: failing_spiff_task = script_engine.failing_spiff_task - self._update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) + self.task_service.update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) + self.task_service.process_spiff_task_parent_subprocess_tasks(failing_spiff_task) + self.task_service.process_spiff_task_children(failing_spiff_task) - db.session.bulk_save_objects(self.task_models.values()) - db.session.bulk_save_objects(self.process_instance_events.values()) - - TaskService.insert_or_update_json_data_records(self.json_data_dicts) + self.task_service.save_objects_to_database() if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False) db.session.commit() + def _add_children(self, spiff_task: SpiffTask) -> None: + for child_spiff_task in spiff_task.children: + self.spiff_tasks_to_process.add(child_spiff_task.id) + self._add_children(child_spiff_task) + + def _add_parents(self, spiff_task: SpiffTask) -> None: + if spiff_task.parent and spiff_task.parent.task_spec.name != "Root": + self.spiff_tasks_to_process.add(spiff_task.parent.id) + self._add_parents(spiff_task.parent) + def after_engine_steps(self, 
bpmn_process_instance: BpmnWorkflow) -> None: if self._should_update_task_model(): - # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. + # NOTE: process-all-tasks: All tests pass with this but it's less efficient and would be nice to replace + # excludes COMPLETED. the others were required to get PP1 to go to completion. + # process FUTURE tasks because Boundary events are not processed otherwise. for waiting_spiff_task in bpmn_process_instance.get_tasks( - TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY + TaskState.WAITING + | TaskState.CANCELLED + | TaskState.READY + | TaskState.MAYBE + | TaskState.LIKELY + | TaskState.FUTURE ): - self._update_task_model_with_spiff_task(waiting_spiff_task) + if waiting_spiff_task._has_state(TaskState.PREDICTED_MASK): + TaskService.remove_spiff_task_from_parent(waiting_spiff_task, self.task_service.task_models) + continue + self.task_service.update_task_model_with_spiff_task(waiting_spiff_task) + + # # NOTE: process-spiff-tasks-list: this would be the ideal way to handle all tasks + # # but we're missing something with it yet + # # + # # adding from line here until we are ready to go with this + # from SpiffWorkflow.exceptions import TaskNotFoundException + # for spiff_task_uuid in self.spiff_tasks_to_process: + # try: + # waiting_spiff_task = bpmn_process_instance.get_task_from_id(spiff_task_uuid) + # except TaskNotFoundException: + # continue + # + # # include PREDICTED_MASK tasks in list so we can remove them from the parent + # if waiting_spiff_task._has_state(TaskState.PREDICTED_MASK): + # TaskService.remove_spiff_task_from_parent(waiting_spiff_task, self.task_service.task_models) + # for cpt in waiting_spiff_task.parent.children: + # if cpt.id == waiting_spiff_task.id: + # waiting_spiff_task.parent.children.remove(cpt) + # continue + # # if waiting_spiff_task.state == TaskState.FUTURE: + # # continue + # start_and_end_times = None + # if 
waiting_spiff_task.id in self.spiff_task_timestamps: + # start_and_end_times = self.spiff_task_timestamps[waiting_spiff_task.id] + # self.task_service.update_task_model_with_spiff_task(waiting_spiff_task, start_and_end_times=start_and_end_times) + # + # if self.last_completed_spiff_task is not None: + # self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) + + # # NOTE: process-children-of-last-task: this does not work with escalation boundary events + # if self.last_completed_spiff_task is not None: + # self.task_service.process_spiff_task_children(self.last_completed_spiff_task) + # self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) def _should_update_task_model(self) -> bool: """We need to figure out if we have previously save task info on this process intance. @@ -121,101 +184,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): # return self.process_instance.bpmn_process_id is not None return True - def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: - for json_data_dict in json_data_dict_list: - if json_data_dict is not None: - self.json_data_dicts[json_data_dict["hash"]] = json_data_dict - - def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel: - bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, - self.process_instance, - self.serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) - ) - bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( - bpmn_process or task_model.bpmn_process, spiff_task.workflow.data - ) - self.task_models.update(new_task_models) - self.json_data_dicts.update(new_json_data_dicts) - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) - 
self.task_models[task_model.guid] = task_model - if bpmn_process_json_data is not None: - json_data_dict_list.append(bpmn_process_json_data) - self._update_json_data_dicts_using_list(json_data_dict_list) - - if task_model.state == "COMPLETED" or task_failed: - event_type = ProcessInstanceEventType.task_completed.value - if task_failed: - event_type = ProcessInstanceEventType.task_failed.value - - # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete - # which script tasks execute when READY. - timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() - process_instance_event = ProcessInstanceEventModel( - task_guid=task_model.guid, - process_instance_id=self.process_instance.id, - event_type=event_type, - timestamp=timestamp, - ) - self.process_instance_events[task_model.guid] = process_instance_event - - return task_model - - -class StepDetailLoggingDelegate(EngineStepDelegate): - """Engine step delegate that takes care of logging spiff step details. - - This separates the concerns of step execution and step logging. 
- """ - - def __init__( - self, - increment_spiff_step: SpiffStepIncrementer, - spiff_step_details_mapping: SpiffStepDetailsMappingBuilder, - ): - """__init__.""" - self.increment_spiff_step = increment_spiff_step - self.spiff_step_details_mapping = spiff_step_details_mapping - self.step_details: List[dict] = [] - self.current_task_start_in_seconds = 0.0 - self.tasks_to_log = { - "BPMN Task", - "Script Task", - "Service Task", - "Default Start Event", - "Exclusive Gateway", - "Call Activity", - # "End Join", - "End Event", - "Default Throwing Event", - "Subprocess", - "Transactional Subprocess", - } - - def should_log(self, spiff_task: SpiffTask) -> bool: - return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith( - ".EndJoin" - ) - - def will_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.current_task_start_in_seconds = time.time() - self.increment_spiff_step() - - def did_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.step_details.append( - self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time()) - ) - - def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None: - db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details) - if commit: - db.session.commit() - class ExecutionStrategy: """Interface of sorts for a concrete execution strategy.""" @@ -223,13 +191,12 @@ class ExecutionStrategy: def __init__(self, delegate: EngineStepDelegate): """__init__.""" self.delegate = delegate - self.bpmn_process_instance = None def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: pass - def save(self) -> None: - self.delegate.save(self.bpmn_process_instance) + def save(self, bpmn_process_instance: BpmnWorkflow) -> None: + self.delegate.save(bpmn_process_instance) class GreedyExecutionStrategy(ExecutionStrategy): @@ -335,11 +302,7 
@@ class WorkflowExecutionService: raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe finally: - self.execution_strategy.save() - spiff_logger = logging.getLogger("spiff") - for handler in spiff_logger.handlers: - if hasattr(handler, "bulk_insert_logs"): - handler.bulk_insert_logs() # type: ignore + self.execution_strategy.save(self.bpmn_process_instance) db.session.commit() if save: @@ -363,6 +326,8 @@ class WorkflowExecutionService: if bpmn_process is not None: bpmn_process_correlations = self.bpmn_process_instance.correlations bpmn_process.properties_json["correlations"] = bpmn_process_correlations + # update correlations correctly but always null out bpmn_messages since they get cleared out later + bpmn_process.properties_json["bpmn_messages"] = [] db.session.add(bpmn_process) db.session.commit() diff --git a/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn b/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn new file mode 100644 index 00000000..eff8cd2f --- /dev/null +++ b/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn @@ -0,0 +1,116 @@ + + + + + Flow_1dk6oyl + + + Flow_0s9lss3 + Flow_02xy1ag + Flow_11uu31d + + + + Flow_0sw85uk + Flow_0s9lss3 + x=1 + + + Flow_02xy1ag + + + x==2 + + + + + Flow_1dk6oyl + Flow_11uu31d + Flow_0sw85uk + + Flow_0ih1i19 + + + + Flow_0dua5j8 + + + + + HEY MANUAL + + Flow_0ih1i19 + Flow_0dua5j8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn b/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn index f4d0190b..ac1486e4 100644 --- a/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn @@ -7,8 +7,8 @@ Flow_0nnh2x9 - - + + ## Hello @@ -16,7 +16,7 @@ 
Flow_0nnh2x9 - + Flow_0stlaxe Flow_1pmem7s @@ -31,7 +31,7 @@ - + diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 939c8c0b..e7817523 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -4,15 +4,15 @@ Flow_0stlaxe - + Flow_1ygcsbt - + ## Hello - Flow_1fktmf7 Flow_1t9ywmr + Flow_0q30935 Flow_09gjylo @@ -21,12 +21,11 @@ Flow_1fktmf7 set_in_top_level_script = 1 - - - - + + + Flow_09gjylo - Flow_1i7syph + Flow_0yxus36 Flow_00k1tii @@ -47,8 +46,8 @@ except: we_move_on = False - - Flow_1i7syph + + Flow_0yxus36 Flow_187mcqe @@ -61,12 +60,18 @@ except: we_move_on == True - + Flow_0lw7sda Flow_1ygcsbt set_top_level_process_script_after_gate = 1 - + + + + + Flow_1fktmf7 + Flow_0q30935 + @@ -74,25 +79,35 @@ except: - + + + + - - + + + - + + - + + - + - + + + + + @@ -100,33 +115,37 @@ except: - - - - - + - - + + - - + + - - + + - - + + - - - - + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index 299f078e..064365d8 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -1,38 +1,141 @@ - - Flow_06g687y - - - - Flow_01e21r0 + + Flow_089aeua - + + Flow_1qsx5et + + + Flow_1qsx5et + Flow_0zedtvv + + Flow_0bkk554 + + + Flow_1cnuh2a + Flow_17hgw9g + set_in_test_process_to_call_subprocess_script = 1 + + + Flow_17hgw9g + + + + + + Flow_0bkk554 + Flow_1cnuh2a + + Flow_1nri60d + + + + Flow_1bfzrzu + + + + Flow_1nri60d + Flow_1bfzrzu + 
set_in_test_process_to_call_subprocess_subprocess_script = 1 + + + + + + - Flow_06g687y - Flow_01e21r0 + Flow_0zedtvv + Flow_089aeua set_in_test_process_to_call_script = 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + - + - - + + - - - + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn index 7452216a..36acf7ab 100644 --- a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn +++ b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn @@ -21,7 +21,7 @@ outer['time'] = time.time_ns() Flow_18gs4jt Flow_1flxgry - outer["inner"] = 'sweet2' + outer["inner"] = 'sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2END_THIS_WILL_TRUNCATE_HERE' diff --git a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn index 540a0e12..d53c8184 100644 --- a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn +++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn @@ -27,7 +27,7 @@ - + Flow_1q47ol8 @@ -36,7 +36,7 @@ Flow_1w3n49n - + Flow_1vld4r2 Flow_13ai5vv @@ -44,7 +44,7 @@ "PT1H" - + Click the button. 
@@ -91,7 +91,7 @@ - + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 5f483fdd..03620228 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -78,7 +78,7 @@ class BaseTest: if bpmn_file_location is None: bpmn_file_location = process_model_id - self.create_process_group(client, user, process_group_description, process_group_display_name) + self.create_process_group_with_api(client, user, process_group_description, process_group_display_name) self.create_process_model_with_api( client, @@ -97,6 +97,15 @@ class BaseTest: return process_model_identifier def create_process_group( + self, + process_group_id: str, + display_name: str = "", + ) -> ProcessGroup: + """Create_process_group.""" + process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False) + return ProcessModelService.add_process_group(process_group) + + def create_process_group_with_api( self, client: FlaskClient, user: Any, @@ -295,7 +304,7 @@ class BaseTest: db.session.add(process_instance) db.session.commit() - ProcessInstanceQueueService.enqueue(process_instance) + ProcessInstanceQueueService.enqueue_new_process_instance(process_instance) return process_instance @@ -353,3 +362,20 @@ class BaseTest: def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str: """Un_modify_modified_process_model_id.""" return modified_identifier.replace(":", "/") + + def create_process_model_with_metadata(self) -> ProcessModelInfo: + self.create_process_group("test_group", "test_group") + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": 
"awesome_var", "path": "outer.inner"}, + {"key": "invoice_number", "path": "invoice_number"}, + ] + }, + ) + return process_model diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f79a3295..7890e156 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -25,7 +25,7 @@ class TestLoggingService(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() @@ -85,7 +85,7 @@ class TestLoggingService(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() @@ -114,7 +114,7 @@ class TestLoggingService(BaseTest): process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, 
spiff_manual_task, {}, initiator_user, human_task_one) headers = self.logged_in_headers(with_super_admin_user) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 600bcb66..c5623f47 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -118,7 +118,7 @@ class TestProcessApi(BaseTest): process_group_id = "test_process_group" process_group_display_name = "Test Process Group" # creates the group directory, and the json file - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_display_name) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_display_name) process_model_id = "sample" model_display_name = "Sample" @@ -169,7 +169,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) text = "Create a Bug Tracker process model " text += "with a Bug Details form that collects summary, description, and priority" @@ -237,7 +237,9 @@ class TestProcessApi(BaseTest): process_model_identifier = f"{process_group_id}/{process_model_id}" initial_primary_process_id = "sample" terminal_primary_process_id = "new_process_id" - self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id) + self.create_process_group_with_api( + client=client, user=with_super_admin_user, process_group_id=process_group_id + ) bpmn_file_name = f"{process_model_id}.bpmn" 
bpmn_file_source_directory = process_model_id @@ -281,7 +283,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -317,7 +319,7 @@ class TestProcessApi(BaseTest): bpmn_file_location = "sample" process_model_identifier = f"{test_process_group_id}/{test_process_model_id}" modified_process_model_identifier = process_model_identifier.replace("/", ":") - self.create_process_group(client, with_super_admin_user, test_process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, test_process_group_id) self.create_process_model_with_api(client, process_model_identifier, user=with_super_admin_user) bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( @@ -362,7 +364,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_update.""" - self.create_process_group(client, with_super_admin_user, "test_process_group", "Test Process Group") + self.create_process_group_with_api(client, with_super_admin_user, "test_process_group", "Test Process Group") process_model_identifier = "test_process_group/make_cookies" self.create_process_model_with_api( client, @@ -403,7 +405,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_process_model_list_all.""" group_id = "test_group/test_sub_group" - self.create_process_group(client, with_super_admin_user, group_id) + self.create_process_group_with_api(client, with_super_admin_user, group_id) # add 5 models to the group for i in range(5): @@ -439,7 +441,7 @@ class 
TestProcessApi(BaseTest): """Test_process_model_list.""" # create a group group_id = "test_group" - self.create_process_group(client, with_super_admin_user, group_id) + self.create_process_group_with_api(client, with_super_admin_user, group_id) # add 5 models to the group for i in range(5): @@ -603,7 +605,7 @@ class TestProcessApi(BaseTest): process_group_id = "test" process_group_display_name = "My Process Group" - self.create_process_group( + self.create_process_group_with_api( client, with_super_admin_user, process_group_id, @@ -632,7 +634,7 @@ class TestProcessApi(BaseTest): group_id = "test_process_group" group_display_name = "Test Group" - self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) + self.create_process_group_with_api(client, with_super_admin_user, group_id, display_name=group_display_name) process_group = ProcessModelService.get_process_group(group_id) assert process_group.display_name == group_display_name @@ -662,7 +664,9 @@ class TestProcessApi(BaseTest): for i in range(5): group_id = f"test_process_group_{i}" group_display_name = f"Test Group {i}" - self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) + self.create_process_group_with_api( + client, with_super_admin_user, group_id, display_name=group_display_name + ) # get all groups response = client.get( @@ -787,7 +791,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Group" process_model_id = "random_fact" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -1091,7 +1095,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_get_process_model_when_not_found.""" 
process_model_dir_name = "THIS_NO_EXISTS" - group_id = self.create_process_group(client, with_super_admin_user, "my_group") + group_id = self.create_process_group_with_api(client, with_super_admin_user, "my_group") bad_process_model_id = f"{group_id}/{process_model_dir_name}" modified_bad_process_model_id = bad_process_model_id.replace("/", ":") response = client.get( @@ -2612,6 +2616,8 @@ class TestProcessApi(BaseTest): content_type="application/json", data=json.dumps(data), ) + assert response.status_code == 200 + assert response.json is not None assert response.json["status"] == "complete" response = client.get( @@ -2619,9 +2625,9 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - end_task = next(task for task in response.json if task["type"] == "End Event") + end_task = next(task for task in response.json if task["bpmn_identifier"] == "Event_174a838") response = client.get( - f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['task_spiff_step']}", + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -2637,9 +2643,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_script_unit_test_run.""" process_group_id = "test_group" - process_model_id = "process_navigation" - bpmn_file_name = "process_navigation.bpmn" - bpmn_file_location = "process_navigation" + process_model_id = "manual_task" + bpmn_file_name = "manual_task.bpmn" + bpmn_file_location = "manual_task" process_model_identifier = self.create_group_and_model_with_bpmn( client=client, user=with_super_admin_user, @@ -2670,35 +2676,21 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) - data = { - "dateTime": "PT1H", - "external": True, - 
"internal": True, - "label": "Event_0e4owa3", - "typename": "TimerEventDefinition", - } - response = client.post( - f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", - headers=self.logged_in_headers(with_super_admin_user), - content_type="application/json", - data=json.dumps(data), - ) - response = client.get( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info", headers=self.logged_in_headers(with_super_admin_user), ) - assert len(response.json) == 1 - task = response.json[0] + assert len(response.json) == 7 + human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one") response = client.post( - f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", + f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{human_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), content_type="application/json", data=json.dumps({"execute": False}), ) assert response.json["status"] == "suspended" - task_model = TaskModel.query.filter_by(guid=task["id"]).first() + task_model = TaskModel.query.filter_by(guid=human_task["guid"]).first() assert task_model is not None assert task_model.state == "COMPLETED" @@ -2707,14 +2699,14 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - assert len(response.json) == 1 + assert len(response.json) == 7 def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" groups = ["group_a", "group_b", "group_b/group_bb"] # setup initial groups for group in groups: - self.create_process_group(client, with_super_admin_user, group, display_name=group) + 
self.create_process_group_with_api(client, with_super_admin_user, group, display_name=group) # make sure initial groups exist for group in groups: persisted = ProcessModelService.get_process_group(group) @@ -2783,7 +2775,7 @@ class TestProcessApi(BaseTest): sub_group_id = "sub_group" original_location = "group_a" original_sub_path = f"{original_location}/{sub_group_id}" - self.create_process_group(client, with_super_admin_user, original_sub_path, display_name=sub_group_id) + self.create_process_group_with_api(client, with_super_admin_user, original_sub_path, display_name=sub_group_id) # make sure original subgroup exists persisted = ProcessModelService.get_process_group(original_sub_path) assert persisted is not None @@ -2835,7 +2827,7 @@ class TestProcessApi(BaseTest): # ) # # process_group_id = "test_group" - # self.create_process_group( + # self.create_process_group_with_api( # client, with_super_admin_user, process_group_id, process_group_id # ) # @@ -3077,6 +3069,18 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" + process_model = self.create_process_model_with_metadata() + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 2 + process_model = load_test_spec( process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", @@ -3115,11 +3119,35 @@ class TestProcessApi(BaseTest): "filterable": False, }, {"Header": "Status", "accessor": "status", "filterable": False}, + {"Header": "awesome_var", "accessor": "awesome_var", "filterable": True}, + {"Header": 
"invoice_number", "accessor": "invoice_number", "filterable": True}, {"Header": "key1", "accessor": "key1", "filterable": True}, {"Header": "key2", "accessor": "key2", "filterable": True}, {"Header": "key3", "accessor": "key3", "filterable": True}, ] + # pluck accessor from each dict in list + accessors = [column["accessor"] for column in response.json] + stock_columns = [ + "id", + "process_model_display_name", + "start_in_seconds", + "end_in_seconds", + "process_initiator_username", + "status", + ] + assert accessors == stock_columns + ["awesome_var", "invoice_number", "key1", "key2", "key3"] + + # expected columns are fewer if we filter by process_model_identifier + response = client.get( + "/v1.0/process-instances/reports/columns?process_model_identifier=save_process_instance_metadata/save_process_instance_metadata", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.json is not None + assert response.status_code == 200 + accessors = [column["accessor"] for column in response.json] + assert accessors == stock_columns + ["key1", "key2", "key3"] + def test_process_instance_list_can_order_by_metadata( self, app: Flask, @@ -3128,7 +3156,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_instance_list_can_order_by_metadata.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", process_model_source_directory="nested-task-data-structure", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py index 3e19607d..e12a1dd5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py +++ 
b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py @@ -34,7 +34,7 @@ class SecretServiceTestHelpers(BaseTest): def add_test_process(self, client: FlaskClient, user: UserModel) -> ProcessModelInfo: """Add_test_process.""" - self.create_process_group( + self.create_process_group_with_api( client, user, self.test_process_group_id, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py index 3a128cff..685788c3 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py @@ -38,7 +38,7 @@ class TestGetGroupMembers(BaseTest): UserService.add_user_to_group(testuser2, group_a) UserService.add_user_to_group(testuser3, group_b) - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( process_model_id="test_group/get_group_members", bpmn_file_name="get_group_members.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py index 5f0e40d3..fcd8b641 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py @@ -23,7 +23,7 @@ class TestGetLastUserCompletingTask(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", 
"test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index 31d2aa69..9595c948 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -54,7 +54,7 @@ class TestGetLocaltime(BaseTest): target_uri="/v1.0/process-groups", permission_names=["read", "create"], ) - self.create_process_group(client=client, user=initiator_user, process_group_id="test_group") + self.create_process_group_with_api(client=client, user=initiator_user, process_group_id="test_group") process_model = load_test_spec( process_model_id="test_group/get_localtime", bpmn_file_name="get_localtime.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py index 84ac7c27..60a93f9a 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py @@ -23,7 +23,7 @@ class TestGetProcessInitiatorUser(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py index d0202a64..bf64b21d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -24,7 +24,7 @@ class TestSaveProcessInstanceMetadata(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_save_process_instance_metadata.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index d41ae3e9..adbd2240 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -91,7 +91,6 @@ class TestErrorHandlingService(BaseTest): # Both send and receive messages should be generated, matched # and considered complete. 
messages = db.session.query(MessageInstanceModel).all() - # import pdb; pdb.set_trace() assert 2 == len(messages) assert "completed" == messages[0].status assert "completed" == messages[1].status diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py index 2d2f7baa..403c2323 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py @@ -153,7 +153,7 @@ class TestMessageService(BaseTest): group_name: str = "test_group", ) -> None: process_group_id = group_name - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model = load_test_spec( "test_group/message", @@ -222,7 +222,7 @@ class TestMessageService(BaseTest): ) -> None: """Test_can_send_message_to_multiple_process_models.""" process_group_id = "test_group_multi" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_sender = load_test_spec( "test_group/message_sender", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py index b81164c1..f229bdf7 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py @@ -33,7 +33,7 @@ class TestPermissions(BaseTest): ) -> None: """Test_user_can_be_given_permission_to_administer_process_group.""" process_group_id = "group-a" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + 
self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) load_test_spec( "group-a/timers_intermediate_catch_event", bpmn_file_name="timers_intermediate_catch_event.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 70f97328..d0d4eb73 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -5,16 +5,19 @@ import pytest from flask import g from flask.app import Flask from flask.testing import FlaskClient -from SpiffWorkflow.task import TaskState # type: ignore +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import ( @@ -23,9 +26,6 @@ from spiffworkflow_backend.services.authorization_service import ( from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) 
-from spiffworkflow_backend.services.process_instance_queue_service import ( - ProcessInstanceIsAlreadyLockedError, -) from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) @@ -72,7 +72,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user = self.find_or_create_user("testuser2") assert initiator_user.principal is not None @@ -140,7 +140,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task_when_using_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") finance_user_four = self.find_or_create_user("testuser4") @@ -256,6 +256,129 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED + def test_properly_resets_process_to_given_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = 
GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task", + process_model_source_directory="manual_task", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + human_task_one.task_name, processor.bpmn_process_instance + ) + assert spiff_manual_task is not None + + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id)) + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + processor.do_engine_steps(save=True) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + assert process_instance.status == "complete" + + def test_properly_resets_process_to_given_task_with_call_activity( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group_with_api(client, 
with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task_with_subprocesses", + process_model_source_directory="manual_task_with_subprocesses", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + processor.suspend() + task_model_to_reset_to = ( + TaskModel.query.join(TaskDefinitionModel) + .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script") + .order_by(TaskModel.id.desc()) # type: ignore + .first() + ) + assert task_model_to_reset_to is not None + ProcessInstanceProcessor.reset_process(process_instance, 
task_model_to_reset_to.guid) + + # make sure sqlalchemy session matches current db state + db.session.expire_all() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + + # make sure we reset to the task we expected + ready_or_waiting_tasks = processor.get_all_ready_or_waiting_tasks() + top_level_subprocess_script_spiff_task = next( + task for task in ready_or_waiting_tasks if task.task_spec.name == "top_level_subprocess_script" + ) + assert top_level_subprocess_script_spiff_task is not None + processor.resume() + processor.do_engine_steps(save=True) + + assert len(process_instance.active_human_tasks) == 1 + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + assert process_instance.status == "complete" + def test_properly_saves_tasks_when_running( self, app: Flask, @@ -263,8 +386,7 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_does_not_recreate_human_tasks_on_multiple_saves.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None @@ -302,7 +424,10 @@ class TestProcessInstanceProcessor(BaseTest): process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = 
processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) # recreate variables to ensure all bpmn json was recreated from scratch from the db @@ -310,38 +435,94 @@ class TestProcessInstanceProcessor(BaseTest): processor_final = ProcessInstanceProcessor(process_instance_relookup) assert process_instance_relookup.status == "complete" - first_data_set = {"set_in_top_level_script": 1} - second_data_set = { - **first_data_set, + data_set_1 = {"set_in_top_level_script": 1} + data_set_2 = { + **data_set_1, **{"set_in_top_level_subprocess": 1, "we_move_on": False}, } - third_data_set = { - **second_data_set, - **{"set_in_test_process_to_call_script": 1}, + data_set_3 = { + **data_set_2, + **{ + "set_in_test_process_to_call_subprocess_subprocess_script": 1, + "set_in_test_process_to_call_subprocess_script": 1, + }, } - fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} - fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} + data_set_4 = { + **data_set_3, + **{ + "set_in_test_process_to_call_script": 1, + }, + } + data_set_5 = {**data_set_4, **{"a": 1, "we_move_on": True}} + data_set_6 = {**data_set_5, **{"set_top_level_process_script_after_gate": 1}} + data_set_7 = {**data_set_6, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} expected_task_data = { - "top_level_script": first_data_set, - "manual_task": first_data_set, - "top_level_subprocess_script": second_data_set, - 
"top_level_subprocess": second_data_set, - "test_process_to_call_script": third_data_set, - "top_level_call_activity": third_data_set, - "end_event_of_manual_task_model": third_data_set, - "top_level_subprocess_script_second": fourth_data_set, - "test_process_to_call_script_second": fourth_data_set, + "top_level_script": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_one": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_two": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, + "top_level_subprocess_script": { + "data": data_set_2, + "bpmn_process_identifier": "top_level_subprocess", + }, + "top_level_subprocess": {"data": data_set_2, "bpmn_process_identifier": "top_level_process"}, + "test_process_to_call_subprocess_script": { + "data": data_set_3, + "bpmn_process_identifier": "test_process_to_call_subprocess", + }, + "top_level_call_activity": {"data": data_set_4, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_two_second": { + "data": data_set_4, + "bpmn_process_identifier": "top_level_process", + }, + "top_level_subprocess_script_second": { + "data": data_set_5, + "bpmn_process_identifier": "top_level_subprocess", + }, + "top_level_subprocess_second": {"data": data_set_5, "bpmn_process_identifier": "top_level_process"}, + "test_process_to_call_subprocess_script_second": { + "data": data_set_5, + "bpmn_process_identifier": "test_process_to_call_subprocess", + }, + "top_level_call_activity_second": { + "data": data_set_5, + "bpmn_process_identifier": "top_level_process", + }, + "end_event_of_manual_task_model": {"data": data_set_6, "bpmn_process_identifier": "top_level_process"}, } - spiff_tasks_checked_once: list = [] + spiff_tasks_checked: list[str] = [] # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly - def assert_spiff_task_is_in_process(spiff_task_identifier: str, 
bpmn_process_identifier: str) -> None: - if spiff_task.task_spec.name == spiff_task_identifier: - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier}." - expected_python_env_data = expected_task_data[spiff_task.task_spec.name] - if spiff_task.task_spec.name in spiff_tasks_checked_once: - expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] + def assert_spiff_task_is_in_process(spiff_task: SpiffTask) -> None: + spiff_task_identifier = spiff_task.task_spec.name + if spiff_task_identifier in expected_task_data: + bpmn_process_identifier = expected_task_data[spiff_task_identifier]["bpmn_process_identifier"] + expected_task_data_key = spiff_task_identifier + if spiff_task_identifier in spiff_tasks_checked: + expected_task_data_key = f"{spiff_task.task_spec.name}_second" + + assert expected_task_data_key not in spiff_tasks_checked + + spiff_tasks_checked.append(expected_task_data_key) + + expected_python_env_data = expected_task_data[expected_task_data_key]["data"] + + base_failure_message = ( + f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key" + f" {expected_task_data_key}." + ) + + count_failure_message = ( + f"{base_failure_message} There are more than 2 entries of this task in the db." + " There should only ever be max 2." 
+ ) + task_models_with_bpmn_identifier_count = ( + TaskModel.query.join(TaskDefinitionModel) + .filter(TaskModel.process_instance_id == process_instance_relookup.id) + .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) + .count() + ) + assert task_models_with_bpmn_identifier_count < 3, count_failure_message task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -351,23 +532,28 @@ class TestProcessInstanceProcessor(BaseTest): task_definition = task_model.task_definition assert task_definition.bpmn_identifier == spiff_task_identifier assert task_definition.bpmn_name == spiff_task_identifier.replace("_", " ").title() - assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier + assert ( + task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier + ), base_failure_message message = ( - f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" + f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. 
Received:" + f" {sorted(task_model.json_data())}" ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message assert task_model.json_data() == expected_python_env_data, message - spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: + if spiff_task.task_spec.name == "our_boundary_event": + assert spiff_task.state == TaskState.CANCELLED + spiff_tasks_checked.append(spiff_task.task_spec.name) + continue + assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call") - assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") - assert_spiff_task_is_in_process("top_level_script", "top_level_process") + assert_spiff_task_is_in_process(spiff_task) if spiff_task.task_spec.name == "top_level_call_activity": # the task id / guid of the call activity gets used as the guid of the bpmn process that it calls @@ -377,8 +563,46 @@ class TestProcessInstanceProcessor(BaseTest): assert bpmn_process_definition is not None assert bpmn_process_definition.bpmn_identifier == "test_process_to_call" assert bpmn_process_definition.bpmn_name == "Test Process To Call" + spiff_tasks_checked.append(spiff_task.task_spec.name) - assert processor.get_data() == fifth_data_set + # Check that the direct parent of the called activity subprocess task is the + # name of the process that was called from the activity. 
+ if spiff_task.task_spec.name == "test_process_to_call_subprocess_script": + task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() + assert task_model is not None + bpmn_process = task_model.bpmn_process + assert bpmn_process is not None + bpmn_process_definition = bpmn_process.bpmn_process_definition + assert bpmn_process_definition is not None + assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess" + assert bpmn_process.direct_parent_process_id is not None + direct_parent_process = BpmnProcessModel.query.filter_by( + id=bpmn_process.direct_parent_process_id + ).first() + assert direct_parent_process is not None + assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" + spiff_tasks_checked.append(spiff_task.task_spec.name) + + expected_task_identifiers = list(expected_task_data.keys()) + [ + "our_boundary_event", + "test_process_to_call_subprocess_script", + "top_level_call_activity", + ] + for task_bpmn_identifier in expected_task_identifiers: + message = ( + f"Expected to have seen a task with a bpmn_identifier of {task_bpmn_identifier} but did not. 
" + f"Only saw {sorted(spiff_tasks_checked)}" + ) + assert task_bpmn_identifier in spiff_tasks_checked, message + + task_models_that_are_predicted_count = ( + TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) + .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore + .count() + ) + assert task_models_that_are_predicted_count == 0 + + assert processor.get_data() == data_set_7 def test_does_not_recreate_human_tasks_on_multiple_saves( self, @@ -388,7 +612,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_does_not_recreate_human_tasks_on_multiple_saves.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None @@ -417,43 +641,6 @@ class TestProcessInstanceProcessor(BaseTest): assert len(process_instance.active_human_tasks) == 1 assert initial_human_task_id == process_instance.active_human_tasks[0].id - # TODO: port this test to queue_service test - def xxx_test_it_can_lock_and_unlock_a_process_instance( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - ) -> None: - initiator_user = self.find_or_create_user("initiator_user") - process_model = load_test_spec( - process_model_id="test_group/model_with_lanes", - bpmn_file_name="lanes_with_owner_dict.bpmn", - process_model_source_directory="model_with_lanes", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - assert process_instance.locked_by is None - assert process_instance.locked_at_in_seconds is None - processor.lock_process_instance("TEST") - - process_instance = 
ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - assert process_instance.locked_by is not None - assert process_instance.locked_at_in_seconds is not None - - with pytest.raises(ProcessInstanceIsAlreadyLockedError): - processor.lock_process_instance("TEST") - - # with pytest.raises(ProcessInstanceLockedBySomethingElseError): - # processor.unlock_process_instance("TEST2") - - processor.unlock_process_instance("TEST") - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - assert process_instance.locked_by is None - assert process_instance.locked_at_in_seconds is None - def test_it_can_loopback_to_previous_bpmn_task_with_gateway( self, app: Flask, @@ -485,14 +672,47 @@ class TestProcessInstanceProcessor(BaseTest): assert len(process_instance.human_tasks) == 2 human_task_two = process_instance.active_human_tasks[0] - # this is just asserting the way the functionality currently works in spiff. - # we would actually expect this to change one day if we stop reusing the same guid - # when we re-do a task. - # assert human_task_two.task_id == human_task_one.task_id - - # EDIT: when using feature/remove-loop-reset branch of SpiffWorkflow, these should be different. 
assert human_task_two.task_id != human_task_one.task_id + def test_it_can_loopback_to_previous_bpmn_subprocess_with_gateway( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + initiator_user = self.find_or_create_user("initiator_user") + process_model = load_test_spec( + process_model_id="test_group/loopback_to_subprocess", + process_model_source_directory="loopback_to_subprocess", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 1 + human_task_one = process_instance.active_human_tasks[0] + + spiff_task = processor.get_task_by_guid(human_task_one.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_one) + + processor = ProcessInstanceProcessor(process_instance) + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 2 + human_task_two = process_instance.active_human_tasks[0] + spiff_task = processor.get_task_by_guid(human_task_two.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_two) + + # ensure this does not raise a KeyError + processor = ProcessInstanceProcessor(process_instance) + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 3 + human_task_three = process_instance.active_human_tasks[0] + spiff_task = processor.get_task_by_guid(human_task_three.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_three) + def test_task_data_is_set_even_if_process_instance_errors( self, app: Flask, diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py new file mode 100644 index 00000000..f676479f --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py @@ -0,0 +1,124 @@ +"""Test_process_instance_queue_service.""" +from contextlib import suppress + +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.services.process_instance_lock_service import ( + ProcessInstanceLockService, +) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceQueueService, +) + + +class TestProcessInstanceQueueService(BaseTest): + """TestProcessInstanceQueueService.""" + + def _create_process_instance(self) -> ProcessInstanceModel: + initiator_user = self.find_or_create_user("initiator_user") + process_model = load_test_spec( + process_model_id="test_group/model_with_lanes", + bpmn_file_name="lanes.bpmn", + process_model_source_directory="model_with_lanes", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + return process_instance + + def test_newly_created_process_instances_are_not_locked_when_added_to_the_queue( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + assert not ProcessInstanceLockService.has_lock(process_instance.id) + queue_entries = ProcessInstanceQueueService.entries_with_status("not_started", None) + check_passed = False + for entry in queue_entries: + if entry.process_instance_id == process_instance.id: + assert entry.locked_by is None + check_passed = True + 
break + assert check_passed + + def test_peek_many_can_see_queue_entries_with_a_given_status( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + queue_entry_ids = ProcessInstanceQueueService.peek_many("not_started") + assert process_instance.id in queue_entry_ids + + def test_can_run_some_code_with_a_dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + check_passed = False + with ProcessInstanceQueueService.dequeued(process_instance): + check_passed = True + assert check_passed + + def test_holds_a_lock_for_dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + assert not ProcessInstanceLockService.has_lock(process_instance.id) + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + assert not ProcessInstanceLockService.has_lock(process_instance.id) + + def test_unlocks_if_an_exception_is_thrown_with_a__dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with suppress(Exception): + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + raise Exception("just testing") + + assert not ProcessInstanceLockService.has_lock(process_instance.id) + + def test_can_call_dequeued_mulitple_times( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + with ProcessInstanceQueueService.dequeued(process_instance): + assert 
ProcessInstanceLockService.has_lock(process_instance.id) + + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + def test_can_nest_multiple_dequeued_calls( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with ProcessInstanceQueueService.dequeued(process_instance): + with ProcessInstanceQueueService.dequeued(process_instance): + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + assert ProcessInstanceLockService.has_lock(process_instance.id) + assert ProcessInstanceLockService.has_lock(process_instance.id) + + assert not ProcessInstanceLockService.has_lock(process_instance.id) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py index 436810cc..0c27a538 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py @@ -89,7 +89,7 @@ class TestProcessInstanceService(BaseTest): self._check_sample_file_data_model("uploaded_files", 0, models[0]) self._check_sample_file_data_model("uploaded_files", 1, models[1]) - def test_can_create_file_data_models_for_fix_of_file_data_and_non_file_data_values( + def test_can_create_file_data_models_for_mix_of_file_data_and_non_file_data_values( self, app: Flask, with_db_and_bpmn_file_cleanup: None, @@ -122,6 +122,8 @@ class TestProcessInstanceService(BaseTest): ) -> None: data = { "not_a_file": "just a value", + "also_no_files": ["not a file", "also not a file"], + "still_no_files": [{"key": "value"}], } models = ProcessInstanceService.file_data_models_for_data(data, 111) @@ -189,3 +191,25 @@ class 
TestProcessInstanceService(BaseTest): ], "not_a_file3": "just a value3", } + + def test_can_create_file_data_models_for_mulitple_single_file_data_values( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + data = { + "File": [ + { + "supporting_files": self.SAMPLE_FILE_DATA, + }, + { + "supporting_files": self.SAMPLE_FILE_DATA, + }, + ], + } + models = ProcessInstanceService.file_data_models_for_data(data, 111) + + assert len(models) == 2 + self._check_sample_file_data_model("File", 0, models[0]) + self._check_sample_file_data_model("File", 1, models[1]) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py index 4d8e1b5b..40a9c96e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py @@ -1,4 +1,6 @@ """Process Model.""" +import re + from flask.app import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest @@ -14,7 +16,6 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) -from spiffworkflow_backend.services.process_model_service import ProcessModelService class TestProcessModel(BaseTest): @@ -22,7 +23,7 @@ class TestProcessModel(BaseTest): def test_initializes_files_as_empty_array(self) -> None: """Test_initializes_files_as_empty_array.""" - process_model_one = self.create_test_process_model(id="model_one", display_name="Model One") + process_model_one = self._create_test_process_model(id="model_one", display_name="Model One") assert process_model_one.files == [] def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory( @@ -33,7 +34,7 @@ class TestProcessModel(BaseTest): 
with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_test", # bpmn_file_name="call_activity_test.bpmn", @@ -53,7 +54,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -84,7 +85,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -120,20 +121,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") - process_model = load_test_spec( - "test_group/hello_world", - process_model_source_directory="nested-task-data-structure", - ) - ProcessModelService.update_process_model( - process_model, - { - "metadata_extraction_paths": [ - {"key": "awesome_var", "path": "outer.inner"}, - {"key": "invoice_number", "path": "invoice_number"}, - ] - }, - ) + process_model = self.create_process_model_with_metadata() process_instance = 
self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) @@ -144,14 +132,17 @@ class TestProcessModel(BaseTest): process_instance_id=process_instance.id, key="awesome_var" ).first() assert process_instance_metadata_awesome_var is not None - assert process_instance_metadata_awesome_var.value == "sweet2" + + # notion 160: ensure that we truncate long values to 255 characters + assert re.match(r"^sweet2.*END$", process_instance_metadata_awesome_var.value) + assert len(process_instance_metadata_awesome_var.value) == 255 process_instance_metadata_awesome_var = ProcessInstanceMetadataModel.query.filter_by( process_instance_id=process_instance.id, key="invoice_number" ).first() assert process_instance_metadata_awesome_var is not None assert process_instance_metadata_awesome_var.value == "123" - def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: + def _create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: """Create_test_process_model.""" return ProcessModelInfo( id=id, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py index 79d52888..0ff8bf46 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py @@ -19,7 +19,7 @@ class TestProcessModelService(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_update_specified_attributes.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", bpmn_file_name="hello_world.bpmn", diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py index e0b1535d..330d115f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py @@ -23,7 +23,7 @@ class TestOpenFile(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_etc_passwd.bpmn", @@ -50,7 +50,7 @@ class TestImportModule(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_env.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py index 0fc3ee66..f5eef2e8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py @@ -26,7 +26,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, 
process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" load_test_spec( @@ -62,7 +62,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -99,7 +99,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -132,7 +132,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" diff --git a/spiffworkflow-frontend/.eslintrc.js b/spiffworkflow-frontend/.eslintrc.js index b6829ff4..5ed20900 100644 --- a/spiffworkflow-frontend/.eslintrc.js +++ b/spiffworkflow-frontend/.eslintrc.js @@ -25,6 +25,10 @@ module.exports = { }, plugins: ['react', 'sonarjs', '@typescript-eslint'], rules: { + // according to https://github.com/typescript-eslint/typescript-eslint/issues/2621, You should turn off the eslint core rule and turn on 
the typescript-eslint rule + // but not sure which of the above "extends" statements is maybe bringing in eslint core + 'no-shadow': 'off', + '@typescript-eslint/no-shadow': ['error'], 'jest/expect-expect': 'off', 'react/jsx-no-bind': 'off', 'jsx-a11y/no-autofocus': 'off', @@ -37,7 +41,8 @@ module.exports = { 'react/react-in-jsx-scope': 'off', 'react/require-default-props': 'off', 'import/prefer-default-export': 'off', - 'no-unused-vars': [ + 'no-unused-vars': 'off', + '@typescript-eslint/no-unused-vars': [ 'error', { destructuredArrayIgnorePattern: '^_', diff --git a/spiffworkflow-frontend/bin/cypress_pilot b/spiffworkflow-frontend/bin/cypress_pilot index ecf50455..fb480a7d 100755 --- a/spiffworkflow-frontend/bin/cypress_pilot +++ b/spiffworkflow-frontend/bin/cypress_pilot @@ -44,7 +44,9 @@ for attempt in $(seq 1 "$ATTEMPTS" ); do start_time=$(date +%s) success="false" - if ./node_modules/.bin/cypress "$command" -c specPattern="cypress/pilot/**/*.cy.{js,jsx,ts,tsx}" --e2e --browser chrome "$@"; then + # spec_pattern="cypress/pilot/**/*.cy.{js,jsx,ts,tsx}" + spec_pattern="cypress/pilot/*.cy.{js,jsx,ts,tsx}" + if ./node_modules/.bin/cypress "$command" -c specPattern="${spec_pattern}" --e2e --browser chrome "$@"; then success="true" fi end_time=$(date +%s) @@ -56,6 +58,9 @@ for attempt in $(seq 1 "$ATTEMPTS" ); do formatted_end_time=$(date "-d@${end_time}" +"%Y-%m-%dT%H-%M-%S") fi - echo "${success},$(( end_time - start_time )),${formatted_start_time},${formatted_end_time},${frontend_url}" >>"$cypress_run_file" + if [[ "$command" != "open" ]]; then + echo "Recording stats to ${cypress_run_file}" + echo "${success},$(( end_time - start_time )),${formatted_start_time},${formatted_end_time},${frontend_url}" >>"$cypress_run_file" + fi done echo "Recorded stats to ${cypress_run_file}" diff --git a/spiffworkflow-frontend/cypress.env.json b/spiffworkflow-frontend/cypress.env.json new file mode 100644 index 00000000..6a14dd82 --- /dev/null +++ 
b/spiffworkflow-frontend/cypress.env.json @@ -0,0 +1,17 @@ +{ + "project_id": "18606", + "requestor_username": "core-a1.contributor", + "requestor_password": "core-a1.contributor", + "budgetowner_username": "fluffy.project-lead", + "budgetowner_password": "fluffy.project-lead", + "peopleopssme_username": "peopleops.partner-a1.sme", + "peopleopssme_password": "peopleops.partner-a1.sme", + "ppgbasme_username": "ppg.ba-a1.sme", + "ppgbasme_password": "ppg.ba-a1.sme", + "securitysme_username": "security-a1.sme", + "securitysme_password": "security-a1.sme", + "infrasme_username": "infra-a1.sme", + "infrasme_password": "infra-a1.sme", + "legalsme_username": "legal-a1.sme", + "legalsme_password": "legal-a1.sme" +} diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index 64e0418a..aa0c6626 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -162,7 +162,7 @@ describe('process-instances', () => { cy.getBySel('process-instance-log-list-link').click(); cy.getBySel('process-instance-log-detailed').click(); cy.contains('process_model_one'); - cy.contains('State change to COMPLETED'); + cy.contains('task_completed'); cy.basicPaginationTest(); }); @@ -182,6 +182,9 @@ describe('process-instances', () => { cy.url().should('include', `status=${processStatus}`); cy.assertAtLeastOneItemInPaginatedResults(); cy.getBySel(`process-instance-status-${processStatus}`); + + // maybe waiting a bit before trying to click makes this work consistently? 
+ cy.wait(1000); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); cy.get('div[aria-label="Clear all selected items"]').should( diff --git a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js index 06e59d81..a4b4a4dd 100644 --- a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js @@ -50,18 +50,19 @@ describe('tasks', () => { ); cy.contains('Task: get_user_generated_number_three'); - cy.getBySel('form-nav-form2').click(); - checkFormFieldIsReadOnly( - 'get_user_generated_number_two', - '#root_user_generated_number_2' - ); - cy.getBySel('form-nav-form1').click(); - checkFormFieldIsReadOnly( - 'get_user_generated_number_one', - '#root_user_generated_number_1' - ); - - cy.getBySel('form-nav-form3').click(); + // TODO: remove this if we decide to completely kill form navigation + // cy.getBySel('form-nav-form2').click(); + // checkFormFieldIsReadOnly( + // 'get_user_generated_number_two', + // '#root_user_generated_number_2' + // ); + // cy.getBySel('form-nav-form1').click(); + // checkFormFieldIsReadOnly( + // 'get_user_generated_number_one', + // '#root_user_generated_number_1' + // ); + // + // cy.getBySel('form-nav-form3').click(); submitInputIntoFormField( 'get_user_generated_number_three', '#root_user_generated_number_3', diff --git a/spiffworkflow-frontend/cypress/fixtures/Free_Test_Data_1MB_PDF.pdf b/spiffworkflow-frontend/cypress/fixtures/Free_Test_Data_1MB_PDF.pdf new file mode 100644 index 00000000..5c8b5b20 Binary files /dev/null and b/spiffworkflow-frontend/cypress/fixtures/Free_Test_Data_1MB_PDF.pdf differ diff --git a/spiffworkflow-frontend/cypress/fixtures/lorem-ipsum.pdf b/spiffworkflow-frontend/cypress/fixtures/lorem-ipsum.pdf new file mode 100644 index 00000000..22ace57c Binary files /dev/null and b/spiffworkflow-frontend/cypress/fixtures/lorem-ipsum.pdf differ diff --git 
a/spiffworkflow-frontend/cypress/fixtures/png-5mb-1.png b/spiffworkflow-frontend/cypress/fixtures/png-5mb-1.png new file mode 100644 index 00000000..879b2441 Binary files /dev/null and b/spiffworkflow-frontend/cypress/fixtures/png-5mb-1.png differ diff --git a/spiffworkflow-frontend/cypress/fixtures/png-5mb-2.png b/spiffworkflow-frontend/cypress/fixtures/png-5mb-2.png new file mode 100644 index 00000000..879b2441 Binary files /dev/null and b/spiffworkflow-frontend/cypress/fixtures/png-5mb-2.png differ diff --git a/spiffworkflow-frontend/cypress/fixtures/sampletext.txt b/spiffworkflow-frontend/cypress/fixtures/sampletext.txt new file mode 100644 index 00000000..648665f6 --- /dev/null +++ b/spiffworkflow-frontend/cypress/fixtures/sampletext.txt @@ -0,0 +1,59 @@ + + + + + Flow_1wvr4fo + + + + Flow_14wkay5 + + + + + + + + + + + + + Flow_1wvr4fo + Flow_03aokn9 + + + + Flow_03aokn9 + Flow_14wkay5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-frontend/cypress/pilot/NDR_PP1/consultingfees.cy.js b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/consultingfees.cy.js new file mode 100644 index 00000000..df431918 --- /dev/null +++ b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/consultingfees.cy.js @@ -0,0 +1,856 @@ +const submitWithUser = ( + username, + password, + processInstanceId, + expectAdditionalApprovalInfoPage = false, + approvaltype +) => { + cy.wait(2000); + cy.log('========Login with : ', username); + cy.log('========processInstanceId: ', processInstanceId); + cy.login(username, password); + + cy.wait(1000); + cy.log('=======visit find by id : '); + cy.visit('/admin/process-instances/find-by-id'); + cy.get('#process-instance-id-input').type(processInstanceId); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks I can complete', { timeout: 60000 }); + + cy.get('.cds--btn').contains(/^Go$/).click(); + + cy.wait(2000); + // approve! 
+ if (approvaltype === "approve") { + cy.get('#root > label:nth-child(1)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is approving this.')); + } else if (approvaltype === "reject") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is rejecting this.')); + } else if (approvaltype === "needmoreinfo") { + cy.get('#root > label:nth-child(2)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info. A software license is a document that provides legally binding guidelines for the use and distribution of software.Software licenses typically provide end users with the right to END.')); + } else if (approvaltype === "providemoreinfo") { + //Form 1 + cy.contains('Task: Submit New Demand Request Details', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 2 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 3 + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 }); + + cy.get('.cds--text-area__wrapper').find('#root').clear().type('Providing additional info. It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. 
We\'ve got people for jobs of any size.'); + + cy.contains('Submit the Request').click(); + cy.get('input[value="Submit the Request"]').click(); + + } else { + + } + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + if (expectAdditionalApprovalInfoPage) { + cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 }); + + cy.get('button') + .contains(/^Continue$/) + .click(); + + } + cy.location({ timeout: 60000 }).should((loc) => { + expect(loc.pathname).to.eq('/tasks'); + }); + cy.wait(2000); + cy.logout(); + cy.wait(2000); +}; + +//Consulting Fees Path - Without Files +describe('Consulting Fees Path - Without Files', () => { + Cypress._.times(5, () => { + //Budget owner approves the request + it('Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Consulting ==== Management consulting 
includes a broad range of activities, and the many firms and their members often define these practices quite differently. One way to categorize the activities is in terms of the professional’s area of expertise.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-12-25'); + cy.get('#root_vendor').clear().type('Embassar'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('ambassadors'); + cy.get('#root_0_item').clear().type('An ambassador is an official envoy, especially a high-ranking diplomat who represents a state.'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('ETH'); + cy.get('#root_0_unit_price').type('1.15'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top freelancers, web developers & designers inexpensively. 
'); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, \‘consultant\’ and advisor\’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + 
+ cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Consulting is defined as the practise of providing a third party with expertise on a matter in exchange for a fee. The service may involve either advisory or implementation services.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-10-02'); + cy.get('#root_vendor').clear().type('Consultancy.uk'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //Item 0 + cy.get('#root_0_sub_category').select('consultants'); + cy.get('#root_0_item').clear().type('Software development consultants with Python background'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('DAI'); + cy.get('#root_0_unit_price').type('1500'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 
+ cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top freelancers, web developers & designers inexpensively. '); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + 
// wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Freelancing - Freelancing is doing specific work for clients without committing to full-time employment. Freelancers often take on multiple projects with different clients simultaneously. IRS considers freelancers to be self-employed individuals.'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-04-15'); + cy.get('#root_vendor').clear().type('Upwork'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('freelancers'); + cy.get('#root_0_item').clear().type('Freelancers to do the Python development and front end react app development'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('1750'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + 
cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top freelancers, web developers & designers inexpensively. '); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. We\'ve got freelancers for jobs of any size or budget across 1800 skills. 
No job is too complex.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + }); +}); + +//Consulting Fees Path - With Files +describe('Consulting Fees Path - With Files', () => { + Cypress._.times(1, () => { + //Budget owner approves the request + it('Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is 
"/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Consulting ==== Management consulting includes a broad range of activities, and the many firms and their members often define these practices quite differently. One way to categorize the activities is in terms of the professional’s area of expertise.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-12-25'); + cy.get('#root_vendor').clear().type('Embassar'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('ambassadors'); + cy.get('#root_0_item').clear().type('An ambassador is an official envoy, especially a high-ranking diplomat who represents a state.'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('ETH'); + cy.get('#root_0_unit_price').type('1.15'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top 
freelancers, web developers & designers inexpensively. '); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the 
type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Consulting is defined as the practise of providing a third party with expertise on a matter in exchange for a fee. The service may involve either advisory or implementation services.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-10-02'); + cy.get('#root_vendor').clear().type('Consultancy.uk'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('consultants'); + cy.get('#root_0_item').clear().type('Software development consultants with Python background'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('DAI'); + cy.get('#root_0_unit_price').type('1500'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + 
cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top freelancers, web developers & designers inexpensively. '); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = 
Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('consult_fees'); + cy.get('#root_purpose').clear().type('Freelancing - Freelancing is doing specific work for clients without committing to full-time employment. Freelancers often take on multiple projects with different clients simultaneously. 
IRS considers freelancers to be self-employed individuals.'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-04-15'); + cy.get('#root_vendor').clear().type('Upwork'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('freelancers'); + cy.get('#root_0_item').clear().type('Freelancers to do the Python development and front end react app development'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('1750'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('consultants'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('freelancers'); + cy.get('#root_2_item').clear().type('Find & hire top freelancers, web developers & designers inexpensively. '); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. 
We\'ve got freelancers for jobs of any size or budget across 1800 skills. No job is too complex.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + }); +}); + + diff --git a/spiffworkflow-frontend/cypress/pilot/NDR_PP1/equipment.cy.js b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/equipment.cy.js new file mode 100644 index 00000000..4e521696 --- /dev/null +++ b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/equipment.cy.js @@ -0,0 +1,2234 @@ +const submitWithUser = ( + username, + password, + processInstanceId, + expectAdditionalApprovalInfoPage = false, + approvaltype +) => { + cy.wait(2000); + cy.log('========Login with : ', username); + cy.log('========processInstanceId: ', processInstanceId); + cy.login(username, password); + + cy.wait(1000); + cy.log('=======visit find by id : '); + cy.visit('/admin/process-instances/find-by-id'); + cy.get('#process-instance-id-input').type(processInstanceId); + + cy.get('button') + 
.contains(/^Submit$/) + .click(); + + cy.contains('Tasks I can complete', { timeout: 60000 }); + + cy.get('.cds--btn').contains(/^Go$/).click(); + + cy.wait(2000); + // approve! + if (approvaltype === "approve") { + cy.get('#root > label:nth-child(1)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is approving this.')); + } else if (approvaltype === "reject") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is rejecting this.')); + } else if (approvaltype === "needmoreinfo") { + cy.get('#root > label:nth-child(2)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info. By contrast, software is the set of instructions that can be stored and run by hardware. Hardware is so-termed because it is "hard" or rigid with respect to changes, whereas software is "soft" END.')); + } else if (approvaltype === "escalateBO") { + cy.get('#root > label:nth-child(1)').click(); + cy.get('.field-boolean > label:nth-child(1)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is escalating to Budget owner.')); + } else if (approvaltype === "providemoreinfo") { + //Form 1 + cy.contains('Task: Submit New Demand Request Details', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 2 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 3 + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 }); + + cy.get('.cds--text-area__wrapper').find('#root').clear().type('Providing additional info. 
Computer hardware includes the physical parts of a computer, such as the case, central processing unit (CPU), random access memory (RAM), monitor, mouse, keyboard, computer data storage, graphics card, sound card'); + + cy.contains('Submit the Request').click(); + cy.get('input[value="Submit the Request"]').click(); + + } else { + + } + + cy.get('button') + .contains(/^Submit$/) + .click(); + + if (expectAdditionalApprovalInfoPage) { + cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 }); + + cy.get('button') + .contains(/^Continue$/) + .click(); + + } + cy.location({ timeout: 60000 }).should((loc) => { + expect(loc.pathname).to.eq('/tasks'); + }); + cy.wait(2000); + cy.logout(); + cy.wait(2000); +}; + +//Equipment Path - Without Files +describe('Equipment Path - Without Files', () => { + + Cypress._.times(1, () => { + //Out of Policy. People Ops Partner Group and Budget owner approves the request + it('Out of Policy. People Ops Partner Group and Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId 
: ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('By contrast, software is the set of instructions that can be stored and run by hardware. Hardware is so-termed because it is "hard" or rigid with respect to changes, whereas software is "soft" because it is easy to change..'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Mech Tech'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Hardware is typically directed by the software to execute any command or instruction'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('5000'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('ledger'); + cy.get('#root_1_item').clear().type('A mainframe computer is a much larger computer that typically fills a room and may cost many hundred'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('yubikey'); + cy.get('#root_2_item').clear().type('A supercomputer is superficially similar to a mainframe but is instead intended for extremely demand'); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('ETH'); + 
cy.get('#root_2_unit_price').type('2.34'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 3 + cy.get('#root_3_sub_category').select('mic_and_head'); + cy.get('#root_3_item').clear().type('The term supercomputer does not refer to a specific technology.'); + cy.get('#root_3_qty').clear().type('6'); + cy.get('#root_3_currency_type').select('Crypto'); + cy.get('#root_3_currency').select('SNT'); + cy.get('#root_3_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann. This describes a design architecture for a electronic digital computer with subdivisions of a processing unit'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Out of 
Policy. People Ops Partner Group approves and Budget owner rejects the request + it('Out of Policy. People Ops Partner Group approves and Budget owner rejects ', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Electronic appliances and services related to the personal computer, including the PC (desktop or laptop), and communication between computers and the services required by intercommunication networks. 
These fundamentally include'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('BestBUY'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('yubikey'); + cy.get('#root_0_item').clear().type('Output devices are designed around the senses of human beings. For example, monitors display text'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('3200'); + + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('When using computer hardware, an upgrade means adding new or additional hardware to a computer that improves its performance, increases its capacity, or adds new features. 
For example, \nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + + }); + }); + + //Out of Policy. People Ops Partner Group approves and Budget owner request for additional details + it('Out of Policy. 
People Ops Partner Group approves and Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Computer hardware includes the physical parts of a computer, such as the case, central processing unit (CPU), random access memory (RAM), monitor, mouse, keyboard, computer data storage, graphics card, sound card, speakers and motherboard.[1][2]'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Amazon.com'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Rather it indicates the fastest computations available at any 
given time. In mid-2011, the fastest'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('4000'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A supercomputer is superficially similar to a mainframe but is instead intended for extremely demanding computational tasks. As of November 2021, the fastest supercomputer on the TOP500 supercomputer list is Fugaku, in Japan'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + // people ops approves second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + //budget owner approves second time + 
submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves the request + it('Within Policy. People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('By contrast, software is the set of instructions that can be stored and run by hardware. 
Hardware is so-termed because it is "hard" or rigid with respect to changes, whereas software is "soft" because it is easy to change.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('EBAY'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Because computer parts contain hazardous materials, there is a growing movement to recycle '); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('HKD'); + cy.get('#root_0_unit_price').type('1236'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Computer hardware contain dangerous chemicals such as lead, mercury, nickel, and cadmium. According to the EPA these e-wastes have a harmful effect on the environment unless they are disposed properly. \nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Within Policy. 
People Ops Partner Group rejects the request + it('Within Policy. People Ops Partner Group rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann. 
This describes a design architecture for a electronic digital computer with subdivisions of a processing unit'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Best Buy'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Computer components contain many toxic substances, like dioxins, polychlorinated biphenyls (PCBs)'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AED'); + cy.get('#root_0_unit_price').type('320'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, \‘consultant\’ and advisor\’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Within Policy. 
People Ops Partner Group request additional info + it('Within Policy. People Ops Partner Group needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case. 
The computer case holds the motherboard'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Walmart'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('As computer hardware contain a wide number of metals inside, the United States Environmental'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('CAD'); + cy.get('#root_0_unit_price').type('435'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann. 
\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //peopleops approves second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + //Within Policy. People Ops Partner Group and Budget owner approves the request + it('Within Policy. 
People Ops Partner Group and Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Laptops are designed for portability but operate similarly to desktop PCs.[5] They may use lower-power or reduced size components, with lower performance than a similarly priced desktop computer'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Fry\'s'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Recycling a computer is made easier by a few of the national services, such as Dell and Apple.'); + 
cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('1200'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann. \nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves and Budget owner rejects the request + it('Within Policy. 
People Ops Partner Group approves and Budget owner rejects ', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The computer case encloses most of the components of the system. It provides mechanical support and protection for internal elements such as the motherboard, disk drives, and power supplies, and controls and directs the flow of cooling air over int. 
'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('BestBUY'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Because computer parts contain hazardous materials, there is a growing movement to recycle old'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. 
Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves and Budget owner request for additional details + it('Within Policy. 
People Ops Partner Group approves and Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The motherboard is the main component of a computer. 
It is a board with integrated circuitry that connects the other parts of the computer including the CPU, the RAM, the disk drives (CD, DVD, hard disk, or any others) as well as any peripherals'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Amazon.com'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Recycling a computer is made easier by a few of the national services, such as Dell and Apple.'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('400'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. 
Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops approves second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + }); +}); + +//Equipment Path - With Files +describe('Equipment Path - With Files', () => { + + Cypress._.times(1, () => { + //Out of Policy. People Ops Partner Group and Budget owner approves the request + it('Out of Policy. 
People Ops Partner Group and Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Equipment most commonly refers to a set of tools or other objects commonly used to achieve a particular objective. 
Different jobs require different kinds of equipment.\nhttps://en.wikipedia.org/wiki/Equipment'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Mech Tech'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('When using computer hardware, an upgrade means adding new or additional hardware to a computer that'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('12300'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('ledger'); + cy.get('#root_1_item').clear().type('A mainframe computer is a much larger computer that typically fills a room and may cost many hundred'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('yubikey'); + cy.get('#root_2_item').clear().type('A supercomputer is superficially similar to a mainframe but is instead intended for extremely demand'); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('ETH'); + cy.get('#root_2_unit_price').type('2.10'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 3 + cy.get('#root_3_sub_category').select('mic_and_head'); + cy.get('#root_3_item').clear().type('The term supercomputer does not refer to a specific technology.'); + cy.get('#root_3_qty').clear().type('6'); + 
cy.get('#root_3_currency_type').select('Crypto'); + cy.get('#root_3_currency').select('SNT'); + cy.get('#root_3_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Out of Policy. People Ops Partner Group approves and Budget owner rejects the request + it('Out of Policy. 
People Ops Partner Group approves and Budget owner rejects ', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The motherboard is the main component of a computer. 
It is a board with integrated circuitry that connects the other parts of the computer including the CPU, the RAM, the disk drives (CD, DVD, hard disk, or any others) as well as any peripherals'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('BestBUY'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Because computer parts contain hazardous materials, there is a growing movement to recycle old'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('3000'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. 
Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + + }); + }); + + //Out of Policy. People Ops Partner Group approves and Budget owner request for additional details + it('Out of Policy. 
People Ops Partner Group approves and Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('The CPU (central processing unit), which performs most of the calculations which enable a computer to function, and is referred to as the brain of the computer.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Amazon.com'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Because computer parts contain hazardous materials, there is a growing movement to recycle old'); + 
cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('4000'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops approves second time + submitWithUser( + peopleOpsUsername, + 
peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves the request + it('Within Policy. People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('An expansion card in computing is a printed circuit board that can be inserted into an expansion slot of a computer motherboard or backplane to add functionality to a computer system via the expansion bus. 
Expansion cards can be used to obtain'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Amazon'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Recycling a computer is made easier by a few of the national services, such as Dell and Apple.'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('CHF'); + cy.get('#root_0_unit_price').type('240'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Within Policy. 
People Ops Partner Group rejects the request + it('Within Policy. People Ops Partner Group rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Storage device is any computing hardware and digital media that is used for storing, porting and extracting data files and objects. 
It can hold and store information both temporarily and permanently and can be internal or external to a computer.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Best Buy'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('ledger'); + cy.get('#root_0_item').clear().type('The central processing unit contains many toxic materials. It contains lead and chromium in metal'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('CNY'); + cy.get('#root_0_unit_price').type('1560'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann..\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + 
//Within Policy. People Ops Partner Group request additional info + it('Within Policy. People Ops Partner Group needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Data is stored by a computer using a variety of media. 
Hard disk drives (HDDs) are found in virtually all older computers, due to their high capacity and low cost, but solid-state drives (SSDs) are faster and more power efficient.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Fry\'s'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('yubikey'); + cy.get('#root_0_item').clear().type('Data is stored by a computer using a variety of media. Hard disk drives (HDDs) are found'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('COP'); + cy.get('#root_0_unit_price').type('1230'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann..\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + 
processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops approves second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + //Within Policy. People Ops Partner Group and Budget owner approves the request + it('Within Policy. People Ops Partner Group and Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('To transfer data between computers, an external flash memory device (such as a memory card or USB flash drive) or optical disc (such as a CD-ROM, DVD-ROM or BD-ROM) may be used. 
'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Amazon.com'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('mic_and_head'); + cy.get('#root_0_item').clear().type('Data is stored by a computer using a variety of media. Hard disk drives (HDDs) are found'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('ETB'); + cy.get('#root_0_unit_price').type('3200'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann..\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = 
Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves and Budget owner rejects the request + it('Within Policy. People Ops Partner Group approves and Budget owner rejects ', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Input and output devices are typically housed externally to the main computer chassis. 
The following are either standard or very common to many computer systems.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('BestBUY'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('mic_and_head'); + cy.get('#root_0_item').clear().type('The central processing unit contains many toxic materials. It contains lead and chromium in metal'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('1'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The template for all modern computers is the Von Neumann architecture, detailed in a 1945 paper by Hungarian mathematician John von Neumann..\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = 
Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + + }); + }); + + //Within Policy. People Ops Partner Group approves and Budget owner request for additional details + it('Within Policy. People Ops Partner Group approves and Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('equip'); + cy.get('#root_purpose').clear().type('Input devices allow the user to enter information into the system, or control its operation. Most personal computers have a mouse and keyboard, but laptop systems typically use a touchpad instead of a mouse. 
Other input devices include webcams, mic'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Amazon.com'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('laptops'); + cy.get('#root_0_item').clear().type('Because computer parts contain hazardous materials, there is a growing movement to recycle old'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('GBP'); + cy.get('#root_0_unit_price').type('420'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The personal computer is one of the most common types of computer due to its versatility and relatively low price. 
Desktop personal computers have a monitor, a keyboard, a mouse, and a computer case.\nhttps://en.wikipedia.org/wiki/Computer_hardware'); + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops escalate to BO second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "escalateBO" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + }); +}); \ No newline at end of file diff --git a/spiffworkflow-frontend/cypress/pilot/NDR_PP1/learninganddev.cy.js b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/learninganddev.cy.js new file mode 100644 index 00000000..b5aabe7d --- /dev/null +++ 
b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/learninganddev.cy.js @@ -0,0 +1,1439 @@ +const submitWithUser = ( + username, + password, + processInstanceId, + expectAdditionalApprovalInfoPage = false, + approvaltype +) => { + cy.wait(2000); + cy.log('========Login with : ', username); + cy.log('========processInstanceId: ', processInstanceId); + cy.login(username, password); + + cy.wait(1000); + cy.log('=======visit find by id : '); + cy.visit('/admin/process-instances/find-by-id'); + cy.get('#process-instance-id-input').type(processInstanceId); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks I can complete', { timeout: 60000 }); + + cy.get('.cds--btn').contains(/^Go$/).click(); + + cy.wait(2000); + // approve! + if (approvaltype === "approve") { + cy.get('#root > label:nth-child(1)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is approving this.')); + } else if (approvaltype === "reject") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is rejecting this.')); + } else if (approvaltype === "needmoreinfo") { + cy.get('#root > label:nth-child(2)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info. The term, learning and development, encompasses any professional development a business provides to its employees END.')); + } else if (approvaltype === "providemoreinfo") { + //Form 1 + cy.contains('Task: Submit New Demand Request Details', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 2 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 3 + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 }); + + cy.get('.cds--text-area__wrapper').find('#root').clear().type('Providing additional info. 
Learning and development (L&D) is a function within an organization that is responsible for empowering employees\’ growth and developing their knowledge, skills, and capabilities to drive better business performance.'); + + cy.contains('Submit the Request').click(); + cy.get('input[value="Submit the Request"]').click(); + + } else { + + } + + cy.get('button') + .contains(/^Submit$/) + .click(); + + if (expectAdditionalApprovalInfoPage) { + cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 }); + + cy.get('button') + .contains(/^Continue$/) + .click(); + + } + cy.location({ timeout: 60000 }).should((loc) => { + expect(loc.pathname).to.eq('/tasks'); + }); + cy.wait(2000); + cy.logout(); + cy.wait(2000); +}; + +//Learning and Development Path - Without Files +describe('Learning and Development Path - Without Files', () => { + + Cypress._.times(1, () => { + //People Ops Partner Group approves the request + it('Books Only. People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', 
processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('A L&D strategy should be aligned to the organization’s business strategy and goals with the aim of developing the workforce’s capability and driving business results.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('AIHR'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('A bounty is a payment or reward of money to locate'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the workforce\’s capability and driving business results.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + 
processInstanceId, + null, + "approve" + ); + + }); + }); + + //People Ops Partner Group rejects the request + it('Books Only. People Ops Partner Group rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Learning and development (L&D) is a function within an organization that is responsible for empowering employees’ growth and developing their knowledge, skills, and capabilities to drive better business performance. 
'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('EYK Books'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('The role of the L&D function has evolved to meet the demands of digital transformation and a modern workforce.'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('The function may be organized centrally, either independently or sitting under human resources (HR); decentralized throughout different business units; or be a hybrid (sometimes referred to as federated) structure.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //People Ops Partner Group request for additional details + it('Books Only. 
People Ops Partner Group needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('A comprehensive collection of the concepts, definitions, and methodologies for the profession can be found in the. 
\nhttps://www.aihr.com/blog/learning-and-development/'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('BOUNTY'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('There are many different roles that make up a learning and development team or fall under the umbrel'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Current and aspiring talent development professionals can enhance their skills with the various professional education courses offered by ATD Education \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops approves second time + 
submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + //Budget owner approves and People Ops Partner Group approves the request + it('NOT Books Only. Budget owner approves and People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('In 2019, the Association for Talent Development (ATD) conducted a competency study to assess needed talent development capabilities. 
The research found that the knowledge, skills, and attitudes (KSAs) of effective talent development professionals'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Lynda.com'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('on_conf'); + cy.get('#root_0_item').clear().type('The goal of learning and development is to develop or change the behavior of individuals or groups'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('course'); + cy.get('#root_1_item').clear().type('The goal of learning and development is to change the behavior of individuals or groups for better'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Crypto'); + cy.get('#root_1_currency').select('DAI'); + cy.get('#root_1_unit_price').type('2450'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('books'); + cy.get('#root_2_item').clear().type('A L&D strategy should be aligned to the organization\’s business strategy'); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Learning and development is a systematic process to 
enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('NOT Books Only. 
Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. 
\nhttps://www.aihr.com/blog/learning-and-development/'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('Udemy Courses'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('course'); + cy.get('#root_0_item').clear().type('There are many different roles that make up a learning and development team or fall under the'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the workforce\’s capability and driving business results.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('NOT Books Only. 
Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Research found that the knowledge, skills, and attitudes (KSAs) of effective talent development professionals, at every level of their career, fell into three major domains of practice.'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Conference LTD'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('on_conf'); + cy.get('#root_0_item').clear().type('Learning and development is a systematic process to enhance an employee\’s skills'); + cy.get('#root_0_qty').clear().type('4'); + 
cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + + }); + }); + + }); + +}); + +//Learning and Development Path - With Files +describe('Learning and Development Path - With Files', () => { + + Cypress._.times(1, () => { + //People Ops Partner Group approves the request + it('Books Only. 
People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. 
\nhttps://www.aihr.com/blog/learning-and-development/'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Training Industry'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('A L&D strategy should be aligned to the organization\’s business strategy and goals'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the workforce\’s capability and driving business results.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //People Ops Partner Group rejects the request + it('Books Only. 
People Ops Partner Group rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('The goal of learning and development is to develop or change the behavior of individuals or groups for the better, sharing knowledge and insights that enable them to do their work better, or cultivate attitudes that help them perform better'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('EYK Books'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('There are many different roles that make up a learning and development team or fall'); + 
cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //People Ops Partner Group request for additional details + it('Books Only. 
People Ops Partner Group needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the workforce\’s capability and driving business results.'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Conference LTD'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('books'); + cy.get('#root_0_item').clear().type('Learning and development is a systematic process to enhance an employee\’s skills'); + cy.get('#root_0_qty').clear().type('4'); + 
cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Learning, training, and development are often used interchangeably. However, there are subtle differences between these concepts, which are shown in the table below. \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //people ops approves second time + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + //Budget owner approves and People Ops Partner Group approves the request + it('NOT Books Only. 
Budget owner approves and People Ops Partner Group approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. 
\nhttps://www.aihr.com/blog/learning-and-development/'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('The Leadership Laboratory'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + //item 0 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('on_conf'); + cy.get('#root_0_item').clear().type('There are many different roles that make up a learning and development team'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('course'); + cy.get('#root_1_item').clear().type('The goal of learning and development is to change the behavior of individuals or groups for better'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('CAD'); + cy.get('#root_1_unit_price').type('1355'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('books'); + cy.get('#root_2_item').clear().type('A L&D strategy should be aligned to the organization\’s business strategy'); + cy.get('#root_2_qty').clear().type('6'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('SNT'); + cy.get('#root_2_unit_price').type('2300'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the 
workforce\’s capability and driving business results.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('NOT Books Only. 
Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('The goal of learning and development is to develop or change the behavior of individuals or groups for the better, sharing knowledge and insights that enable them to do their work better, or cultivate attitudes that help them perform better'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('Lynda.com'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('course'); + cy.get('#root_0_item').clear().type('The goal of learning and development is to develop or change the behavior of individuals or groups'); + 
cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Learning and development is a systematic process to enhance an employee\’s skills, knowledge, and competency, resulting in better performance in a work setting. \nhttps://www.aihr.com/blog/learning-and-development/'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('NOT Books Only. 
Budget owner needs more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('learn_and_dev'); + cy.get('#root_purpose').clear().type('Learning, training, and development are often used interchangeably. However, there are subtle differences between these concepts, which are shown in the table below. 
'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Conference LTD'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('on_conf'); + cy.get('#root_0_item').clear().type('There are many different roles that make up a learning and development team'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A L&D strategy should be aligned to the organization\’s business strategy and goals with the aim of developing the workforce\’s capability and driving business results.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + 
//budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let peopleOpsUsername = Cypress.env('peopleopssme_username'); + let peopleOpsPassword = Cypress.env('peopleopssme_password'); + cy.log('=====peopleOpsUsername : ' + peopleOpsUsername); + cy.log('=====peopleOpsPassword : ' + peopleOpsPassword); + + submitWithUser( + peopleOpsUsername, + peopleOpsPassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + }); +}); \ No newline at end of file diff --git a/spiffworkflow-frontend/cypress/pilot/NDR_PP1/otherfees.cy.js b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/otherfees.cy.js new file mode 100644 index 00000000..3be8706f --- /dev/null +++ b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/otherfees.cy.js @@ -0,0 +1,744 @@ +const submitWithUser = ( + username, + password, + processInstanceId, + expectAdditionalApprovalInfoPage = false, + approvaltype +) => { + cy.wait(2000); + cy.log('========Login with : ', username); + cy.log('========processInstanceId: ', processInstanceId); + cy.login(username, password); + + cy.wait(1000); + cy.log('=======visit find by id : '); + cy.visit('/admin/process-instances/find-by-id'); + cy.get('#process-instance-id-input').type(processInstanceId); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks I can complete', { timeout: 60000 }); + + cy.get('.cds--btn').contains(/^Go$/).click(); + + cy.wait(2000); + // approve! 
+ if (approvaltype === "approve") { + cy.get('#root > label:nth-child(1)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is approving this.')); + } else if (approvaltype === "reject") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is rejecting this.')); + } else if (approvaltype === "needmoreinfo") { + cy.get('#root > label:nth-child(2)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info. Coworking is not only about providing a physical place, but also about establishing a community. Its rapid growth has been seen as a possible way for city planners to address.')); + } else if (approvaltype === "providemoreinfo") { + //Form 1 + cy.contains('Task: Submit New Demand Request Details', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 2 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 3 + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 }); + + cy.get('.cds--text-area__wrapper').find('#root').clear().type('Providing additional info. 
Coworking tends to fall into two sides: Those that are real-estate-centric (all about selling desks and offices first) while others are community-centric (focused on building community that happens to also have offices)'); + + cy.contains('Submit the Request').click(); + cy.get('input[value="Submit the Request"]').click(); + + } else { + + } + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + if (expectAdditionalApprovalInfoPage) { + cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 }); + + cy.get('button') + .contains(/^Continue$/) + .click(); + + } + cy.location({ timeout: 60000 }).should((loc) => { + expect(loc.pathname).to.eq('/tasks'); + }); + cy.wait(2000); + cy.logout(); + cy.wait(2000); +}; + +describe('Other Fees Path - Without Files', () => { + + Cypress._.times(1, () => { + //Budget owner approves the request + it('Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + 
cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + cy.get('#root_purpose').clear().type('Other Fees and Expenses means, collectively, all fees and expenses payable to Lenders under the Loan Documents, other than principal, interest and default interest/penalty amounts.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('ABC CO'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('bounties'); + cy.get('#root_0_item').clear().type('A bounty is a payment or reward of money to locate'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('coworking'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Crypto'); + cy.get('#root_1_currency').select('SNT'); + cy.get('#root_1_unit_price').type('1355'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + 
cy.get('#root_purpose').clear().type('Other Fees and Expenses means, collectively, all fees and expenses payable to Lenders under the Loan Documents, other than principal, interest and default interest/penalty amounts.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('CO-WORK ENG'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('coworking'); + cy.get('#root_0_item').clear().type('Coworking is an arrangement in which workers for different companies share an office space'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + cy.get('#root_purpose').clear().type(' It allows cost 
savings and convenience through the use of common infrastructures, such as equipment, utilities and receptionist and custodial services, and in some cases refreshments and parcel services.\nhttps://en.wikipedia.org/wiki/Coworking'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('Bounty Co'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('bounties'); + cy.get('#root_0_item').clear().type('Coworking is not only about providing a physical place, but also about establishing a community.'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. We\'ve got freelancers for jobs of any size or budget across 1800 skills. 
No job is too complex.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + }); +}); + +describe('Other Fees Path - With Files', () => { + + Cypress._.times(1, () => { + //Budget owner approves the request + it('Budget owner approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + 
// extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + cy.get('#root_purpose').clear().type('It allows cost savings and convenience through the use of common infrastructures, such as equipment, utilities and receptionist and custodial services, and in some cases refreshments and parcel acceptance services'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Embassar'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('bounties'); + cy.get('#root_0_item').clear().type('A bounty is a payment or reward of money to locate'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('coworking'); + cy.get('#root_1_item').clear().type('A consultant (from Latin: consultare "to deliberate") is a professional'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Crypto'); + cy.get('#root_1_currency').select('DAI'); + cy.get('#root_1_unit_price').type('4250'); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used 
and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + 
cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + cy.get('#root_purpose').clear().type('Other Fees and Expenses means, collectively, all fees and expenses payable to Lenders under the Loan Documents, other than principal, interest and default interest/penalty amounts.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('CO-WORK ENG'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('coworking'); + cy.get('#root_0_item').clear().type('Coworking is not only about providing a physical place, but also about establishing a community'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + 
cy.get('#root_project').select(projectId); + cy.get('#root_category').select('other_fees'); + cy.get('#root_purpose').clear().type('It allows cost savings and convenience through the use of common infrastructures, such as equipment, utilities and receptionist and custodial services, and in some cases refreshments and parcel services.\nhttps://en.wikipedia.org/wiki/Coworking'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('BOUNTY'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('bounties'); + cy.get('#root_0_item').clear().type('Coworking is distinct from business accelerators, business incubators, and executive suites.'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. We\'ve got freelancers for jobs of any size or budget across 1800 skills. 
No job is too complex.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + }); + }); + + }); +}); \ No newline at end of file diff --git a/spiffworkflow-frontend/cypress/pilot/NDR_PP1/softwarelicense.cy.js b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/softwarelicense.cy.js new file mode 100644 index 00000000..f0675f0d --- /dev/null +++ b/spiffworkflow-frontend/cypress/pilot/NDR_PP1/softwarelicense.cy.js @@ -0,0 +1,2801 @@ +const submitWithUser = ( + username, + password, + processInstanceId, + expectAdditionalApprovalInfoPage = false, + approvaltype +) => { + cy.wait(2000); + cy.log('========Login with : ', username); + cy.log('========processInstanceId: ', processInstanceId); + cy.login(username, password); + + cy.wait(1000); + cy.log('=======visit find by id : '); + cy.visit('/admin/process-instances/find-by-id'); + cy.get('#process-instance-id-input').type(processInstanceId); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + 
cy.contains('Tasks I can complete', { timeout: 60000 }); + + cy.get('.cds--btn').contains(/^Go$/).click(); + + cy.wait(2000); + // approve! + if (approvaltype === "approve") { + cy.get('#root > label:nth-child(1)').click(); + //cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is approving this.')); + } else if (approvaltype === "reject") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is rejecting this.')); + } else if (approvaltype === "needmoreinfo") { + cy.get('#root > label:nth-child(2)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info. A software license is a document that provides legally binding guidelines for the use and distribution of software.Software licenses typically provide end users with the right to END.')); + //cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' needs additional info.')); + } else if (approvaltype === "cpapproved") { + cy.get('#root > label:nth-child(3)').click(); + cy.get('.cds--text-area__wrapper').find('#root').type(username.concat(' is selecting CP is Approved.')); + } else if (approvaltype === "providemoreinfo") { + //Form 1 + cy.contains('Task: Submit New Demand Request Details', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 2 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('button') + .contains(/^Submit$/) + .click(); + //Form 3 + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 }); + + cy.get('.cds--text-area__wrapper').find('#root').clear().type('Providing additional info. Open source is a term that originally referred to open source software (OSS). 
Open source software is code that is designed to be publicly accessible—anyone can see, modify, and distribute.'); + + cy.contains('Submit the Request').click(); + cy.get('input[value="Submit the Request"]').click(); + + } else { + + } + + cy.get('button') + .contains(/^Submit$/) + .click(); + + if (expectAdditionalApprovalInfoPage) { + cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 }); + + cy.get('button') + .contains(/^Continue$/) + .click(); + + } + cy.location({ timeout: 60000 }).should((loc) => { + expect(loc.pathname).to.eq('/tasks'); + }); + cy.wait(2000); + cy.logout(); + cy.wait(2000); +}; + +//Software and Licenses Path - Without Files +describe('Software and Licenses Path - Without Files', () => { + + Cypress._.times(1, () => { + + //Everyone approves with CP + it('Everyone approves with CP', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + 
cy.get('#root_purpose').clear().type('Sware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Microsoft'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Open source software is code that is designed to be publicly accessible anyone can see, modify, END'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('1915'); + + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that provides legally binding guidelines for the use and distri END'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('AED'); + cy.get('#root_1_unit_price').type('4500'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('2021 Newest HP 17.3 inch FHD Laptop, AMD Ryzen 5 5500U 6core(Beat i7-1160G7, up to 4.0GHz),16GB RAM, 1TB PCIe SSD, Bluetooth 4.2, WiFi, HDMI, USB-A&C, Windows 10 S, w/Ghost Manta Accessories, Silver\nhttps://www.amazon.com/HP-i7-11G7-Bluetooth-Windows'); + + cy.contains('Submit the 
Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Everyone approves the request + it('Everyone approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + 
cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software licenses typically are proprietary, free or open source. The distinguishing feature is the terms under which users may redistribute or copy the software for future development or use.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Red Hat'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Open source has become a movement and a way of working that reaches beyond software production'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('NZD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that states the rights of the developer and user of software.'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('STN'); + cy.get('#root_1_unit_price').type('380'); + + + 
cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Software also comes with a license key or product key. The key is used to identify and verify the specific version of the software. It is also used to activate the software device.\nhttps://www.techtarget.com/searchcio/definition/software-license'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + 'Task: Update Application Landscape', + "approve" + ); + + 
submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Sware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('Oracle'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + 
cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('It defines the terms of. A user must agree to the terms of the license when acquiring the software.'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Software also comes with a license key or product key. The key is used to identify and verify the specific version of the software. It is also used to activate the software device.\nhttps://www.techtarget.com/searchcio/definition/software-license'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to 
start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('A software license establishes the rights of all parties involved with the software: the author, the provider and the end users. It defines the relationship between the software company and users and explains how they are protected'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('ABC Licensing Co'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('They protect developers\' intellectual property and trade secrets based on copyright laws'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('They define what users can 
do with software code they did not write.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + //Infra reject the request + it('Infra rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the 
radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('There are two general types of software licenses that differ based on how they are viewed under copyright law.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Meta'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('The open source movement uses the values and decentralized production model of open source software'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Free and open source software (FOSS) licenses are often referred to as open source. FOSS source code is available to the customer along with the software product. 
The customer is usually allowed to use the source code to change the software.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + + }); +}); + +//Software and Licenses Path - With Files +describe('Software and Licenses Path - With Files', () => { + + Cypress._.times(1, () => { + + //Everyone approves with CP + it('Everyone approves with CP', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, 
password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Sware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Microsoft Corp'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Open source software is developed in a decentralized and collaborative way'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('JPY'); + cy.get('#root_0_unit_price').type('2416'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + 
+ //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that provides legally binding guidelines for the use and distri END'); + cy.get('#root_1_qty').clear().type('1'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('INR'); + cy.get('#root_1_unit_price').type('4500'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Open source software is developed in a decentralized and collaborative way, relying on peer review and community production. Open source software is often cheaper more flexible. \nhttps://www.redhat.com/en'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + 
let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Everyone approves the request + it('Everyone approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Stware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + 
cy.get('#root_vendor').clear().type('ORACLE LTD'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('A bounty is a payment or reward of money to locate'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('SGD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Open source software is developed in a decentralized and collaborative way, relying on peer review and community production. Open source software is often cheaper more flexible. \nhttps://www.redhat.com/en'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'png-5mb-2.png']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_password'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword 
= Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + 'Task: Update Application Landscape', + "approve" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + 
cy.get('#root_purpose').clear().type('Stware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('Subscription PVT'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('Software development consultants with Python background'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('250'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Open source software is developed in a decentralized and collaborative way, relying on peer review and community production. Open source software is often cheaper more flexible. 
\nhttps://www.redhat.com/en'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + 
cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Stware\nA software license is a document that provides legally binding guidelines for the use and distribution of software.\nSoftware licenses typically provide end users with the right to one or more copies of the software without violating copyrights'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('LIC INST'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('Freelancers to do the Python development and front end react app development'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software. 
Under United States copyright law, all software is copyright protected.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + + + }); + }); + + //Infra rejects the request + it('Infra Rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand 
Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Proprietary licenses are often referred to as closed source. They provide customers with operational code. Users cannot freely alter this software. These licenses also usually restrict reverse engineering the software\'s code to obtain the source code'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Red HAT'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('They provide customers with operational code. 
Users cannot freely alter this software.'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('STN'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('Free and open source software (FOSS) licenses are often referred to as open source. FOSS source code is available to the customer along with the software product. The customer is usually allowed to use the source code to change the software.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'png-5mb-2.png']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + 
submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + }); +}); + +//Software and Licenses Path - With Files and Multiple items +describe('Software and Licenses Path - With Files and Multiple items', () => { + + Cypress._.times(1, () => { + + //Everyone approves with CP + it('Everyone approves with CP', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software licenses typically are proprietary, free or open source. 
The distinguishing feature is the terms under which users may redistribute or copy the software for future development or use.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Meta Corp'); + cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Definition. Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_0_qty').clear().type('1'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that provides binding guidelines for the use and distribution.'); + cy.get('#root_1_qty').clear().type('5'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('EUR'); + cy.get('#root_1_unit_price').type('250'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('lic_and_sub'); + cy.get('#root_2_item').clear().type('Subscription relates to a licensing model that allows users to pay regularly for a computer program'); + cy.get('#root_2_qty').clear().type('10'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('DAI'); + cy.get('#root_2_unit_price').type('12500'); + + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A software 
license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software. Under United States copyright law, all software is copyright protected.'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Everyone approves the request + it('Everyone approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + 
cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software licenses typically are proprietary, free or open source. The distinguishing feature is the terms under which users may redistribute or copy the software for future development or use.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Apple'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Definition. 
Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AED'); + cy.get('#root_0_unit_price').type('1250'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that provides binding guidelines for the use and distribution.'); + cy.get('#root_1_qty').clear().type('5'); + cy.get('#root_1_currency_type').select('Crypto'); + cy.get('#root_1_currency').select('SNT'); + cy.get('#root_1_unit_price').type('25000'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('lic_and_sub'); + cy.get('#root_2_item').clear().type('Subscription relates to a licensing model that allows users to pay regularly for a computer program'); + cy.get('#root_2_qty').clear().type('3'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('ETH'); + cy.get('#root_2_unit_price').type('2.10'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software. 
Under United States copyright law, \nhttps://en.wikipedia.org/wiki/Software_license'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'png-5mb-2.png']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + 'Task: Update Application Landscape', + "approve" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = 
Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software licenses typically are proprietary, free or open source. 
The distinguishing feature is the terms under which users may redistribute or copy the software for future development or use.'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + cy.get('#root_vendor').clear().type('Subscription PVT'); + cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + + //item 0 + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('Subscription relates to a licensing model that allows users to pay regularly for a computer program'); + cy.get('#root_0_qty').clear().type('5'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('250.50'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('lic_and_sub'); + cy.get('#root_1_item').clear().type('A software license is a document that provides binding guidelines for the use and distribution.'); + cy.get('#root_1_qty').clear().type('5'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('GBP'); + cy.get('#root_1_unit_price').type('5200'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('op_src'); + cy.get('#root_2_item').clear().type('Definition. 
Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_2_qty').clear().type('3'); + cy.get('#root_2_currency_type').select('Fiat'); + cy.get('#root_2_currency').select('HKD'); + cy.get('#root_2_unit_price').type('2100'); + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software. Under United States copyright law, \nhttps://en.wikipedia.org/wiki/Software_license'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of 
request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Free and open source software (FOSS) licenses are often referred to as open source. FOSS source code is available to the customer along with the software product. The customer is usually allowed to use the source code to change the software.'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + cy.get('#root_vendor').clear().type('LIC INST'); + cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + //item 0 + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('A software license is a document that provides binding guidelines for the use and distribution.'); + cy.get('#root_0_qty').clear().type('24'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('op_src'); + cy.get('#root_1_item').clear().type('Definition. 
Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_1_qty').clear().type('15'); + cy.get('#root_1_currency_type').select('Crypto'); + cy.get('#root_1_currency').select('ETH'); + cy.get('#root_1_unit_price').type('0.85'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('lic_and_sub'); + cy.get('#root_2_item').clear().type('Subscription relates to a licensing model that allows users to pay regularly for a computer program'); + cy.get('#root_2_qty').clear().type('8'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('DAI'); + cy.get('#root_2_unit_price').type('2100'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software. 
Under United States copyright law, \nhttps://en.wikipedia.org/wiki/Software_license'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'sampletext.txt']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + + + }); + }); + + //Infra rejects the request + it('Infra Rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New 
Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Free and open source software (FOSS) licenses are often referred to as open source. FOSS source code is available to the customer along with the software product. The customer is usually allowed to use the source code to change the software.'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + cy.get('#root_vendor').clear().type('Atlassian'); + cy.get('#root_payment_method').select('Debit Card'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + //item 0 + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Definition. 
Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('THB'); + cy.get('#root_0_unit_price').type('1350'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 1 + cy.get('#root_1_sub_category').select('op_src'); + cy.get('#root_1_item').clear().type('Open source software (OSS) is software that is distributed with its source code'); + cy.get('#root_1_qty').clear().type('15'); + cy.get('#root_1_currency_type').select('Fiat'); + cy.get('#root_1_currency').select('TRY'); + cy.get('#root_1_unit_price').type('3200'); + + cy.get('#root > div:nth-child(3) > p > button').click(); + + //item 2 + cy.get('#root_2_sub_category').select('lic_and_sub'); + cy.get('#root_2_item').clear().type('Subscription relates to a licensing model that allows users to pay regularly for a computer program'); + cy.get('#root_2_qty').clear().type('8'); + cy.get('#root_2_currency_type').select('Crypto'); + cy.get('#root_2_currency').select('DAI'); + cy.get('#root_2_unit_price').type('2100'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + cy.get('.cds--text-area__wrapper').find('#root').type('A typical software license grants the licensee, typically an end-user, permission to use one or more copies of software in ways where such a use would otherwise potentially constitute copyright.\nhttps://en.wikipedia.org/wiki/Software_license'); + + cy.get("input[type=file]") + .attachFile(['lorem-ipsum.pdf', 'png-5mb-1.png', 'Free_Test_Data_1MB_PDF.pdf', 'png-5mb-2.png']); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); 
+ cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + }); +}); + +//Software and Licenses Path - Without Files and with only mandatory fields +describe('Software and Licenses Path - Without Files and with only mandatory fields', () => { + + Cypress._.times(1, () => { + + //Everyone approves with CP + it('Everyone approves with CP', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we 
can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Need to buy a Software'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2024-11-25'); + //cy.get('#root_vendor').clear().type('Embassar'); + //cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Open source software'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('USD'); + cy.get('#root_0_unit_price').type('550'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + //cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Everyone approves the request + it('Everyone approves', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + 
cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('need software'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + //cy.get('#root_vendor').clear().type('Embassar'); + //cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('open source'); + cy.get('#root_0_qty').clear().type('1'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AED'); + cy.get('#root_0_unit_price').type('1520'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + //cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + 'Task: Update Application Landscape', + "approve" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + }); + }); + + //Budget owner rejects the request + it('Budget owner rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + 
cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Nee license'); + cy.get('#root_criticality').select('Medium'); + cy.get('#root_period').clear().type('2024-02-06'); + //cy.get('#root_vendor').clear().type('Subsc LTD'); + //cy.get('#root_payment_method').select('Bank Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('Software development'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('EUR'); + cy.get('#root_0_unit_price').type('1400'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + //cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. 
Consultancy.uk zooms in on this field to get a closer look. \n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + //Budget owner request for additional details + it('Budget owner need more info', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + 
cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software needed'); + cy.get('#root_criticality').select('Low'); + cy.get('#root_period').clear().type('2025-02-25'); + //cy.get('#root_vendor').clear().type('ABC Licensing Co'); + //cy.get('#root_payment_method').select('Crypto Transfer'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('lic_and_sub'); + cy.get('#root_0_item').clear().type('License'); + cy.get('#root_0_qty').clear().type('4'); + cy.get('#root_0_currency_type').select('Crypto'); + cy.get('#root_0_currency').select('SNT'); + cy.get('#root_0_unit_price').type('450'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + //cy.get('.cds--text-area__wrapper').find('#root').type('It\’s free and easy to post a job. Simply fill in a title, description and budget and competitive bids come within minutes. No job is too big or too small. We\'ve got freelancers for jobs of any size or budget across 1800 skills. 
No job is too complex.'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + null, + "needmoreinfo" + ); + + //requestor sending additional info + submitWithUser( + username, + password, + processInstanceId, + null, + "providemoreinfo" + ); + + //budget owner approves second time + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "cpapproved" + ); + + submitWithUser( + legalsmeUsername, + legalsmePassword, + processInstanceId, + null, + "approve" + ); + + + }); + }); + + //Infra reject the request + it('Infra rejects', () => { + let username = Cypress.env('requestor_username'); + let password = Cypress.env('requestor_password'); + cy.log('=====username : ' + username); + cy.log('=====password : ' + password); + + cy.login(username, password); + cy.visit('/'); + + cy.contains('Start New +').click(); + cy.contains('Raise New Demand Request'); + + cy.runPrimaryBpmnFile(true); + + cy.contains('Please select the type of request to start the process.'); + // wait a second to ensure we can click the radio button + + 
cy.wait(2000); + cy.get('input#root-procurement').click(); + cy.wait(2000); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Submit a new demand request for the procurement of needed items', + { timeout: 60000 } + ); + + cy.url().then((currentUrl) => { + // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" + // extract the digits after /tasks + const processInstanceId = currentUrl.match(/(?<=\/tasks\/)\d+/)[0]; + cy.log('==###############===processInstanceId : ', processInstanceId); + let projectId = Cypress.env('project_id'); + cy.get('#root_project').select(projectId); + cy.get('#root_category').select('soft_and_lic'); + cy.get('#root_purpose').clear().type('Software is needed'); + cy.get('#root_criticality').select('High'); + cy.get('#root_period').clear().type('2025-11-25'); + // cy.get('#root_vendor').clear().type('Embassar'); + // cy.get('#root_payment_method').select('Reimbursement'); + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.get('#root_0_sub_category').select('op_src'); + cy.get('#root_0_item').clear().type('Open source'); + cy.get('#root_0_qty').clear().type('2'); + cy.get('#root_0_currency_type').select('Fiat'); + cy.get('#root_0_currency').select('AUD'); + cy.get('#root_0_unit_price').type('2416'); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + cy.contains( + 'Review and provide any supporting information or files for your request.', + { timeout: 60000 } + ); + + //cy.get('.cds--text-area__wrapper').find('#root').type('For professionals working in the professional services, ‘consultant’ and advisor’ are often used and fall under common terminology. Consultancy.uk zooms in on this field to get a closer look. 
\n https://www.consultancy.uk/career/what-is-consulting'); + + cy.contains('Submit the Request').click(); + + cy.get('input[value="Submit the Request"]').click(); + + + cy.get('button') + .contains(/^Submit$/) + .click(); + + + cy.contains('Tasks for my open instances', { timeout: 60000 }); + cy.logout(); + + let budgetOwnerUsername = Cypress.env('budgetowner_username'); + let budgetOwnerPassword = Cypress.env('budgetowner_password'); + cy.log('=====budgetOwnerUsername : ' + budgetOwnerUsername); + cy.log('=====budgetOwnerPassword : ' + budgetOwnerPassword); + + submitWithUser( + budgetOwnerUsername, + budgetOwnerPassword, + processInstanceId, + 'Task: Reminder: Request Additional Budget', + "approve" + ); + + let ppgbasmeUsername = Cypress.env('ppgbasme_username'); + let ppgbasmePassword = Cypress.env('ppgbasme_username'); + let securitysmeUsername = Cypress.env('securitysme_username'); + let securitysmePassword = Cypress.env('securitysme_password'); + let infrasmeUsername = Cypress.env('infrasme_username'); + let infrasmePassword = Cypress.env('infrasme_password'); + let legalsmeUsername = Cypress.env('legalsme_username'); + let legalsmePassword = Cypress.env('legalsme_password'); + + submitWithUser( + ppgbasmeUsername, + ppgbasmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + securitysmeUsername, + securitysmePassword, + processInstanceId, + null, + "approve" + ); + + submitWithUser( + infrasmeUsername, + infrasmePassword, + processInstanceId, + null, + "reject" + ); + + }); + }); + + + }); +}); \ No newline at end of file diff --git a/spiffworkflow-frontend/cypress/pilot/pp1.cy.js b/spiffworkflow-frontend/cypress/pilot/pp1.cy.js index bcb2091f..f2672f26 100644 --- a/spiffworkflow-frontend/cypress/pilot/pp1.cy.js +++ b/spiffworkflow-frontend/cypress/pilot/pp1.cy.js @@ -1,9 +1,13 @@ const approveWithUser = ( username, processInstanceId, - expectAdditionalApprovalInfoPage = false + expectAdditionalApprovalInfoPage = false, + password 
= null ) => { - cy.login(username, username); + if (!password) { + password = username; + } + cy.login(username, password); cy.visit('/admin/process-instances/find-by-id'); cy.get('#process-instance-id-input').type(processInstanceId); cy.get('button') @@ -35,20 +39,8 @@ describe('pp1', () => { cy.login('core-a1.contributor', 'core-a1.contributor'); cy.visit('/'); cy.contains('Start New +').click(); - cy.contains('Raise New Demand Request'); + cy.contains('New Demand Request - Procurement').click(); cy.runPrimaryBpmnFile(true); - cy.contains('Please select the type of request to start the process.'); - // wait a second to ensure we can click the radio button - cy.wait(2000); - cy.get('input#root-procurement').click(); - cy.wait(2000); - cy.get('button') - .contains(/^Submit$/) - .click(); - cy.contains( - 'Submit a new demand request for the procurement of needed items', - { timeout: 60000 } - ); cy.url().then((currentUrl) => { // if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560" @@ -64,17 +56,12 @@ describe('pp1', () => { cy.get('#root_payment_method').select('Bank Transfer'); cy.get('#root_project').select('18564'); cy.get('#root_category').select('soft_and_lic'); - cy.get('button') - .contains(/^Submit$/) - .click(); - - cy.contains('Task: Enter NDR Items', { timeout: 60000 }); - cy.get('#root_0_sub_category').select('op_src'); - cy.get('#root_0_item').clear().type('spiffworkflow'); - cy.get('#root_0_qty').clear().type('1'); - cy.get('#root_0_currency_type').select('Fiat'); - cy.get('#root_0_currency').select('AUD'); - cy.get('#root_0_unit_price').type('100'); + cy.get('#root_item_0_sub_category').select('op_src'); + cy.get('#root_item_0_item_name').clear().type('spiffworkflow'); + cy.get('#root_item_0_qty').clear().type('1'); + cy.get('#root_item_0_currency_type').select('Fiat'); + cy.get('#root_item_0_currency').select('AUD'); + cy.get('#root_item_0_unit_price').type('100'); cy.get('button') .contains(/^Submit$/) .click(); @@ -94,7 +81,8 @@ 
describe('pp1', () => { approveWithUser( 'infra.project-lead', processInstanceId, - 'Task: Reminder: Request Additional Budget' + 'Task: Reminder: Check Existing Budget', + 'infra.project-leadx' ); approveWithUser('ppg.ba-a1.sme', processInstanceId); approveWithUser('security-a1.sme', processInstanceId); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index 404c9af7..6f3c9157 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -1,6 +1,7 @@ import { string } from 'prop-types'; import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; import { miscDisplayName } from './helpers'; +import 'cypress-file-upload'; // *********************************************** // This example commands.js shows you how to @@ -97,12 +98,12 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => { cy.contains(`Process Model: ${modelDisplayName}`); }); +// Intended to be run from the process model show page Cypress.Commands.add( 'runPrimaryBpmnFile', (expectAutoRedirectToHumanTask = false) => { // cy.getBySel('start-process-instance').click(); // click on button with text Start - cy.get('button') .contains(/^Start$/) .click(); @@ -154,6 +155,10 @@ Cypress.Commands.add( .then(($element) => { const oldId = $element.text().trim(); cy.get('.cds--pagination__button--forward').click(); + cy.contains( + `[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, + oldId + ).should('not.exist'); cy.contains(/\b3–4 of \d+/); cy.get('.cds--pagination__button--backward').click(); cy.contains(/\b1–2 of \d+/); diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index adf99f3b..a578aa3a 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -72,6 +72,7 @@ "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", 
"cypress": "^12", + "cypress-file-upload": "^5.0.8", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", @@ -8065,7 +8066,7 @@ }, "node_modules/bpmn-js-spiffworkflow": { "version": "0.0.8", - "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#82260144f90d9a311155066d637664d9e2a3f02e", + "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#24a71ec5e2cbbefce58be4b4610151db4a55a8e1", "license": "MIT", "dependencies": { "inherits": "^2.0.4", @@ -10024,6 +10025,18 @@ "node": "^14.0.0 || ^16.0.0 || >=18.0.0" } }, + "node_modules/cypress-file-upload": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/cypress-file-upload/-/cypress-file-upload-5.0.8.tgz", + "integrity": "sha512-+8VzNabRk3zG6x8f8BWArF/xA/W0VK4IZNx3MV0jFWrJS/qKn8eHfa5nU73P9fOQAgwHFJx7zjg4lwOnljMO8g==", + "dev": true, + "engines": { + "node": ">=8.2.1" + }, + "peerDependencies": { + "cypress": ">3.0.0" + } + }, "node_modules/cypress-plugin-config": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/cypress-plugin-config/-/cypress-plugin-config-1.2.0.tgz", @@ -38214,7 +38227,7 @@ } }, "bpmn-js-spiffworkflow": { - "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#82260144f90d9a311155066d637664d9e2a3f02e", + "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#24a71ec5e2cbbefce58be4b4610151db4a55a8e1", "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main", "requires": { "inherits": "^2.0.4", @@ -39829,6 +39842,13 @@ } } }, + "cypress-file-upload": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/cypress-file-upload/-/cypress-file-upload-5.0.8.tgz", + "integrity": "sha512-+8VzNabRk3zG6x8f8BWArF/xA/W0VK4IZNx3MV0jFWrJS/qKn8eHfa5nU73P9fOQAgwHFJx7zjg4lwOnljMO8g==", + "dev": true, + "requires": {} + }, "cypress-plugin-config": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/cypress-plugin-config/-/cypress-plugin-config-1.2.0.tgz", diff --git a/spiffworkflow-frontend/package.json b/spiffworkflow-frontend/package.json index ade625c7..5294eff1 100644 --- a/spiffworkflow-frontend/package.json +++ b/spiffworkflow-frontend/package.json @@ -108,6 +108,7 @@ "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", "cypress": "^12", + "cypress-file-upload": "^5.0.8", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index eb4f17bf..81b644d1 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -36,6 +36,8 @@ import { getProcessModelFullIdentifierFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, } from '../helpers'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; @@ -68,9 +70,6 @@ import useAPIError from '../hooks/UseApiError'; import { usePermissionFetcher } from '../hooks/PermissionService'; import { Can } from '../contexts/Can'; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; - type OwnProps = { filtersEnabled?: boolean; processModelFullIdentifier?: string; @@ -140,6 +139,7 @@ export default function ProcessInstanceListTable({ const [endFromTimeInvalid, setEndFromTimeInvalid] = useState(false); const [endToTimeInvalid, setEndToTimeInvalid] = useState(false); const [requiresRefilter, setRequiresRefilter] = useState(false); + const [lastColumnFilter, setLastColumnFilter] = useState(''); const processInstanceListPathPrefix = variant === 'all' @@ -388,8 +388,8 @@ export default function ProcessInstanceListTable({ checkFiltersAndRun(); if (autoReload) { return refreshAtInterval( - REFRESH_INTERVAL, - 
REFRESH_TIMEOUT, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, checkFiltersAndRun ); } @@ -1105,10 +1105,18 @@ export default function ProcessInstanceListTable({ return null; } - // get the columns anytime we display the filter options if they are empty - if (availableReportColumns.length < 1) { + let queryParamString = ''; + if (processModelSelection) { + queryParamString += `?process_model_identifier=${processModelSelection.id}`; + } + // get the columns anytime we display the filter options if they are empty. + // and if the columns are not empty, check if the columns are stale + // because we selected a different process model in the filter options. + const columnFilterIsStale = lastColumnFilter !== queryParamString; + if (availableReportColumns.length < 1 || columnFilterIsStale) { + setLastColumnFilter(queryParamString); HttpService.makeCallToBackend({ - path: `/process-instances/reports/columns`, + path: `/process-instances/reports/columns${queryParamString}`, successCallback: setAvailableReportColumns, }); } @@ -1295,7 +1303,6 @@ export default function ProcessInstanceListTable({ end_in_seconds: 'End Time', status: 'Status', process_initiator_username: 'Started By', - spiff_step: 'SpiffWorkflow Step', }; const getHeaderLabel = (header: string) => { return headerLabels[header] ?? 
header; diff --git a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx index b7debc6b..21847bbf 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx @@ -36,10 +36,17 @@ export default function ProcessModelSearch({ const shouldFilterProcessModel = (options: any) => { const processModel: ProcessModel = options.item; - const { inputValue } = options; - return getFullProcessModelLabel(processModel) - .toLowerCase() - .includes((inputValue || '').toLowerCase()); + let { inputValue } = options; + if (!inputValue) { + inputValue = ''; + } + const inputValueArray = inputValue.split(' '); + const processModelLowerCase = + getFullProcessModelLabel(processModel).toLowerCase(); + + return inputValueArray.every((i: any) => { + return processModelLowerCase.includes((i || '').toLowerCase()); + }); }; return ( any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -364,18 +364,18 @@ export default function ReactDiagramEditor({ function highlightBpmnIoElement( canvas: any, - processInstanceTask: ProcessInstanceTask, + task: Task, bpmnIoClassName: string, bpmnProcessIdentifiers: string[] ) { - if (checkTaskCanBeHighlighted(processInstanceTask.name)) { + if (checkTaskCanBeHighlighted(task.bpmn_identifier)) { try { if ( bpmnProcessIdentifiers.includes( - processInstanceTask.process_identifier + task.bpmn_process_definition_identifier ) ) { - canvas.addMarker(processInstanceTask.name, bpmnIoClassName); + canvas.addMarker(task.bpmn_identifier, bpmnIoClassName); } } catch (bpmnIoError: any) { // the task list also contains task for processes called from call activities which will @@ -450,7 +450,10 @@ export default function ReactDiagramEditor({ if (alreadyImportedXmlRef.current) { return; } - diagramModelerToUse.importXML(diagramXMLToDisplay); + diagramModelerToUse.importXML(diagramXMLToDisplay).then(() => { + 
diagramModelerToUse.get('canvas').zoom('fit-viewport'); + }); + alreadyImportedXmlRef.current = true; } diff --git a/spiffworkflow-frontend/src/components/TaskListTable.tsx b/spiffworkflow-frontend/src/components/TaskListTable.tsx index 26577b3b..b6901f73 100644 --- a/spiffworkflow-frontend/src/components/TaskListTable.tsx +++ b/spiffworkflow-frontend/src/components/TaskListTable.tsx @@ -9,14 +9,14 @@ import { getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, } from '../helpers'; import HttpService from '../services/HttpService'; import { PaginationObject, ProcessInstanceTask } from '../interfaces'; import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; type OwnProps = { apiPath: string; @@ -89,7 +89,11 @@ export default function TaskListTable({ }; getTasks(); if (autoReload) { - return refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); + return refreshAtInterval( + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, + getTasks + ); } return undefined; }, [ @@ -141,7 +145,7 @@ export default function TaskListTable({ rowElements.push( diff --git a/spiffworkflow-frontend/src/helpers.tsx b/spiffworkflow-frontend/src/helpers.tsx index 88ab1522..273e12ae 100644 --- a/spiffworkflow-frontend/src/helpers.tsx +++ b/spiffworkflow-frontend/src/helpers.tsx @@ -270,3 +270,11 @@ export const encodeBase64 = (data: string) => { export const decodeBase64 = (data: string) => { return Buffer.from(data, 'base64').toString('ascii'); }; + +const MINUTES_IN_HOUR = 60; +const SECONDS_IN_MINUTE = 60; +const SECONDS_IN_HOUR = MINUTES_IN_HOUR * SECONDS_IN_MINUTE; +const FOUR_HOURS_IN_SECONDS = SECONDS_IN_HOUR * 4; + +export const REFRESH_INTERVAL_SECONDS = 5; +export const REFRESH_TIMEOUT_SECONDS = FOUR_HOURS_IN_SECONDS; diff --git 
a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index a8d73690..1d34054d 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -21,8 +21,47 @@ export interface RecentProcessModel { processModelDisplayName: string; } -export interface ProcessInstanceTask { +export interface TaskPropertiesJson { + parent: string; +} + +export interface TaskDefinitionPropertiesJson { + spec: string; +} + +export interface EventDefinition { + typename: string; + payload: any; + event_definitions: [EventDefinition]; + + message_var?: string; +} + +export interface Task { id: number; + guid: string; + bpmn_identifier: string; + + bpmn_name?: string; + + bpmn_process_direct_parent_guid: string; + bpmn_process_definition_identifier: string; + data: any; + state: string; + typename: string; + properties_json: TaskPropertiesJson; + task_definition_properties_json: TaskDefinitionPropertiesJson; + + event_definition?: EventDefinition; +} + +export interface TaskIds { + completed: Task[]; + readyOrWaiting: Task[]; +} + +export interface ProcessInstanceTask { + id: string; task_id: string; calling_subprocess_task_id: string; @@ -46,7 +85,6 @@ export interface ProcessInstanceTask { type: string; updated_at_in_seconds: number; - task_spiff_step?: number; potential_owner_usernames?: string; assigned_user_group_identifier?: string; } @@ -90,7 +128,6 @@ export interface ProcessInstance { end_in_seconds: number | null; process_initiator_username: string; bpmn_xml_file_contents?: string; - spiff_step?: number; created_at_in_seconds: number; updated_at_in_seconds: number; bpmn_version_control_identifier: string; @@ -258,3 +295,20 @@ export interface JsonSchemaForm { process_model_id: string; required: string[]; } + +export interface ProcessInstanceLogEntry { + bpmn_process_definition_identifier: string; + bpmn_process_definition_name: string; + bpmn_task_type: string; + event_type: string; + spiff_task_guid: string; 
+ task_definition_identifier: string; + task_guid: string; + timestamp: number; + id: number; + process_instance_id: number; + + task_definition_name?: string; + user_id?: number; + username?: string; +} diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index d183dc01..d04d50b1 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -73,7 +73,7 @@ export default function AdminRoutes() { element={} /> } /> } /> } /> { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 4f1d39be..a59b2fab 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Table, Tabs, TabList, Tab } from '@carbon/react'; -import { useParams, useSearchParams } from 'react-router-dom'; +import { Link, useParams, useSearchParams } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { @@ -10,6 +10,7 @@ import { } from '../helpers'; import HttpService from '../services/HttpService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { ProcessInstanceLogEntry } from '../interfaces'; type OwnProps = { variant: string; @@ -50,25 +51,25 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { isDetailedView, ]); - const getTableRow = (row: any) => { + const getTableRow = (logEntry: ProcessInstanceLogEntry) => { const tableRow = []; const taskNameCell = ( - {row.task_definition_name || - (row.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || - (row.bpmn_task_type === 'EndEvent' ? 
'Process Ended' : '')} + {logEntry.task_definition_name || + (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || + (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} ); const bpmnProcessCell = ( - {row.bpmn_process_definition_name || - row.bpmn_process_definition_identifier} + {logEntry.bpmn_process_definition_name || + logEntry.bpmn_process_definition_identifier} ); if (isDetailedView) { tableRow.push( <> - {row.id} + {logEntry.id} {bpmnProcessCell} {taskNameCell} @@ -84,24 +85,44 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { if (isDetailedView) { tableRow.push( <> - {row.bpmn_task_type} - {row.event_type} + {logEntry.bpmn_task_type} + {logEntry.event_type} - {row.username || ( + {logEntry.username || ( system )} ); } - tableRow.push({convertSecondsToFormattedDateTime(row.timestamp)}); - return {tableRow}; + + let timestampComponent = ( + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + ); + if (logEntry.spiff_task_guid) { + timestampComponent = ( + + + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + + + ); + } + tableRow.push(timestampComponent); + + return {tableRow}; }; const buildTable = () => { - const rows = processInstanceLogs.map((row) => { - return getTableRow(row); - }); + const rows = processInstanceLogs.map( + (logEntry: ProcessInstanceLogEntry) => { + return getTableRow(logEntry); + } + ); const tableHeaders = []; if (isDetailedView) { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 36c06d23..29d4bedc 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -7,12 +7,10 @@ import { useSearchParams, } from 'react-router-dom'; import { - CaretRight, TrashCan, StopOutline, PauseOutline, PlayOutline, - CaretLeft, InProgress, Checkmark, Warning, @@ -42,11 +40,14 @@ import { import ButtonWithConfirmation from 
'../components/ButtonWithConfirmation'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { + EventDefinition, PermissionsToCheck, ProcessData, ProcessInstance, ProcessInstanceMetadata, - ProcessInstanceTask, + Task, + TaskDefinitionPropertiesJson, + TaskIds, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessInstanceClass from '../classes/ProcessInstanceClass'; @@ -64,10 +65,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const [processInstance, setProcessInstance] = useState(null); - const [tasks, setTasks] = useState(null); + const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); - const [taskToDisplay, setTaskToDisplay] = - useState(null); + const [taskToDisplay, setTaskToDisplay] = useState(null); + const [taskToTimeTravelTo, setTaskToTimeTravelTo] = useState( + null + ); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); const [showTaskDataLoading, setShowTaskDataLoading] = useState(false); @@ -127,41 +130,58 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } useEffect(() => { - if (permissionsLoaded) { - const processTaskFailure = () => { - setTasksCallHadError(true); - }; - let queryParams = ''; - const processIdentifier = searchParams.get('process_identifier'); - if (processIdentifier) { - queryParams = `?process_identifier=${processIdentifier}`; - } - let apiPath = '/process-instances/for-me'; - if (variant === 'all') { - apiPath = '/process-instances'; - } - HttpService.makeCallToBackend({ - path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, - successCallback: setProcessInstance, - }); - let taskParams = '?all_tasks=true&most_recent_tasks_only=true'; - if (typeof params.spiff_step !== 'undefined') { - taskParams = `${taskParams}&spiff_step=${params.spiff_step}`; - } - let taskPath = ''; - if (ability.can('GET', 
taskListPath)) { - taskPath = `${taskListPath}${taskParams}`; - } - if (taskPath) { - HttpService.makeCallToBackend({ - path: taskPath, - successCallback: setTasks, - failureCallback: processTaskFailure, - }); - } else { - setTasksCallHadError(true); - } + if (!permissionsLoaded) { + return undefined; } + const processTaskFailure = () => { + setTasksCallHadError(true); + }; + const processTasksSuccess = (results: Task[]) => { + if (params.to_task_guid) { + const matchingTask = results.find( + (task: Task) => task.guid === params.to_task_guid + ); + if (matchingTask) { + setTaskToTimeTravelTo(matchingTask); + } + } + setTasks(results); + }; + let queryParams = ''; + const processIdentifier = searchParams.get('process_identifier'); + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } + HttpService.makeCallToBackend({ + path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, + successCallback: setProcessInstance, + }); + let taskParams = '?most_recent_tasks_only=true'; + if (typeof params.to_task_guid !== 'undefined') { + taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`; + } + const bpmnProcessGuid = searchParams.get('bpmn_process_guid'); + if (bpmnProcessGuid) { + taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`; + } + let taskPath = ''; + if (ability.can('GET', taskListPath)) { + taskPath = `${taskListPath}${taskParams}`; + } + if (taskPath) { + HttpService.makeCallToBackend({ + path: taskPath, + successCallback: processTasksSuccess, + failureCallback: processTaskFailure, + }); + } else { + setTasksCallHadError(true); + } + return undefined; }, [ targetUris, params, @@ -211,21 +231,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const getTaskIds = () => { - const taskIds = { completed: [], readyOrWaiting: [] }; + const 
taskIds: TaskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { - const callingSubprocessId = searchParams.get('call_activity_task_id'); - tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) { - if ( - callingSubprocessId && - callingSubprocessId !== task.calling_subprocess_task_id - ) { - return null; - } + tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { - (taskIds.completed as any).push(task); - } - if (task.state === 'READY' || task.state === 'WAITING') { - (taskIds.readyOrWaiting as any).push(task); + taskIds.completed.push(task); + } else if (task.state === 'READY' || task.state === 'WAITING') { + taskIds.readyOrWaiting.push(task); } return null; }); @@ -233,72 +245,57 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return taskIds; }; - const currentSpiffStep = () => { - if (processInstance && typeof params.spiff_step === 'undefined') { - return processInstance.spiff_step || 0; + const currentToTaskGuid = () => { + if (taskToTimeTravelTo) { + return taskToTimeTravelTo.guid; } - - return Number(params.spiff_step); + return null; }; - const showingFirstSpiffStep = () => { - return currentSpiffStep() === 1; + // right now this just assume if taskToTimeTravelTo was passed in then + // this cannot be the active task. + // we may need a better way to figure this out. 
+ const showingActiveTask = () => { + return !taskToTimeTravelTo; }; - const showingLastSpiffStep = () => { - return processInstance && currentSpiffStep() === processInstance.spiff_step; - }; - - const spiffStepLink = (label: any, spiffStep: number) => { + const queryParams = () => { const processIdentifier = searchParams.get('process_identifier'); - const callActivityTaskId = searchParams.get('call_activity_task_id'); + const callActivityTaskId = searchParams.get('bpmn_process_guid'); const queryParamArray = []; if (processIdentifier) { queryParamArray.push(`process_identifier=${processIdentifier}`); } if (callActivityTaskId) { - queryParamArray.push(`call_activity_task_id=${callActivityTaskId}`); + queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`); } - let queryParams = ''; + let queryParamString = ''; if (queryParamArray.length > 0) { - queryParams = `?${queryParamArray.join('&')}`; + queryParamString = `?${queryParamArray.join('&')}`; } + return queryParamString; + }; + const completionViewLink = (label: any, taskGuid: string) => { return ( {label} ); }; - const previousStepLink = () => { - if (showingFirstSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() - 1); - }; - - const nextStepLink = () => { - if (showingLastSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() + 1); - }; - - const returnToLastSpiffStep = () => { - window.location.href = processInstanceShowPageBaseUrl; + const returnToProcessInstance = () => { + window.location.href = `${processInstanceShowPageBaseUrl}${queryParams()}`; }; const resetProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`, - successCallback: returnToLastSpiffStep, + path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`, + successCallback: returnToProcessInstance, httpMethod: 'POST', }); }; @@ -509,7 +506,7 @@ export default function ProcessInstanceShow({ variant }: 
OwnProps) { return
; }; - const processTaskResult = (result: ProcessInstanceTask) => { + const processTaskResult = (result: Task) => { if (result == null) { setTaskDataToDisplay(''); } else { @@ -518,15 +515,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setShowTaskDataLoading(false); }; - const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => { + const initializeTaskDataToDisplay = (task: Task | null) => { if ( task && - task.state === 'COMPLETED' && + (task.state === 'COMPLETED' || task.state === 'READY') && ability.can('GET', targetUris.processInstanceTaskDataPath) ) { setShowTaskDataLoading(true); HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${task.task_spiff_step}`, + path: `${targetUris.processInstanceTaskDataPath}/${task.guid}`, httpMethod: 'GET', successCallback: processTaskResult, failureCallback: (error: any) => { @@ -577,13 +574,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { successCallback: handleProcessDataShowResponse, }); } else if (tasks) { - const matchingTask: any = tasks.find((task: any) => { - const callingSubprocessId = searchParams.get('call_activity_task_id'); + const matchingTask: Task | undefined = tasks.find((task: Task) => { return ( - (!callingSubprocessId || - callingSubprocessId === task.calling_subprocess_task_id) && - task.name === shapeElement.id && - bpmnProcessIdentifiers.includes(task.process_identifier) + task.bpmn_identifier === shapeElement.id && + bpmnProcessIdentifiers.includes( + task.bpmn_process_definition_identifier + ) ); }); if (matchingTask) { @@ -600,7 +596,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const getTaskById = (taskId: string) => { if (tasks !== null) { - return tasks.find((task: any) => task.id === taskId); + return tasks.find((task: Task) => task.guid === taskId) || null; } return null; }; @@ -609,81 +605,86 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { 
console.log('result', result); }; + const getParentTaskFromTask = (task: Task) => { + return task.properties_json.parent; + }; + const createScriptUnitTest = () => { if (taskToDisplay) { - const taskToUse: any = taskToDisplay; - const previousTask: any = getTaskById(taskToUse.parent); + const previousTask: Task | null = getTaskById( + getParentTaskFromTask(taskToDisplay) + ); HttpService.makeCallToBackend({ path: `/process-models/${modifiedProcessModelId}/script-unit-tests`, httpMethod: 'POST', successCallback: processScriptUnitTestCreateResult, postBody: { - bpmn_task_identifier: taskToUse.name, - input_json: previousTask.data, - expected_output_json: taskToUse.data, + bpmn_task_identifier: taskToDisplay.bpmn_identifier, + input_json: previousTask ? previousTask.data : '', + expected_output_json: taskToDisplay.data, }, }); } }; - const isCurrentTask = (task: any) => { + const isActiveTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', - 'Call Activity', + 'CallActivity', 'Transactional Subprocess', ]; return ( (task.state === 'WAITING' && - subprocessTypes.filter((t) => t === task.type).length > 0) || + subprocessTypes.filter((t) => t === task.typename).length > 0) || task.state === 'READY' ); }; - const canEditTaskData = (task: any) => { + const canEditTaskData = (task: Task) => { return ( processInstance && ability.can('PUT', targetUris.processInstanceTaskDataPath) && - isCurrentTask(task) && + isActiveTask(task) && processInstance.status === 'suspended' && - showingLastSpiffStep() + showingActiveTask() ); }; - const canSendEvent = (task: any) => { + const canSendEvent = (task: Task) => { // We actually could allow this for any waiting events const taskTypes = ['Event Based Gateway']; return ( processInstance && processInstance.status === 'waiting' && ability.can('POST', targetUris.processInstanceSendEventPath) && - taskTypes.filter((t) => t === task.type).length > 0 && + taskTypes.filter((t) => t === task.typename).length > 0 && task.state === 
'WAITING' && - showingLastSpiffStep() + showingActiveTask() ); }; - const canCompleteTask = (task: any) => { + const canCompleteTask = (task: Task) => { return ( processInstance && processInstance.status === 'suspended' && ability.can('POST', targetUris.processInstanceCompleteTaskPath) && - isCurrentTask(task) && - showingLastSpiffStep() + isActiveTask(task) && + showingActiveTask() ); }; - const canResetProcess = (task: any) => { + const canResetProcess = (task: Task) => { return ( ability.can('POST', targetUris.processInstanceResetPath) && processInstance && processInstance.status === 'suspended' && task.state === 'READY' && - !showingLastSpiffStep() + !showingActiveTask() ); }; - const getEvents = (task: any) => { - const handleMessage = (eventDefinition: any) => { + const getEvents = (task: Task) => { + const handleMessage = (eventDefinition: EventDefinition) => { if (eventDefinition.typename === 'MessageEventDefinition') { const newEvent = eventDefinition; delete newEvent.message_var; @@ -693,7 +694,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return eventDefinition; }; if (task.event_definition && task.event_definition.event_definitions) - return task.event_definition.event_definitions.map((e: any) => + return task.event_definition.event_definitions.map((e: EventDefinition) => handleMessage(e) ); if (task.event_definition) return [handleMessage(task.event_definition)]; @@ -717,7 +718,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setEditingTaskData(false); const dataObject = taskDataStringToObject(taskDataToDisplay); if (taskToDisplay) { - const taskToDisplayCopy: ProcessInstanceTask = { + const taskToDisplayCopy: Task = { ...taskToDisplay, data: dataObject, }; // spread operator @@ -730,13 +731,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { if (!taskToDisplay) { return; } - console.log('saveTaskData'); removeError(); // taskToUse is copy of taskToDisplay, with 
taskDataToDisplay in data attribute - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`, + path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.guid}`, httpMethod: 'PUT', successCallback: saveTaskDataResult, failureCallback: addError, @@ -759,20 +759,21 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const completeTask = (execute: boolean) => { - const taskToUse: any = taskToDisplay; - HttpService.makeCallToBackend({ - path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`, - httpMethod: 'POST', - successCallback: returnToLastSpiffStep, - postBody: { execute }, - }); + if (taskToDisplay) { + HttpService.makeCallToBackend({ + path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`, + httpMethod: 'POST', + successCallback: returnToProcessInstance, + postBody: { execute }, + }); + } }; - const taskDisplayButtons = (task: any) => { + const taskDisplayButtons = (task: Task) => { const buttons = []; if ( - task.type === 'Script Task' && + task.typename === 'Script Task' && ability.can('PUT', targetUris.processModelShowPath) ) { buttons.push( @@ -785,11 +786,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); } - if (task.type === 'Call Activity') { + if (task.typename === 'CallActivity') { + console.log('task', task); + const taskDefinitionPropertiesJson: TaskDefinitionPropertiesJson = + task.task_definition_properties_json; + console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson); buttons.push( View Call Activity Diagram @@ -971,12 +976,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const taskUpdateDisplayArea = () => { - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay 
}; + if (!taskToDisplay) { + return null; + } + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; const candidateEvents: any = getEvents(taskToUse); if (taskToDisplay) { - let taskTitleText = taskToUse.id; - if (taskToUse.title) { - taskTitleText += ` (${taskToUse.title})`; + let taskTitleText = taskToUse.guid; + if (taskToUse.bpmn_name) { + taskTitleText += ` (${taskToUse.bpmn_name})`; } return ( - {taskToUse.name} ( - {taskToUse.type} + {taskToUse.bpmn_identifier} ( + {taskToUse.typename} ): {taskToUse.state} {taskDisplayButtons(taskToUse)} - {taskToUse.task_spiff_step ? ( +
+ + Guid: {taskToUse.guid} + +
+ {taskToUse.state === 'COMPLETED' ? (
- Task completed at step:{' '} - {spiffStepLink( - `${taskToUse.task_spiff_step}`, - taskToUse.task_spiff_step + {completionViewLink( + 'View process instance at the time when this task was active.', + taskToUse.guid )}
@@ -1012,23 +1024,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return null; }; - const stepsElement = () => { - if (!processInstance) { - return null; - } - return ( - - - - {previousStepLink()} - Step {currentSpiffStep()} of {processInstance.spiff_step} - {nextStepLink()} - - - - ); - }; - const buttonIcons = () => { if (!processInstance) { return null; @@ -1063,6 +1058,39 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return elements; }; + const viewMostRecentStateComponent = () => { + if (!taskToTimeTravelTo) { + return null; + } + const title = `${taskToTimeTravelTo.id}: ${taskToTimeTravelTo.guid}: ${taskToTimeTravelTo.bpmn_identifier}`; + return ( + <> + + +

+ Viewing process instance at the time when{' '} + + + {taskToTimeTravelTo.bpmn_name || + taskToTimeTravelTo.bpmn_identifier} + + {' '} + was active.{' '} + + View current process instance state. + +

+
+
+
+ + ); + }; + if (processInstance && (tasks || tasksCallHadError)) { const taskIds = getTaskIds(); const processModelId = unModifyProcessIdentifierForPathParam( @@ -1116,8 +1144,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {taskUpdateDisplayArea()} {processDataDisplayArea()} {processInstanceMetadataArea()} - {stepsElement()}
+ {viewMostRecentStateComponent()} { + return `/connector-proxy/type-ahead/${category}?prefix=${inputText}&limit=100`; + }; + + const lastSearchTerm = useRef(''); + const [items, setItems] = useState([]); + const [selectedItem, setSelectedItem] = useState(null); + const itemFormatRegex = /[^{}]+(?=})/g; + const itemFormatSubstitutions = itemFormat.match(itemFormatRegex); + + const itemToString = (item: any) => { + if (!item) { + return null; + } + + let str = itemFormat; + itemFormatSubstitutions.forEach((key: string) => { + str = str.replace(`{${key}}`, item[key]); + }); + return str; + }; + + const handleTypeAheadResult = (result: any, inputText: string) => { + if (lastSearchTerm.current === inputText) { + setItems(result); + } + }; + + const typeAheadSearch = (inputText: string) => { + if (inputText) { + lastSearchTerm.current = inputText; + // TODO: check cache of prefixes -> results + HttpService.makeCallToBackend({ + path: pathForCategory(inputText), + successCallback: (result: any) => + handleTypeAheadResult(result, inputText), + }); + } + }; + + return ( + { + setSelectedItem(event.selectedItem); + onChange(itemToString(event.selectedItem)); + }} + id={id} + items={items} + itemToString={itemToString} + placeholder={`Start typing to search for ${category}...`} + titleText={`Type ahead search for ${category}`} + selectedItem={selectedItem} + /> + ); +} + +class UnexpectedHumanTaskType extends Error { + constructor(message: string) { + super(message); + this.name = 'UnexpectedHumanTaskType'; + } +} + +enum FormSubmitType { + Default, + Draft, +} + export default function TaskShow() { const [task, setTask] = useState(null); const [userTasks] = useState(null); @@ -28,8 +109,14 @@ export default function TaskShow() { const navigate = useNavigate(); const [disabled, setDisabled] = useState(false); + // save current form data so that we can avoid validations in certain situations + const [currentFormObject, setCurrentFormObject] = useState({}); + const { addError, 
removeError } = useAPIError(); + // eslint-disable-next-line sonarjs/no-duplicate-string + const supportedHumanTaskTypes = ['User Task', 'Manual Task']; + useEffect(() => { const processResult = (result: ProcessInstanceTask) => { setTask(result); @@ -76,16 +163,24 @@ export default function TaskShow() { } }; - const handleFormSubmit = (event: any) => { + const handleFormSubmit = ( + formObject: any, + _event: any, + submitType: FormSubmitType = FormSubmitType.Default + ) => { if (disabled) { return; } + let queryParams = ''; + if (submitType === FormSubmitType.Draft) { + queryParams = '?save_as_draft=true'; + } setDisabled(true); removeError(); - const dataToSubmit = event.formData; + const dataToSubmit = formObject.formData; delete dataToSubmit.isManualTask; HttpService.makeCallToBackend({ - path: `/tasks/${params.process_instance_id}/${params.task_id}`, + path: `/tasks/${params.process_instance_id}/${params.task_id}${queryParams}`, successCallback: processSubmitResult, failureCallback: (error: any) => { addError(error); @@ -183,6 +278,11 @@ export default function TaskShow() { return errors; }; + const updateFormData = (formObject: any) => { + currentFormObject.formData = formObject.formData; + setCurrentFormObject(currentFormObject); + }; + const formElement = () => { if (!task) { return null; @@ -226,16 +326,35 @@ export default function TaskShow() { } if (task.state === 'READY') { - let buttonText = 'Submit'; + let submitButtonText = 'Submit'; + let saveAsDraftButton = null; if (task.type === 'Manual Task') { - buttonText = 'Continue'; + submitButtonText = 'Continue'; + } else if (task.type === 'User Task') { + saveAsDraftButton = ( + + ); + } else { + throw new UnexpectedHumanTaskType( + `Invalid task type given: ${task.type}. Only supported types: ${supportedHumanTaskTypes}` + ); } reactFragmentToHideSubmitButton = ( -
- -
+ {saveAsDraftButton} + ); } @@ -243,6 +362,8 @@ export default function TaskShow() { return getFieldsWithDateValidations(jsonSchema, formData, errors); }; + const widgets = { typeAhead: TypeAheadWidget }; + return ( @@ -252,8 +373,12 @@ export default function TaskShow() { onSubmit={handleFormSubmit} schema={jsonSchema} uiSchema={formUiSchema} + widgets={widgets} validator={validator} + onChange={updateFormData} customValidate={customValidate} + omitExtraData + liveOmit > {reactFragmentToHideSubmitButton}